From f5a36684b8c2088d36308bad86643bc7ab3d6a78 Mon Sep 17 00:00:00 2001 From: Shahar Kaminsky Date: Wed, 15 Feb 2023 10:42:28 +0200 Subject: [PATCH] A new era MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit most impactful thing you can do is to increase people’s freedom --- .cargo/config.toml | 2 + .dockerignore | 36 + .eslintignore | 8 + .gitattributes | 2 + .githooks/pre-commit | 14 + .githooks/pre-push | 14 + .gitignore | 65 + .gitmodules | 12 + .markdownlintignore | 2 + .prettierignore | 2 + CHANGELOG.md | 0 CODEOWNERS | 1 + CONTRIBUTING.md | 9 + Cargo.lock | 7552 ++++++++++ Cargo.toml | 63 + LICENSE-APACHE | 176 + LICENSE-MIT | 21 + README.md | 37 + bin/ci_run | 6 + bin/run_loadtest_from_github_actions | 32 + bin/zk | 9 + bors.toml | 17 + codecov.yml | 17 + contracts | 1 + core/bin/admin-tools/Cargo.toml | 23 + core/bin/admin-tools/src/application.rs | 70 + core/bin/admin-tools/src/blocks.rs | 309 + core/bin/admin-tools/src/main.rs | 170 + core/bin/admin-tools/src/prover.rs | 170 + core/bin/blob_purger/Cargo.toml | 19 + core/bin/blob_purger/src/main.rs | 143 + core/bin/circuit_synthesizer/Cargo.lock | 6177 ++++++++ core/bin/circuit_synthesizer/Cargo.toml | 31 + .../src/circuit_synthesizer.rs | 246 + core/bin/circuit_synthesizer/src/main.rs | 85 + core/bin/contract-verifier/Cargo.toml | 33 + core/bin/contract-verifier/src/error.rs | 27 + core/bin/contract-verifier/src/main.rs | 136 + core/bin/contract-verifier/src/verifier.rs | 337 + .../bin/contract-verifier/src/zksolc_utils.rs | 104 + .../Cargo.toml | 11 + .../src/main.rs | 24 + core/bin/prover/Cargo.lock | 6393 ++++++++ core/bin/prover/Cargo.toml | 45 + core/bin/prover/README.md | 8 + core/bin/prover/rust-toolchain.toml | 2 + core/bin/prover/src/artifact_provider.rs | 21 + core/bin/prover/src/main.rs | 196 + core/bin/prover/src/prover.rs | 267 + core/bin/prover/src/prover_params.rs | 36 + core/bin/prover/src/socket_listener.rs | 82 + 
.../src/synthesized_circuit_provider.rs | 49 + .../Cargo.toml | 11 + .../src/main.rs | 18 + .../setup_key_generator_and_server/Cargo.lock | 5002 +++++++ .../setup_key_generator_and_server/Cargo.toml | 31 + .../data/.gitkeep | 0 .../setup_key_generator_and_server/src/lib.rs | 56 + .../src/main.rs | 54 + core/bin/storage_logs_migration/Cargo.toml | 11 + core/bin/storage_logs_migration/src/main.rs | 27 + .../bin/system-constants-generator/Cargo.toml | 28 + .../src/intrinsic_costs.rs | 209 + .../system-constants-generator/src/main.rs | 235 + .../system-constants-generator/src/utils.rs | 334 + .../Cargo.toml | 33 + .../data/verification_0_key.json | 399 + .../data/verification_10_key.json | 399 + .../data/verification_11_key.json | 399 + .../data/verification_12_key.json | 399 + .../data/verification_13_key.json | 399 + .../data/verification_14_key.json | 399 + .../data/verification_15_key.json | 399 + .../data/verification_16_key.json | 399 + .../data/verification_17_key.json | 399 + .../data/verification_18_key.json | 399 + .../data/verification_1_key.json | 399 + .../data/verification_2_key.json | 399 + .../data/verification_3_key.json | 399 + .../data/verification_4_key.json | 399 + .../data/verification_5_key.json | 399 + .../data/verification_6_key.json | 399 + .../data/verification_7_key.json | 399 + .../data/verification_8_key.json | 399 + .../data/verification_9_key.json | 399 + .../src/commitment_generator.rs | 104 + .../src/json_to_binary_vk_converter.rs | 31 + .../src/lib.rs | 114 + .../src/main.rs | 82 + .../src/tests.rs | 66 + core/bin/zksync_core/Cargo.toml | 81 + .../src/api_server/execution_sandbox.rs | 663 + .../src/api_server/explorer/api_decl.rs | 80 + .../src/api_server/explorer/api_impl.rs | 488 + .../src/api_server/explorer/mod.rs | 83 + .../src/api_server/explorer/network_stats.rs | 84 + core/bin/zksync_core/src/api_server/mod.rs | 5 + .../src/api_server/tx_sender/error.rs | 127 + .../src/api_server/tx_sender/mod.rs | 571 + 
.../api_server/web3/backend_jsonrpc/error.rs | 45 + .../api_server/web3/backend_jsonrpc/mod.rs | 3 + .../web3/backend_jsonrpc/namespaces/eth.rs | 374 + .../web3/backend_jsonrpc/namespaces/mod.rs | 4 + .../web3/backend_jsonrpc/namespaces/net.rs | 37 + .../web3/backend_jsonrpc/namespaces/web3.rs | 22 + .../web3/backend_jsonrpc/namespaces/zks.rs | 200 + .../web3/backend_jsonrpc/pub_sub.rs | 53 + .../api_server/web3/backend_jsonrpsee/mod.rs | 5 + .../web3/backend_jsonrpsee/namespaces/eth.rs | 205 + .../namespaces/eth_subscribe.rs | 1 + .../web3/backend_jsonrpsee/namespaces/mod.rs | 5 + .../web3/backend_jsonrpsee/namespaces/net.rs | 17 + .../web3/backend_jsonrpsee/namespaces/web3.rs | 8 + .../web3/backend_jsonrpsee/namespaces/zks.rs | 113 + .../zksync_core/src/api_server/web3/mod.rs | 393 + .../src/api_server/web3/namespaces/eth.rs | 852 ++ .../web3/namespaces/eth_subscribe.rs | 118 + .../src/api_server/web3/namespaces/mod.rs | 24 + .../src/api_server/web3/namespaces/net.rs | 20 + .../src/api_server/web3/namespaces/web3.rs | 12 + .../src/api_server/web3/namespaces/zks.rs | 486 + .../src/api_server/web3/pubsub_notifier.rs | 137 + .../zksync_core/src/api_server/web3/state.rs | 162 + .../bin/zksync_core/src/bin/block_reverter.rs | 475 + .../bin/merkle_tree_consistency_checker.rs | 12 + core/bin/zksync_core/src/bin/rocksdb_util.rs | 73 + core/bin/zksync_core/src/bin/zksync_server.rs | 130 + .../zksync_core/src/data_fetchers/error.rs | 83 + core/bin/zksync_core/src/data_fetchers/mod.rs | 36 + .../src/data_fetchers/token_list/mock.rs | 78 + .../src/data_fetchers/token_list/mod.rs | 126 + .../src/data_fetchers/token_list/one_inch.rs | 71 + .../data_fetchers/token_price/coingecko.rs | 219 + .../token_price/coinmarketcap.rs | 193 + .../src/data_fetchers/token_price/mock.rs | 50 + .../src/data_fetchers/token_price/mod.rs | 129 + .../token_trading_volume/mock.rs | 41 + .../data_fetchers/token_trading_volume/mod.rs | 124 + .../token_trading_volume/uniswap.rs | 146 + 
.../zksync_core/src/db_storage_provider.rs | 53 + .../zksync_core/src/eth_sender/aggregator.rs | 271 + .../src/eth_sender/block_publish_criterion.rs | 255 + core/bin/zksync_core/src/eth_sender/error.rs | 7 + .../src/eth_sender/eth_tx_aggregator.rs | 174 + .../src/eth_sender/eth_tx_manager.rs | 609 + .../src/eth_sender/grafana_metrics.rs | 42 + core/bin/zksync_core/src/eth_sender/mod.rs | 16 + core/bin/zksync_core/src/eth_sender/tests.rs | 397 + .../src/eth_sender/zksync_functions.rs | 43 + core/bin/zksync_core/src/eth_watch/client.rs | 229 + core/bin/zksync_core/src/eth_watch/mod.rs | 248 + core/bin/zksync_core/src/eth_watch/tests.rs | 278 + core/bin/zksync_core/src/fee_monitor/mod.rs | 220 + core/bin/zksync_core/src/fee_ticker/error.rs | 12 + .../zksync_core/src/fee_ticker/gas_price.rs | 8 + core/bin/zksync_core/src/fee_ticker/mod.rs | 90 + core/bin/zksync_core/src/fee_ticker/types.rs | 5 + core/bin/zksync_core/src/gas_adjuster/mod.rs | 227 + .../bin/zksync_core/src/gas_adjuster/tests.rs | 61 + .../zksync_core/src/gas_tracker/constants.rs | 17 + core/bin/zksync_core/src/gas_tracker/mod.rs | 95 + core/bin/zksync_core/src/genesis.rs | 267 + .../src/house_keeper/gcs_blob_cleaner.rs | 217 + .../house_keeper/gpu_prover_queue_monitor.rs | 29 + core/bin/zksync_core/src/house_keeper/mod.rs | 5 + .../src/house_keeper/periodic_job.rs | 30 + .../witness_generator_misc_reporter.rs | 44 + .../witness_generator_queue_monitor.rs | 50 + core/bin/zksync_core/src/lib.rs | 548 + .../src/metadata_calculator/mod.rs | 597 + .../src/metadata_calculator/tests.rs | 241 + .../src/state_keeper/batch_executor/mod.rs | 511 + .../state_keeper/batch_executor/tests/mod.rs | 206 + .../batch_executor/tests/tester.rs | 335 + .../src/state_keeper/extractors.rs | 239 + .../src/state_keeper/io/mempool.rs | 622 + .../zksync_core/src/state_keeper/io/mod.rs | 66 + .../zksync_core/src/state_keeper/keeper.rs | 390 + .../src/state_keeper/mempool_actor.rs | 76 + core/bin/zksync_core/src/state_keeper/mod.rs | 
66 + .../state_keeper/seal_criteria/function.rs | 55 + .../src/state_keeper/seal_criteria/gas.rs | 173 + .../seal_criteria/geometry_seal_criteria.rs | 257 + .../src/state_keeper/seal_criteria/mod.rs | 268 + .../seal_criteria/pubdata_bytes.rs | 143 + .../src/state_keeper/seal_criteria/slots.rs | 65 + .../src/state_keeper/seal_criteria/timeout.rs | 91 + .../zksync_core/src/state_keeper/tests/mod.rs | 137 + .../src/state_keeper/tests/tester.rs | 657 + .../bin/zksync_core/src/state_keeper/types.rs | 51 + .../state_keeper/updates/l1_batch_updates.rs | 109 + .../state_keeper/updates/miniblock_updates.rs | 164 + .../src/state_keeper/updates/mod.rs | 175 + .../src/witness_generator/basic_circuits.rs | 367 + .../src/witness_generator/leaf_aggregation.rs | 222 + .../zksync_core/src/witness_generator/mod.rs | 427 + .../src/witness_generator/node_aggregation.rs | 263 + .../precalculated_merkle_paths_provider.rs | 267 + .../src/witness_generator/scheduler.rs | 235 + .../src/witness_generator/tests.rs | 286 + .../src/witness_generator/utils.rs | 34 + core/lib/basic_types/Cargo.toml | 14 + core/lib/basic_types/src/lib.rs | 153 + core/lib/basic_types/src/macros.rs | 78 + core/lib/basic_types/src/network.rs | 90 + core/lib/circuit_breaker/Cargo.toml | 27 + core/lib/circuit_breaker/src/code_hashes.rs | 80 + .../circuit_breaker/src/facet_selectors.rs | 126 + core/lib/circuit_breaker/src/l1_txs.rs | 24 + core/lib/circuit_breaker/src/lib.rs | 79 + core/lib/circuit_breaker/src/utils.rs | 9 + core/lib/circuit_breaker/src/vks.rs | 275 + core/lib/config/Cargo.toml | 22 + core/lib/config/src/configs/api.rs | 239 + core/lib/config/src/configs/chain.rs | 199 + .../config/src/configs/circuit_synthesizer.rs | 83 + .../config/src/configs/contract_verifier.rs | 57 + core/lib/config/src/configs/contracts.rs | 85 + core/lib/config/src/configs/database.rs | 150 + core/lib/config/src/configs/eth_client.rs | 51 + core/lib/config/src/configs/eth_sender.rs | 171 + 
core/lib/config/src/configs/eth_watch.rs | 64 + core/lib/config/src/configs/fetcher.rs | 108 + core/lib/config/src/configs/mod.rs | 43 + core/lib/config/src/configs/nfs.rs | 35 + core/lib/config/src/configs/object_store.rs | 45 + core/lib/config/src/configs/prover.rs | 307 + core/lib/config/src/configs/prover_group.rs | 190 + core/lib/config/src/configs/test_utils.rs | 38 + core/lib/config/src/configs/utils.rs | 18 + .../config/src/configs/witness_generator.rs | 109 + core/lib/config/src/constants/blocks.rs | 8 + core/lib/config/src/constants/contracts.rs | 96 + core/lib/config/src/constants/crypto.rs | 23 + core/lib/config/src/constants/ethereum.rs | 21 + .../config/src/constants/fees/intrinsic.rs | 20 + core/lib/config/src/constants/fees/mod.rs | 50 + core/lib/config/src/constants/mod.rs | 15 + .../config/src/constants/system_context.rs | 57 + .../lib/config/src/constants/trusted_slots.rs | 52 + core/lib/config/src/lib.rs | 48 + core/lib/config/src/test_config/mod.rs | 63 + core/lib/contracts/Cargo.toml | 17 + core/lib/contracts/src/lib.rs | 230 + core/lib/crypto/Cargo.toml | 30 + core/lib/crypto/README.md | 10 + core/lib/crypto/src/convert.rs | 101 + core/lib/crypto/src/error.rs | 21 + core/lib/crypto/src/hasher/blake2.rs | 36 + core/lib/crypto/src/hasher/keccak.rs | 28 + core/lib/crypto/src/hasher/mod.rs | 23 + core/lib/crypto/src/hasher/sha256.rs | 36 + core/lib/crypto/src/lib.rs | 52 + core/lib/crypto/src/primitives.rs | 132 + core/lib/crypto/src/proof.rs | 59 + core/lib/crypto/src/serialization.rs | 462 + core/lib/dal/.gitignore | 1 + core/lib/dal/Cargo.toml | 50 + .../migrations/20211026134308_init.down.sql | 39 + .../dal/migrations/20211026134308_init.up.sql | 232 + ...20120160234_add_tx_execution_info.down.sql | 1 + ...0220120160234_add_tx_execution_info.up.sql | 1 + ...220127113853_tx_received_at_index.down.sql | 1 + ...20220127113853_tx_received_at_index.up.sql | 1 + .../20220204131627_add_merkle_root.down.sql | 1 + 
.../20220204131627_add_merkle_root.up.sql | 2 + ...04223302_transactions_new_mempool.down.sql | 4 + ...0204223302_transactions_new_mempool.up.sql | 9 + ...0220304160112_drop_events_indices.down.sql | 5 + .../20220304160112_drop_events_indices.up.sql | 5 + ...124416_add_l2_to_l1_communication.down.sql | 2 + ...08124416_add_l2_to_l1_communication.up.sql | 2 + .../20220315095541_txs_priority_id.down.sql | 1 + .../20220315095541_txs_priority_id.up.sql | 1 + ...20220315144416_remove_tx_valid_in.down.sql | 2 + .../20220315144416_remove_tx_valid_in.up.sql | 2 + ...321131756_change_events_loc_index.down.sql | 2 + ...20321131756_change_events_loc_index.up.sql | 2 + .../20220325153146_remove_events_id.down.sql | 4 + .../20220325153146_remove_events_id.up.sql | 5 + ...01114554_storage_tables_migration.down.sql | 6 + ...0401114554_storage_tables_migration.up.sql | 7 + .../20220404102332_eth_tx_index.down.sql | 1 + .../20220404102332_eth_tx_index.up.sql | 1 + ...06085905_eth_tx_has_failed_status.down.sql | 1 + ...0406085905_eth_tx_has_failed_status.up.sql | 2 + ...220412142956_add_block_hash_index.down.sql | 1 + ...20220412142956_add_block_hash_index.up.sql | 1 + ...1125432_drop_to_porter_authorized.down.sql | 3 + ...421125432_drop_to_porter_authorized.up.sql | 3 + .../20220504154136_remove_nonce_gaps.down.sql | 1 + .../20220504154136_remove_nonce_gaps.up.sql | 1 + ...0_using_base_fee_and_priority_fee.down.sql | 6 + ...600_using_base_fee_and_priority_fee.up.sql | 6 + ...35000_add_contract_address_column.down.sql | 1 + ...7135000_add_contract_address_column.up.sql | 2 + ...220519101248_btree_events_indices.down.sql | 5 + ...20220519101248_btree_events_indices.up.sql | 5 + .../20220519103453_in_mempool.down.sql | 1 + .../20220519103453_in_mempool.up.sql | 2 + ...0220519150925_eth_sender_refactor.down.sql | 16 + .../20220519150925_eth_sender_refactor.up.sql | 17 + ...431_change_block_commitment_model.down.sql | 2 + ...01431_change_block_commitment_model.up.sql | 1 + 
...820_block_number_tx_index_indices.down.sql | 2 + ...03820_block_number_tx_index_indices.up.sql | 2 + ...20601105448_contract_verification.down.sql | 3 + ...0220601105448_contract_verification.up.sql | 19 + ...0121550_add_predicted_gas_columns.down.sql | 5 + ...610121550_add_predicted_gas_columns.up.sql | 5 + .../20220611134715_token-bridging.down.sql | 3 + .../20220611134715_token-bridging.up.sql | 5 + ...t_verification_compilation_errors.down.sql | 2 + ...act_verification_compilation_errors.up.sql | 2 + ..._l1_block_number_for_priority_ops.down.sql | 1 + ...ck_l1_block_number_for_priority_ops.up.sql | 1 + .../20220704121755_witness_inputs.down.sql | 1 + .../20220704121755_witness_inputs.up.sql | 9 + .../20220705133822_add_value_column.down.sql | 1 + .../20220705133822_add_value_column.up.sql | 1 + ...20220707151034_storage_logs_dedup.down.sql | 1 + .../20220707151034_storage_logs_dedup.up.sql | 21 + ...20220708093726_make-type-not-null.down.sql | 1 + .../20220708093726_make-type-not-null.up.sql | 1 + ...0_save_initial_bootloader_content.down.sql | 1 + ...020_save_initial_bootloader_content.up.sql | 1 + ...626_add_columns_to_witness_inputs.down.sql | 1 + ...73626_add_columns_to_witness_inputs.up.sql | 1 + ...3408_add_used_contracts_to_blocks.down.sql | 2 + ...093408_add_used_contracts_to_blocks.up.sql | 2 + ...220713090226_remove-priority-mode.down.sql | 1 + ...20220713090226_remove-priority-mode.up.sql | 1 + ...-contract-verification-started-at.down.sql | 2 + ...dd-contract-verification-started-at.up.sql | 2 + ...20220801131413_add_paymaster_data.down.sql | 2 + .../20220801131413_add_paymaster_data.up.sql | 2 + .../20220807172230_block_metadata.down.sql | 11 + .../20220807172230_block_metadata.up.sql | 10 + .../20220815130828_support_eip1559.down.sql | 7 + .../20220815130828_support_eip1559.up.sql | 7 + .../20220816141002_add_prover_jobs.down.sql | 1 + .../20220816141002_add_prover_jobs.up.sql | 14 + ...save_block_ergs_per_pubdata_limit.down.sql | 1 + 
...5_save_block_ergs_per_pubdata_limit.up.sql | 1 + .../20220827110416_miniblocks.down.sql | 46 + .../20220827110416_miniblocks.up.sql | 68 + ...solc-version-to-contract-verifier.down.sql | 2 + ...d-solc-version-to-contract-verifier.up.sql | 2 + ...me_taken_column_in_witness_inputs.down.sql | 1 + ...time_taken_column_in_witness_inputs.up.sql | 1 + ..._started_at_to_witness_inputs.sql.down.sql | 2 + ...ng_started_at_to_witness_inputs.sql.up.sql | 2 + ..._time_taken_column_in_prover_jobs.down.sql | 1 + ...dd_time_taken_column_in_prover_jobs.up.sql | 1 + ...8094615_add_error_field_to_queues.down.sql | 3 + ...908094615_add_error_field_to_queues.up.sql | 3 + ...03955_verifier-compilers-versions.down.sql | 2 + ...8103955_verifier-compilers-versions.up.sql | 10 + ...908145203_remove-redundant-fields.down.sql | 2 + ...20908145203_remove-redundant-fields.up.sql | 2 + ..._unique-blocknumber-for-witnesses.down.sql | 1 + ...48_unique-blocknumber-for-witnesses.up.sql | 1 + ...20220915125943_l1_batch_tx_number.down.sql | 1 + .../20220915125943_l1_batch_tx_number.up.sql | 1 + ...0930085018_add_proof_aggregations.down.sql | 4 + ...220930085018_add_proof_aggregations.up.sql | 35 + .../20221001090302_add_proof_result.down.sql | 1 + .../20221001090302_add_proof_result.up.sql | 1 + ...31821_add_static_artifact_storage.down.sql | 1 + ...1131821_add_static_artifact_storage.up.sql | 7 + ...02190817_add_circuit_id_to_proofs.down.sql | 1 + ...1002190817_add_circuit_id_to_proofs.up.sql | 1 + ...090515_add-commitment-hash-fields.down.sql | 3 + ...03090515_add-commitment-hash-fields.up.sql | 3 + ...04114549_add_scheduler_jobs_table.down.sql | 1 + ...1004114549_add_scheduler_jobs_table.up.sql | 18 + ...21007085909_add-attempts-for-jobs.down.sql | 6 + ...0221007085909_add-attempts-for-jobs.up.sql | 6 + ..._ergs_per_code_decommittment_word.down.sql | 1 + ...ve_ergs_per_code_decommittment_word.up.sql | 1 + ...n_result_coords_to_scheduler_jobs.down.sql | 1 + 
...ion_result_coords_to_scheduler_jobs.up.sql | 1 + ...ification-requests-panic-messages.down.sql | 1 + ...erification-requests-panic-messages.up.sql | 1 + ...0221019112725_l2_to_l1_logs_table.down.sql | 2 + .../20221019112725_l2_to_l1_logs_table.up.sql | 22 + ...21031110209_miniblocks-hash-index.down.sql | 1 + ...0221031110209_miniblocks-hash-index.up.sql | 1 + .../20221103104136_add-signed-raw-tx.down.sql | 4 + .../20221103104136_add-signed-raw-tx.up.sql | 5 + .../20221108190838_set-primary-keys.down.sql | 5 + .../20221108190838_set-primary-keys.up.sql | 5 + .../20221109094807_block-skip-proof.down.sql | 1 + .../20221109094807_block-skip-proof.up.sql | 1 + ..._paths_blob_url_in_witness_inputs.down.sql | 1 + ...ee_paths_blob_url_in_witness_inputs.up.sql | 1 + ..._in_leaf_aggregation_witness_jobs.down.sql | 2 + ...rl_in_leaf_aggregation_witness_jobs.up.sql | 2 + ..._in_node_aggregation_witness_jobs.down.sql | 2 + ...rl_in_node_aggregation_witness_jobs.up.sql | 2 + ...lob_url_in_scheduler_witness_jobs.down.sql | 2 + ..._blob_url_in_scheduler_witness_jobs.up.sql | 2 + ...ut_blob_url_column_in_prover_jobs.down.sql | 1 + ...nput_blob_url_column_in_prover_jobs.up.sql | 1 + ...02801_events-tx-initiator-address.down.sql | 2 + ...2102801_events-tx-initiator-address.up.sql | 4 + ...dd_composite_index_to_prover_jobs.down.sql | 1 + ..._add_composite_index_to_prover_jobs.up.sql | 1 + ...0221215094205_prover-job-identity.down.sql | 2 + .../20221215094205_prover-job-identity.up.sql | 2 + .../20221227165603_fee_model.down.sql | 10 + .../20221227165603_fee_model.up.sql | 11 + ...104142_add_gpu_prover_queue_table.down.sql | 1 + ...04104142_add_gpu_prover_queue_table.up.sql | 11 + .../20230105122559_protective_reads.down.sql | 2 + .../20230105122559_protective_reads.up.sql | 11 + ...remove_contracts_null_restriction.down.sql | 5 + ...6_remove_contracts_null_restriction.up.sql | 5 + ...tatus_column_to_tables_with_blobs.down.sql | 14 + ..._status_column_to_tables_with_blobs.up.sql | 
14 + ...tatus_index_for_tables_with_blobs.down.sql | 9 + ..._status_index_for_tables_with_blobs.up.sql | 9 + ...actor_contracts_verification_info.down.sql | 5 + ...efactor_contracts_verification_info.up.sql | 7 + .../20230112111801_initial_writes.down.sql | 2 + .../20230112111801_initial_writes.up.sql | 9 + ...0113113154_add_storage_logs_index.down.sql | 1 + ...230113113154_add_storage_logs_index.up.sql | 2 + ...nd_free_slots_in_gpu_prover_queue.down.sql | 3 + ..._and_free_slots_in_gpu_prover_queue.up.sql | 3 + ..._null_constraint_gpu_prover_queue.down.sql | 2 + ...op_null_constraint_gpu_prover_queue.up.sql | 2 + .../20230119182427_l1_refunds.down.sql | 4 + .../20230119182427_l1_refunds.up.sql | 4 + .../20230202142858_ergs_to_gas.down.sql | 20 + .../20230202142858_ergs_to_gas.up.sql | 20 + ...over_group_id_in_gpu_prover_queue.down.sql | 2 + ...prover_group_id_in_gpu_prover_queue.up.sql | 2 + core/lib/dal/sqlx-data.json | 9428 ++++++++++++ core/lib/dal/src/blocks_dal.rs | 911 ++ core/lib/dal/src/blocks_web3_dal.rs | 397 + core/lib/dal/src/connection/holder.rs | 27 + core/lib/dal/src/connection/mod.rs | 95 + core/lib/dal/src/connection/test_pool.rs | 108 + core/lib/dal/src/eth_sender_dal.rs | 289 + core/lib/dal/src/events_dal.rs | 205 + core/lib/dal/src/events_web3_dal.rs | 180 + .../src/explorer/contract_verification_dal.rs | 363 + .../dal/src/explorer/explorer_accounts_dal.rs | 139 + .../dal/src/explorer/explorer_blocks_dal.rs | 88 + .../dal/src/explorer/explorer_events_dal.rs | 117 + .../lib/dal/src/explorer/explorer_misc_dal.rs | 113 + .../src/explorer/explorer_transactions_dal.rs | 806 + core/lib/dal/src/explorer/mod.rs | 58 + .../dal/src/explorer/storage_contract_info.rs | 34 + core/lib/dal/src/fee_monitor_dal.rs | 169 + core/lib/dal/src/gpu_prover_queue_dal.rs | 168 + core/lib/dal/src/lib.rs | 241 + core/lib/dal/src/models/mod.rs | 11 + core/lib/dal/src/models/storage_block.rs | 352 + core/lib/dal/src/models/storage_contract.rs | 17 + 
core/lib/dal/src/models/storage_eth_tx.rs | 95 + core/lib/dal/src/models/storage_event.rs | 87 + .../lib/dal/src/models/storage_fee_monitor.rs | 16 + core/lib/dal/src/models/storage_log.rs | 29 + .../dal/src/models/storage_prover_job_info.rs | 76 + .../dal/src/models/storage_state_record.rs | 6 + core/lib/dal/src/models/storage_token.rs | 70 + .../lib/dal/src/models/storage_transaction.rs | 648 + .../src/models/storage_witness_job_info.rs | 76 + core/lib/dal/src/prover_dal.rs | 578 + core/lib/dal/src/storage_dal.rs | 248 + core/lib/dal/src/storage_load_dal.rs | 155 + core/lib/dal/src/storage_logs_dal.rs | 200 + core/lib/dal/src/storage_logs_dedup_dal.rs | 359 + core/lib/dal/src/storage_web3_dal.rs | 200 + core/lib/dal/src/tests/mod.rs | 240 + core/lib/dal/src/time_utils.rs | 21 + core/lib/dal/src/tokens_dal.rs | 201 + core/lib/dal/src/tokens_web3_dal.rs | 124 + core/lib/dal/src/transactions_dal.rs | 821 + core/lib/dal/src/transactions_web3_dal.rs | 348 + core/lib/dal/src/witness_generator_dal.rs | 978 ++ core/lib/db_test_macro/Cargo.toml | 13 + core/lib/db_test_macro/src/lib.rs | 134 + core/lib/eth_client/Cargo.toml | 28 + .../lib/eth_client/src/clients/http_client.rs | 649 + core/lib/eth_client/src/clients/mock.rs | 322 + core/lib/eth_client/src/clients/mod.rs | 2 + core/lib/eth_client/src/lib.rs | 4 + core/lib/eth_signer/Cargo.toml | 35 + core/lib/eth_signer/src/error.rs | 34 + core/lib/eth_signer/src/json_rpc_signer.rs | 554 + core/lib/eth_signer/src/lib.rs | 26 + core/lib/eth_signer/src/pk_signer.rs | 127 + core/lib/eth_signer/src/raw_ethereum_tx.rs | 220 + core/lib/mempool/Cargo.toml | 15 + core/lib/mempool/src/lib.rs | 6 + core/lib/mempool/src/mempool_store.rs | 239 + core/lib/mempool/src/tests.rs | 435 + core/lib/mempool/src/types.rs | 201 + core/lib/merkle_tree/Cargo.toml | 39 + .../merge_join_with_max_predecessor.rs | 351 + core/lib/merkle_tree/src/iter_ext/mod.rs | 31 + core/lib/merkle_tree/src/lib.rs | 46 + core/lib/merkle_tree/src/patch.rs | 151 + 
core/lib/merkle_tree/src/storage.rs | 210 + core/lib/merkle_tree/src/tests.rs | 473 + core/lib/merkle_tree/src/tree_config.rs | 92 + core/lib/merkle_tree/src/types.rs | 90 + core/lib/merkle_tree/src/utils.rs | 28 + core/lib/merkle_tree/src/zksync_tree.rs | 527 + core/lib/mini_merkle_tree/Cargo.toml | 18 + core/lib/mini_merkle_tree/src/lib.rs | 208 + core/lib/object_store/Cargo.toml | 22 + .../src/file_backed_object_store.rs | 121 + core/lib/object_store/src/gcs_object_store.rs | 169 + core/lib/object_store/src/gcs_utils.rs | 42 + core/lib/object_store/src/lib.rs | 10 + core/lib/object_store/src/object_store.rs | 88 + core/lib/object_store/src/tests.rs | 24 + core/lib/prometheus_exporter/Cargo.toml | 17 + core/lib/prometheus_exporter/src/lib.rs | 69 + core/lib/prover_utils/Cargo.toml | 16 + core/lib/prover_utils/src/lib.rs | 89 + core/lib/queued_job_processor/Cargo.toml | 19 + core/lib/queued_job_processor/src/lib.rs | 140 + core/lib/state/Cargo.toml | 20 + core/lib/state/src/lib.rs | 4 + core/lib/state/src/secondary_storage.rs | 206 + core/lib/state/src/storage_view.rs | 167 + core/lib/storage/Cargo.toml | 22 + core/lib/storage/src/db.rs | 221 + core/lib/storage/src/lib.rs | 5 + core/lib/storage/src/util.rs | 21 + core/lib/types/Cargo.toml | 59 + core/lib/types/src/aggregated_operations.rs | 231 + core/lib/types/src/api.rs | 493 + core/lib/types/src/block.rs | 190 + core/lib/types/src/circuit.rs | 11 + core/lib/types/src/commitment.rs | 651 + core/lib/types/src/eth_sender.rs | 35 + core/lib/types/src/event.rs | 189 + core/lib/types/src/explorer_api.rs | 438 + core/lib/types/src/fee.rs | 73 + core/lib/types/src/helpers.rs | 8 + core/lib/types/src/l1/error.rs | 13 + core/lib/types/src/l1/mod.rs | 346 + core/lib/types/src/l2/error.rs | 18 + core/lib/types/src/l2/mod.rs | 330 + core/lib/types/src/l2_to_l1_log.rs | 42 + core/lib/types/src/lib.rs | 152 + core/lib/types/src/log_query_sorter.rs | 312 + .../lib/types/src/priority_op_onchain_data.rs | 53 + 
core/lib/types/src/proofs.rs | 314 + core/lib/types/src/pubdata_packing.rs | 263 + core/lib/types/src/storage/log.rs | 141 + core/lib/types/src/storage/mod.rs | 139 + core/lib/types/src/storage/writes.rs | 64 + core/lib/types/src/system_contracts.rs | 64 + core/lib/types/src/tokens.rs | 53 + core/lib/types/src/transaction_request.rs | 1219 ++ core/lib/types/src/tx/execute.rs | 76 + core/lib/types/src/tx/mod.rs | 34 + .../eip712_signature/member_types.rs | 115 + .../src/tx/primitives/eip712_signature/mod.rs | 14 + .../eip712_signature/struct_builder.rs | 184 + .../tx/primitives/eip712_signature/tests.rs | 202 + .../eip712_signature/typed_structure.rs | 186 + .../tx/primitives/eip712_signature/utils.rs | 32 + core/lib/types/src/tx/primitives/mod.rs | 5 + .../src/tx/primitives/packed_eth_signature.rs | 225 + core/lib/types/src/tx/tx_execution_info.rs | 146 + core/lib/types/src/utils.rs | 137 + core/lib/types/src/vm_trace.rs | 50 + core/lib/utils/Cargo.toml | 29 + core/lib/utils/src/bytecode.rs | 55 + core/lib/utils/src/convert.rs | 261 + core/lib/utils/src/env_tools.rs | 33 + core/lib/utils/src/format.rs | 78 + core/lib/utils/src/lib.rs | 17 + core/lib/utils/src/macros.rs | 35 + core/lib/utils/src/misc.rs | 14 + core/lib/utils/src/panic_extractor.rs | 16 + core/lib/utils/src/panic_notify.rs | 25 + core/lib/utils/src/serde_wrappers.rs | 242 + core/lib/utils/src/test_utils.rs | 41 + core/lib/utils/src/time.rs | 19 + core/lib/vlog/Cargo.toml | 22 + core/lib/vlog/src/lib.rs | 317 + core/lib/vm/Cargo.toml | 37 + core/lib/vm/src/bootloader_state.rs | 103 + core/lib/vm/src/errors/bootloader_error.rs | 58 + core/lib/vm/src/errors/mod.rs | 9 + core/lib/vm/src/errors/tx_revert_reason.rs | 206 + core/lib/vm/src/errors/vm_revert_reason.rs | 230 + core/lib/vm/src/event_sink.rs | 170 + core/lib/vm/src/events.rs | 149 + core/lib/vm/src/history_recorder.rs | 635 + core/lib/vm/src/lib.rs | 37 + core/lib/vm/src/memory.rs | 285 + core/lib/vm/src/oracle_tools.rs | 39 + 
core/lib/vm/src/oracles/decommitter.rs | 186 + core/lib/vm/src/oracles/mod.rs | 19 + core/lib/vm/src/oracles/precompile.rs | 78 + core/lib/vm/src/oracles/storage.rs | 290 + core/lib/vm/src/oracles/tracer.rs | 808 + core/lib/vm/src/pubdata_utils.rs | 94 + core/lib/vm/src/storage.rs | 63 + core/lib/vm/src/test_utils.rs | 331 + core/lib/vm/src/tests/bootloader.rs | 1588 ++ core/lib/vm/src/tests/mod.rs | 1 + core/lib/vm/src/transaction_data.rs | 484 + core/lib/vm/src/utils.rs | 281 + core/lib/vm/src/vm.rs | 904 ++ core/lib/vm/src/vm_with_bootloader.rs | 575 + core/lib/web3_decl/Cargo.toml | 26 + core/lib/web3_decl/src/error.rs | 36 + core/lib/web3_decl/src/lib.rs | 17 + core/lib/web3_decl/src/namespaces/eth.rs | 160 + .../web3_decl/src/namespaces/eth_subscribe.rs | 1 + core/lib/web3_decl/src/namespaces/mod.rs | 19 + core/lib/web3_decl/src/namespaces/net.rs | 25 + core/lib/web3_decl/src/namespaces/web3.rs | 21 + core/lib/web3_decl/src/namespaces/zks.rs | 95 + core/lib/web3_decl/src/types.rs | 417 + core/tests/loadnext/Cargo.toml | 38 + core/tests/loadnext/README.md | 125 + .../src/account/api_request_executor.rs | 162 + .../src/account/explorer_api_executor.rs | 317 + core/tests/loadnext/src/account/mod.rs | 384 + .../loadnext/src/account/pubsub_executor.rs | 127 + .../src/account/tx_command_executor.rs | 445 + core/tests/loadnext/src/account_pool.rs | 176 + core/tests/loadnext/src/all.rs | 9 + core/tests/loadnext/src/command/api.rs | 95 + .../loadnext/src/command/explorer_api.rs | 58 + core/tests/loadnext/src/command/mod.rs | 11 + core/tests/loadnext/src/command/pubsub.rs | 32 + core/tests/loadnext/src/command/tx_command.rs | 195 + core/tests/loadnext/src/config.rs | 376 + core/tests/loadnext/src/constants.rs | 23 + core/tests/loadnext/src/corrupted_tx.rs | 148 + core/tests/loadnext/src/executor.rs | 651 + core/tests/loadnext/src/fs_utils.rs | 109 + core/tests/loadnext/src/lib.rs | 14 + core/tests/loadnext/src/main.rs | 47 + core/tests/loadnext/src/report.rs | 230 + 
.../src/report_collector/metrics_collector.rs | 244 + .../loadnext/src/report_collector/mod.rs | 112 + .../operation_results_collector.rs | 91 + core/tests/loadnext/src/rng.rs | 103 + core/tests/revert-test/package.json | 35 + .../tests/revert-and-restart.test.ts | 204 + core/tests/revert-test/tests/tester.ts | 103 + core/tests/revert-test/tsconfig.json | 9 + core/tests/test_account/Cargo.toml | 19 + core/tests/test_account/src/lib.rs | 163 + core/tests/testkit/Cargo.toml | 40 + .../testkit/src/commands/gas_price/mod.rs | 216 + .../testkit/src/commands/gas_price/types.rs | 177 + .../testkit/src/commands/gas_price/utils.rs | 336 + core/tests/testkit/src/commands/mod.rs | 4 + .../testkit/src/commands/revert_block.rs | 68 + .../testkit/src/commands/upgrade_contract.rs | 73 + core/tests/testkit/src/commands/utils.rs | 340 + core/tests/testkit/src/eth_provider.rs | 662 + core/tests/testkit/src/external_commands.rs | 156 + core/tests/testkit/src/main.rs | 72 + core/tests/testkit/src/server_handler.rs | 42 + core/tests/testkit/src/tester.rs | 632 + core/tests/testkit/src/types.rs | 316 + core/tests/testkit/src/utils.rs | 69 + core/tests/ts-integration/README.md | 189 + core/tests/ts-integration/contracts/README.md | 4 + .../basic-constructor/basic-constructor.sol | 16 + .../contracts/context/context.sol | 47 + .../contracts/counter/counter.sol | 26 + .../ts-integration/contracts/create/Foo.sol | 8 + .../contracts/create/create.sol | 17 + .../contracts/custom-account/Constants.sol | 37 + .../contracts/custom-account/RLPEncoder.sol | 99 + .../custom-account/SystemContext.sol | 103 + .../custom-account/SystemContractsCaller.sol | 249 + .../custom-account/TransactionHelper.sol | 467 + .../contracts/custom-account/Utils.sol | 38 + .../custom-account/custom-account.sol | 111 + .../custom-account/custom-paymaster.sol | 88 + .../custom-account/interfaces/IAccount.sol | 47 + .../interfaces/IContractDeployer.sol | 108 + .../custom-account/interfaces/IERC20.sol | 82 + 
.../interfaces/INonceHolder.sol | 42 + .../custom-account/interfaces/IPaymaster.sol | 51 + .../interfaces/IPaymasterFlow.sol | 16 + .../custom-account/nonce-holder-test.sol | 100 + .../ts-integration/contracts/error/error.sol | 22 + .../contracts/events/events.sol | 21 + .../contracts/events/sample-calldata | Bin 0 -> 96 bytes .../contracts/expensive/expensive.sol | 15 + .../contracts/infinite/infinite.sol | 19 + .../writes-and-messages.sol | 41 + core/tests/ts-integration/hardhat.config.ts | 19 + core/tests/ts-integration/jest.config.json | 20 + core/tests/ts-integration/package.json | 26 + .../tests/ts-integration/src/context-owner.ts | 533 + core/tests/ts-integration/src/env.ts | 110 + core/tests/ts-integration/src/helpers.ts | 107 + core/tests/ts-integration/src/index.ts | 5 + .../src/jest-setup/add-matchers.ts | 9 + .../src/jest-setup/global-setup.ts | 36 + .../src/jest-setup/global-teardown.ts | 17 + .../ts-integration/src/matchers/big-number.ts | 100 + .../src/matchers/eth-primitives.ts | 53 + .../tests/ts-integration/src/matchers/fail.ts | 6 + .../src/matchers/matcher-helpers.ts | 61 + .../src/matchers/transaction.ts | 167 + .../src/modifiers/balance-checker.ts | 242 + .../ts-integration/src/modifiers/index.ts | 28 + .../src/modifiers/receipt-check.ts | 42 + .../tests/ts-integration/src/prerequisites.ts | 43 + core/tests/ts-integration/src/reporter.ts | 121 + .../ts-integration/src/retry-provider.ts | 47 + core/tests/ts-integration/src/system.ts | 116 + core/tests/ts-integration/src/test-master.ts | 134 + core/tests/ts-integration/src/types.ts | 76 + .../ts-integration/tests/api/explorer.test.ts | 589 + .../ts-integration/tests/api/web3.test.ts | 664 + .../ts-integration/tests/contracts.test.ts | 297 + .../tests/custom-account.test.ts | 199 + core/tests/ts-integration/tests/erc20.test.ts | 202 + core/tests/ts-integration/tests/ether.test.ts | 165 + core/tests/ts-integration/tests/l1.test.ts | 373 + .../ts-integration/tests/mempool.test.ts | 141 + 
.../ts-integration/tests/paymaster.test.ts | 384 + .../ts-integration/tests/self-unit.test.ts | 39 + .../tests/ts-integration/tests/system.test.ts | 490 + core/tests/ts-integration/tsconfig.json | 16 + core/tests/ts-integration/typings/jest.d.ts | 101 + core/tests/ts-integration/yarn.lock | 3092 ++++ docker-compose-backup-test.yml | 25 + docker-compose-gpu-runner.yml | 42 + docker-compose-runner.yml | 36 + docker-compose.yml | 22 + docker/circuit-synthesizer/Dockerfile | 29 + docker/contract-verifier/Dockerfile | 43 + docker/contract-verifier/install-all-solc.sh | 20 + docker/geth/Dockerfile | 13 + docker/geth/fast-dev.json | 77 + docker/geth/geth-entry.sh | 46 + ...--8a91dc2d28b689474298d91899f0c1baf62cb85b | 1 + docker/geth/mainnet-dev.json | 77 + docker/geth/password.sec | 0 docker/geth/standard-dev.json | 77 + docker/local-node/Dockerfile | 67 + docker/local-node/entrypoint.sh | 46 + docker/prover/Dockerfile | 42 + docker/runner/Dockerfile | 5 + docker/server-v2/Dockerfile | 29 + docker/zk-environment/Dockerfile | 178 + docker/zk-rust-nightly-environment/Dockerfile | 20 + docs/architecture.md | 110 + docs/development.md | 148 + docs/launch.md | 183 + docs/setup-dev.md | 184 + eraLogo.svg | 37 + etc/ERC20/contracts/ZkSyncERC20.sol | 15 + etc/ERC20/contracts/interfaces/Context.sol | 23 + etc/ERC20/contracts/interfaces/ERC20.sol | 368 + etc/ERC20/contracts/interfaces/IERC20.sol | 81 + .../contracts/interfaces/IERC20Metadata.sol | 27 + etc/ERC20/hardhat.config.ts | 19 + etc/ERC20/package.json | 11 + .../zksync_testharness_test.json | 74 + etc/contracts-test-data/README.md | 4 + .../basic-constructor/basic-constructor.sol | 16 + .../contracts/context/context.sol | 47 + .../contracts/counter/counter.sol | 22 + .../contracts/create/Foo.sol | 8 + .../contracts/create/create.sol | 17 + .../contracts/custom-account/Constants.sol | 37 + .../contracts/custom-account/RLPEncoder.sol | 99 + .../custom-account/SystemContext.sol | 67 + 
.../custom-account/SystemContractsCaller.sol | 249 + .../custom-account/TransactionHelper.sol | 467 + .../contracts/custom-account/Utils.sol | 38 + .../custom-account/custom-account.sol | 106 + .../custom-account/custom-paymaster.sol | 81 + .../custom-account/interfaces/IAccount.sol | 47 + .../interfaces/IContractDeployer.sol | 108 + .../custom-account/interfaces/IERC20.sol | 82 + .../interfaces/INonceHolder.sol | 42 + .../custom-account/interfaces/IPaymaster.sol | 51 + .../interfaces/IPaymasterFlow.sol | 16 + .../custom-account/nonce-holder-test.sol | 100 + .../contracts/error/error.sol | 22 + .../contracts/estimator/estimator.sol | 30 + .../contracts/events/events.sol | 15 + .../contracts/events/sample-calldata | Bin 0 -> 96 bytes .../contracts/expensive/expensive.sol | 15 + .../contracts/infinite/infinite.sol | 19 + .../contracts/loadnext/loadnext_contract.sol | 56 + etc/contracts-test-data/hardhat.config.ts | 19 + etc/contracts-test-data/package.json | 12 + etc/env/base/README.md | 29 + etc/env/base/api.toml | 60 + etc/env/base/chain.toml | 63 + etc/env/base/circuit_synthesizer.toml | 9 + etc/env/base/contract_verifier.toml | 4 + etc/env/base/contracts.toml | 33 + etc/env/base/database.toml | 13 + etc/env/base/eth_client.toml | 4 + etc/env/base/eth_sender.toml | 57 + etc/env/base/eth_watch.toml | 6 + etc/env/base/fetcher.toml | 16 + etc/env/base/misc.toml | 15 + etc/env/base/nfs.toml | 2 + etc/env/base/object_store.toml | 5 + etc/env/base/private.toml | 16 + etc/env/base/prover.toml | 74 + etc/env/base/prover_group.toml | 11 + etc/env/base/rust.toml | 33 + etc/env/base/witness_generator.toml | 11 + etc/env/docker.env | 14 + etc/lint-config/js.js | 35 + etc/lint-config/md.js | 8 + etc/lint-config/sol.js | 25 + etc/lint-config/ts.js | 15 + etc/openzeppelin-contracts | 1 + etc/prettier-config/js.js | 7 + etc/prettier-config/md.js | 9 + etc/prettier-config/sol.js | 8 + etc/prettier-config/ts.js | 8 + etc/prettier-config/vue.js | 7 + 
etc/scripts/prepare_bellman_cuda.sh | 7 + etc/system-contracts | 1 + etc/test_config/.gitignore | 2 + etc/test_config/README.md | 11 + etc/test_config/constant/api.json | 3 + etc/test_config/constant/eth.json | 5 + etc/test_config/volatile/.empty | 2 + etc/thread | 0 etc/tokens/goerli.json | 26 + etc/tokens/mainnet.json | 98 + etc/tokens/rinkeby.json | 122 + etc/tokens/ropsten.json | 56 + etc/tokens/test.json | 1 + .../local-setup-preparation/.gitignore | 1 + .../local-setup-preparation/README.md | 4 + .../local-setup-preparation/package.json | 17 + .../local-setup-preparation/src/index.ts | 58 + .../local-setup-preparation/src/utils.ts | 28 + .../local-setup-preparation/tsconfig.json | 16 + .../package.json | 20 + .../src/index.ts | 72 + .../tsconfig.json | 16 + infrastructure/reading-tool/.gitignore | 1 + infrastructure/reading-tool/README.md | 10 + infrastructure/reading-tool/package.json | 15 + infrastructure/reading-tool/src/index.ts | 47 + infrastructure/reading-tool/tsconfig.json | 16 + infrastructure/zk/.gitignore | 2 + infrastructure/zk/README.md | 30 + infrastructure/zk/package.json | 32 + infrastructure/zk/src/clean.ts | 54 + infrastructure/zk/src/compiler.ts | 29 + infrastructure/zk/src/completion.ts | 73 + infrastructure/zk/src/config.ts | 170 + infrastructure/zk/src/contract.ts | 125 + infrastructure/zk/src/contract_verifier.ts | 10 + infrastructure/zk/src/database/database.ts | 78 + infrastructure/zk/src/docker.ts | 119 + infrastructure/zk/src/down.ts | 8 + infrastructure/zk/src/dummy-prover.ts | 69 + infrastructure/zk/src/env.ts | 152 + infrastructure/zk/src/fmt.ts | 67 + infrastructure/zk/src/index.ts | 89 + infrastructure/zk/src/init.ts | 123 + infrastructure/zk/src/lint.ts | 54 + infrastructure/zk/src/prover.ts | 26 + infrastructure/zk/src/run/data-restore.ts | 66 + infrastructure/zk/src/run/run.ts | 200 + infrastructure/zk/src/server.ts | 92 + infrastructure/zk/src/test/integration.ts | 143 + infrastructure/zk/src/test/test.ts | 54 + 
infrastructure/zk/src/up.ts | 8 + infrastructure/zk/src/utils.ts | 149 + infrastructure/zk/tsconfig.json | 15 + package.json | 51 + renovate.json | 5 + rust-toolchain | 1 + sdk/zksync-rs/Cargo.toml | 30 + sdk/zksync-rs/README.md | 4 + sdk/zksync-rs/src/abi/IERC20.json | 463 + sdk/zksync-rs/src/abi/IL1Bridge.json | 243 + sdk/zksync-rs/src/abi/IPaymasterFlow.json | 40 + sdk/zksync-rs/src/abi/IZkSync.json | 2176 +++ sdk/zksync-rs/src/abi/update-abi.sh | 10 + sdk/zksync-rs/src/error.rs | 44 + .../src/ethereum/DepositERC20GasLimit.json | 42 + sdk/zksync-rs/src/ethereum/mod.rs | 594 + sdk/zksync-rs/src/lib.rs | 18 + .../src/operations/deploy_contract.rs | 162 + .../src/operations/execute_contract.rs | 162 + sdk/zksync-rs/src/operations/mod.rs | 151 + sdk/zksync-rs/src/operations/transfer.rs | 197 + sdk/zksync-rs/src/operations/withdraw.rs | 190 + sdk/zksync-rs/src/signer.rs | 146 + sdk/zksync-rs/src/utils.rs | 115 + sdk/zksync-rs/src/wallet.rs | 186 + sdk/zksync-rs/tests/integration.rs | 731 + sdk/zksync-rs/tests/unit.rs | 653 + sdk/zksync-web3.js/.gitignore | 1 + sdk/zksync-web3.js/abi/ContractDeployer.json | 374 + sdk/zksync-web3.js/abi/IAllowList.json | 337 + sdk/zksync-web3.js/abi/IERC1271.json | 28 + sdk/zksync-web3.js/abi/IERC20.json | 226 + sdk/zksync-web3.js/abi/IEthToken.json | 192 + sdk/zksync-web3.js/abi/IL1Bridge.json | 243 + sdk/zksync-web3.js/abi/IL1Messenger.json | 48 + sdk/zksync-web3.js/abi/IL2Bridge.json | 173 + sdk/zksync-web3.js/abi/IPaymasterFlow.json | 40 + sdk/zksync-web3.js/abi/IZkSync.json | 2195 +++ sdk/zksync-web3.js/abi/update-abi.sh | 20 + sdk/zksync-web3.js/package.json | 32 + sdk/zksync-web3.js/src/adapters.ts | 440 + sdk/zksync-web3.js/src/calldata.ts | 103 + sdk/zksync-web3.js/src/contract.ts | 89 + sdk/zksync-web3.js/src/index.ts | 6 + sdk/zksync-web3.js/src/paymaster-utils.ts | 31 + sdk/zksync-web3.js/src/provider.ts | 608 + sdk/zksync-web3.js/src/signer.ts | 170 + sdk/zksync-web3.js/src/types.ts | 201 + sdk/zksync-web3.js/src/utils.ts 
| 463 + sdk/zksync-web3.js/src/wallet.ts | 128 + sdk/zksync-web3.js/tests/main.test.ts | 0 sdk/zksync-web3.js/tsconfig.json | 18 + sdk/zksync-web3.js/typechain/IAllowList.d.ts | 767 + .../typechain/IAllowListFactory.ts | 317 + .../typechain/IERC20Metadata.d.ts | 511 + .../typechain/IERC20MetadataFactory.ts | 242 + sdk/zksync-web3.js/typechain/IEthToken.d.ts | 426 + .../typechain/IEthTokenFactory.ts | 208 + sdk/zksync-web3.js/typechain/IL1Bridge.d.ts | 537 + .../typechain/IL1BridgeFactory.ts | 259 + sdk/zksync-web3.js/typechain/IL2Bridge.d.ts | 376 + .../typechain/IL2BridgeFactory.ts | 127 + sdk/zksync-web3.js/typechain/IZkSync.d.ts | 4973 ++++++ .../typechain/IZkSyncFactory.ts | 2211 +++ sdk/zksync-web3.js/typechain/index.ts | 13 + sdk/zksync-web3.js/typechain/update.sh | 6 + tarpaulin.toml | 21 + yarn.lock | 12452 ++++++++++++++++ 952 files changed, 168992 insertions(+) create mode 100644 .cargo/config.toml create mode 100644 .dockerignore create mode 100644 .eslintignore create mode 100644 .gitattributes create mode 100755 .githooks/pre-commit create mode 100755 .githooks/pre-push create mode 100644 .gitignore create mode 100644 .gitmodules create mode 100644 .markdownlintignore create mode 100644 .prettierignore create mode 100644 CHANGELOG.md create mode 100644 CODEOWNERS create mode 100644 CONTRIBUTING.md create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100755 bin/ci_run create mode 100755 bin/run_loadtest_from_github_actions create mode 100755 bin/zk create mode 100644 bors.toml create mode 100644 codecov.yml create mode 160000 contracts create mode 100644 core/bin/admin-tools/Cargo.toml create mode 100644 core/bin/admin-tools/src/application.rs create mode 100644 core/bin/admin-tools/src/blocks.rs create mode 100644 core/bin/admin-tools/src/main.rs create mode 100644 core/bin/admin-tools/src/prover.rs create mode 100644 
core/bin/blob_purger/Cargo.toml create mode 100644 core/bin/blob_purger/src/main.rs create mode 100644 core/bin/circuit_synthesizer/Cargo.lock create mode 100644 core/bin/circuit_synthesizer/Cargo.toml create mode 100644 core/bin/circuit_synthesizer/src/circuit_synthesizer.rs create mode 100644 core/bin/circuit_synthesizer/src/main.rs create mode 100644 core/bin/contract-verifier/Cargo.toml create mode 100644 core/bin/contract-verifier/src/error.rs create mode 100644 core/bin/contract-verifier/src/main.rs create mode 100644 core/bin/contract-verifier/src/verifier.rs create mode 100644 core/bin/contract-verifier/src/zksolc_utils.rs create mode 100644 core/bin/events_tx_initiator_address_migration/Cargo.toml create mode 100644 core/bin/events_tx_initiator_address_migration/src/main.rs create mode 100644 core/bin/prover/Cargo.lock create mode 100644 core/bin/prover/Cargo.toml create mode 100644 core/bin/prover/README.md create mode 100644 core/bin/prover/rust-toolchain.toml create mode 100644 core/bin/prover/src/artifact_provider.rs create mode 100644 core/bin/prover/src/main.rs create mode 100644 core/bin/prover/src/prover.rs create mode 100644 core/bin/prover/src/prover_params.rs create mode 100644 core/bin/prover/src/socket_listener.rs create mode 100644 core/bin/prover/src/synthesized_circuit_provider.rs create mode 100644 core/bin/set_correct_tx_format_for_priority_ops/Cargo.toml create mode 100644 core/bin/set_correct_tx_format_for_priority_ops/src/main.rs create mode 100644 core/bin/setup_key_generator_and_server/Cargo.lock create mode 100644 core/bin/setup_key_generator_and_server/Cargo.toml create mode 100644 core/bin/setup_key_generator_and_server/data/.gitkeep create mode 100644 core/bin/setup_key_generator_and_server/src/lib.rs create mode 100644 core/bin/setup_key_generator_and_server/src/main.rs create mode 100644 core/bin/storage_logs_migration/Cargo.toml create mode 100644 core/bin/storage_logs_migration/src/main.rs create mode 100644 
core/bin/system-constants-generator/Cargo.toml create mode 100644 core/bin/system-constants-generator/src/intrinsic_costs.rs create mode 100644 core/bin/system-constants-generator/src/main.rs create mode 100644 core/bin/system-constants-generator/src/utils.rs create mode 100644 core/bin/verification_key_generator_and_server/Cargo.toml create mode 100644 core/bin/verification_key_generator_and_server/data/verification_0_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_10_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_11_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_12_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_13_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_14_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_15_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_16_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_17_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_18_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_1_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_2_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_3_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_4_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_5_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_6_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_7_key.json create mode 100644 
core/bin/verification_key_generator_and_server/data/verification_8_key.json create mode 100644 core/bin/verification_key_generator_and_server/data/verification_9_key.json create mode 100644 core/bin/verification_key_generator_and_server/src/commitment_generator.rs create mode 100644 core/bin/verification_key_generator_and_server/src/json_to_binary_vk_converter.rs create mode 100644 core/bin/verification_key_generator_and_server/src/lib.rs create mode 100644 core/bin/verification_key_generator_and_server/src/main.rs create mode 100644 core/bin/verification_key_generator_and_server/src/tests.rs create mode 100644 core/bin/zksync_core/Cargo.toml create mode 100644 core/bin/zksync_core/src/api_server/execution_sandbox.rs create mode 100644 core/bin/zksync_core/src/api_server/explorer/api_decl.rs create mode 100644 core/bin/zksync_core/src/api_server/explorer/api_impl.rs create mode 100644 core/bin/zksync_core/src/api_server/explorer/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/explorer/network_stats.rs create mode 100644 core/bin/zksync_core/src/api_server/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/tx_sender/error.rs create mode 100644 core/bin/zksync_core/src/api_server/tx_sender/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/error.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/eth.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/net.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/web3.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/zks.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/pub_sub.rs create mode 100644 
core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth_subscribe.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/net.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/web3.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/eth.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/eth_subscribe.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/mod.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/net.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/web3.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/namespaces/zks.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/pubsub_notifier.rs create mode 100644 core/bin/zksync_core/src/api_server/web3/state.rs create mode 100644 core/bin/zksync_core/src/bin/block_reverter.rs create mode 100644 core/bin/zksync_core/src/bin/merkle_tree_consistency_checker.rs create mode 100644 core/bin/zksync_core/src/bin/rocksdb_util.rs create mode 100644 core/bin/zksync_core/src/bin/zksync_server.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/error.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/mod.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_list/mock.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_list/mod.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_list/one_inch.rs create 
mode 100644 core/bin/zksync_core/src/data_fetchers/token_price/coingecko.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_price/coinmarketcap.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_price/mock.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_price/mod.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_trading_volume/mock.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_trading_volume/mod.rs create mode 100644 core/bin/zksync_core/src/data_fetchers/token_trading_volume/uniswap.rs create mode 100644 core/bin/zksync_core/src/db_storage_provider.rs create mode 100644 core/bin/zksync_core/src/eth_sender/aggregator.rs create mode 100644 core/bin/zksync_core/src/eth_sender/block_publish_criterion.rs create mode 100644 core/bin/zksync_core/src/eth_sender/error.rs create mode 100644 core/bin/zksync_core/src/eth_sender/eth_tx_aggregator.rs create mode 100644 core/bin/zksync_core/src/eth_sender/eth_tx_manager.rs create mode 100644 core/bin/zksync_core/src/eth_sender/grafana_metrics.rs create mode 100644 core/bin/zksync_core/src/eth_sender/mod.rs create mode 100644 core/bin/zksync_core/src/eth_sender/tests.rs create mode 100644 core/bin/zksync_core/src/eth_sender/zksync_functions.rs create mode 100644 core/bin/zksync_core/src/eth_watch/client.rs create mode 100644 core/bin/zksync_core/src/eth_watch/mod.rs create mode 100644 core/bin/zksync_core/src/eth_watch/tests.rs create mode 100644 core/bin/zksync_core/src/fee_monitor/mod.rs create mode 100644 core/bin/zksync_core/src/fee_ticker/error.rs create mode 100644 core/bin/zksync_core/src/fee_ticker/gas_price.rs create mode 100644 core/bin/zksync_core/src/fee_ticker/mod.rs create mode 100644 core/bin/zksync_core/src/fee_ticker/types.rs create mode 100644 core/bin/zksync_core/src/gas_adjuster/mod.rs create mode 100644 core/bin/zksync_core/src/gas_adjuster/tests.rs create mode 100644 core/bin/zksync_core/src/gas_tracker/constants.rs 
create mode 100644 core/bin/zksync_core/src/gas_tracker/mod.rs create mode 100644 core/bin/zksync_core/src/genesis.rs create mode 100644 core/bin/zksync_core/src/house_keeper/gcs_blob_cleaner.rs create mode 100644 core/bin/zksync_core/src/house_keeper/gpu_prover_queue_monitor.rs create mode 100644 core/bin/zksync_core/src/house_keeper/mod.rs create mode 100644 core/bin/zksync_core/src/house_keeper/periodic_job.rs create mode 100644 core/bin/zksync_core/src/house_keeper/witness_generator_misc_reporter.rs create mode 100644 core/bin/zksync_core/src/house_keeper/witness_generator_queue_monitor.rs create mode 100644 core/bin/zksync_core/src/lib.rs create mode 100644 core/bin/zksync_core/src/metadata_calculator/mod.rs create mode 100644 core/bin/zksync_core/src/metadata_calculator/tests.rs create mode 100644 core/bin/zksync_core/src/state_keeper/batch_executor/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/batch_executor/tests/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/batch_executor/tests/tester.rs create mode 100644 core/bin/zksync_core/src/state_keeper/extractors.rs create mode 100644 core/bin/zksync_core/src/state_keeper/io/mempool.rs create mode 100644 core/bin/zksync_core/src/state_keeper/io/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/keeper.rs create mode 100644 core/bin/zksync_core/src/state_keeper/mempool_actor.rs create mode 100644 core/bin/zksync_core/src/state_keeper/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/function.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/gas.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/geometry_seal_criteria.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/pubdata_bytes.rs create mode 100644 core/bin/zksync_core/src/state_keeper/seal_criteria/slots.rs create mode 100644 
core/bin/zksync_core/src/state_keeper/seal_criteria/timeout.rs create mode 100644 core/bin/zksync_core/src/state_keeper/tests/mod.rs create mode 100644 core/bin/zksync_core/src/state_keeper/tests/tester.rs create mode 100644 core/bin/zksync_core/src/state_keeper/types.rs create mode 100644 core/bin/zksync_core/src/state_keeper/updates/l1_batch_updates.rs create mode 100644 core/bin/zksync_core/src/state_keeper/updates/miniblock_updates.rs create mode 100644 core/bin/zksync_core/src/state_keeper/updates/mod.rs create mode 100644 core/bin/zksync_core/src/witness_generator/basic_circuits.rs create mode 100644 core/bin/zksync_core/src/witness_generator/leaf_aggregation.rs create mode 100644 core/bin/zksync_core/src/witness_generator/mod.rs create mode 100644 core/bin/zksync_core/src/witness_generator/node_aggregation.rs create mode 100644 core/bin/zksync_core/src/witness_generator/precalculated_merkle_paths_provider.rs create mode 100644 core/bin/zksync_core/src/witness_generator/scheduler.rs create mode 100644 core/bin/zksync_core/src/witness_generator/tests.rs create mode 100644 core/bin/zksync_core/src/witness_generator/utils.rs create mode 100644 core/lib/basic_types/Cargo.toml create mode 100644 core/lib/basic_types/src/lib.rs create mode 100644 core/lib/basic_types/src/macros.rs create mode 100644 core/lib/basic_types/src/network.rs create mode 100644 core/lib/circuit_breaker/Cargo.toml create mode 100644 core/lib/circuit_breaker/src/code_hashes.rs create mode 100644 core/lib/circuit_breaker/src/facet_selectors.rs create mode 100644 core/lib/circuit_breaker/src/l1_txs.rs create mode 100644 core/lib/circuit_breaker/src/lib.rs create mode 100644 core/lib/circuit_breaker/src/utils.rs create mode 100644 core/lib/circuit_breaker/src/vks.rs create mode 100644 core/lib/config/Cargo.toml create mode 100644 core/lib/config/src/configs/api.rs create mode 100644 core/lib/config/src/configs/chain.rs create mode 100644 core/lib/config/src/configs/circuit_synthesizer.rs create 
mode 100644 core/lib/config/src/configs/contract_verifier.rs create mode 100644 core/lib/config/src/configs/contracts.rs create mode 100644 core/lib/config/src/configs/database.rs create mode 100644 core/lib/config/src/configs/eth_client.rs create mode 100644 core/lib/config/src/configs/eth_sender.rs create mode 100644 core/lib/config/src/configs/eth_watch.rs create mode 100644 core/lib/config/src/configs/fetcher.rs create mode 100644 core/lib/config/src/configs/mod.rs create mode 100644 core/lib/config/src/configs/nfs.rs create mode 100644 core/lib/config/src/configs/object_store.rs create mode 100644 core/lib/config/src/configs/prover.rs create mode 100644 core/lib/config/src/configs/prover_group.rs create mode 100644 core/lib/config/src/configs/test_utils.rs create mode 100644 core/lib/config/src/configs/utils.rs create mode 100644 core/lib/config/src/configs/witness_generator.rs create mode 100644 core/lib/config/src/constants/blocks.rs create mode 100644 core/lib/config/src/constants/contracts.rs create mode 100644 core/lib/config/src/constants/crypto.rs create mode 100644 core/lib/config/src/constants/ethereum.rs create mode 100644 core/lib/config/src/constants/fees/intrinsic.rs create mode 100644 core/lib/config/src/constants/fees/mod.rs create mode 100644 core/lib/config/src/constants/mod.rs create mode 100644 core/lib/config/src/constants/system_context.rs create mode 100644 core/lib/config/src/constants/trusted_slots.rs create mode 100644 core/lib/config/src/lib.rs create mode 100644 core/lib/config/src/test_config/mod.rs create mode 100644 core/lib/contracts/Cargo.toml create mode 100644 core/lib/contracts/src/lib.rs create mode 100644 core/lib/crypto/Cargo.toml create mode 100644 core/lib/crypto/README.md create mode 100644 core/lib/crypto/src/convert.rs create mode 100644 core/lib/crypto/src/error.rs create mode 100644 core/lib/crypto/src/hasher/blake2.rs create mode 100644 core/lib/crypto/src/hasher/keccak.rs create mode 100644 
core/lib/crypto/src/hasher/mod.rs create mode 100644 core/lib/crypto/src/hasher/sha256.rs create mode 100644 core/lib/crypto/src/lib.rs create mode 100644 core/lib/crypto/src/primitives.rs create mode 100644 core/lib/crypto/src/proof.rs create mode 100644 core/lib/crypto/src/serialization.rs create mode 100644 core/lib/dal/.gitignore create mode 100644 core/lib/dal/Cargo.toml create mode 100644 core/lib/dal/migrations/20211026134308_init.down.sql create mode 100644 core/lib/dal/migrations/20211026134308_init.up.sql create mode 100644 core/lib/dal/migrations/20220120160234_add_tx_execution_info.down.sql create mode 100644 core/lib/dal/migrations/20220120160234_add_tx_execution_info.up.sql create mode 100644 core/lib/dal/migrations/20220127113853_tx_received_at_index.down.sql create mode 100644 core/lib/dal/migrations/20220127113853_tx_received_at_index.up.sql create mode 100644 core/lib/dal/migrations/20220204131627_add_merkle_root.down.sql create mode 100644 core/lib/dal/migrations/20220204131627_add_merkle_root.up.sql create mode 100644 core/lib/dal/migrations/20220204223302_transactions_new_mempool.down.sql create mode 100644 core/lib/dal/migrations/20220204223302_transactions_new_mempool.up.sql create mode 100644 core/lib/dal/migrations/20220304160112_drop_events_indices.down.sql create mode 100644 core/lib/dal/migrations/20220304160112_drop_events_indices.up.sql create mode 100644 core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.down.sql create mode 100644 core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.up.sql create mode 100644 core/lib/dal/migrations/20220315095541_txs_priority_id.down.sql create mode 100644 core/lib/dal/migrations/20220315095541_txs_priority_id.up.sql create mode 100644 core/lib/dal/migrations/20220315144416_remove_tx_valid_in.down.sql create mode 100644 core/lib/dal/migrations/20220315144416_remove_tx_valid_in.up.sql create mode 100644 core/lib/dal/migrations/20220321131756_change_events_loc_index.down.sql 
create mode 100644 core/lib/dal/migrations/20220321131756_change_events_loc_index.up.sql create mode 100644 core/lib/dal/migrations/20220325153146_remove_events_id.down.sql create mode 100644 core/lib/dal/migrations/20220325153146_remove_events_id.up.sql create mode 100644 core/lib/dal/migrations/20220401114554_storage_tables_migration.down.sql create mode 100644 core/lib/dal/migrations/20220401114554_storage_tables_migration.up.sql create mode 100644 core/lib/dal/migrations/20220404102332_eth_tx_index.down.sql create mode 100644 core/lib/dal/migrations/20220404102332_eth_tx_index.up.sql create mode 100644 core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.down.sql create mode 100644 core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.up.sql create mode 100644 core/lib/dal/migrations/20220412142956_add_block_hash_index.down.sql create mode 100644 core/lib/dal/migrations/20220412142956_add_block_hash_index.up.sql create mode 100644 core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.down.sql create mode 100644 core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.up.sql create mode 100644 core/lib/dal/migrations/20220504154136_remove_nonce_gaps.down.sql create mode 100644 core/lib/dal/migrations/20220504154136_remove_nonce_gaps.up.sql create mode 100644 core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.down.sql create mode 100644 core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.up.sql create mode 100644 core/lib/dal/migrations/20220517135000_add_contract_address_column.down.sql create mode 100644 core/lib/dal/migrations/20220517135000_add_contract_address_column.up.sql create mode 100644 core/lib/dal/migrations/20220519101248_btree_events_indices.down.sql create mode 100644 core/lib/dal/migrations/20220519101248_btree_events_indices.up.sql create mode 100644 core/lib/dal/migrations/20220519103453_in_mempool.down.sql create mode 100644 
core/lib/dal/migrations/20220519103453_in_mempool.up.sql create mode 100644 core/lib/dal/migrations/20220519150925_eth_sender_refactor.down.sql create mode 100644 core/lib/dal/migrations/20220519150925_eth_sender_refactor.up.sql create mode 100644 core/lib/dal/migrations/20220519201431_change_block_commitment_model.down.sql create mode 100644 core/lib/dal/migrations/20220519201431_change_block_commitment_model.up.sql create mode 100644 core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.down.sql create mode 100644 core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.up.sql create mode 100644 core/lib/dal/migrations/20220601105448_contract_verification.down.sql create mode 100644 core/lib/dal/migrations/20220601105448_contract_verification.up.sql create mode 100644 core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.down.sql create mode 100644 core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.up.sql create mode 100644 core/lib/dal/migrations/20220611134715_token-bridging.down.sql create mode 100644 core/lib/dal/migrations/20220611134715_token-bridging.up.sql create mode 100644 core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.down.sql create mode 100644 core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.up.sql create mode 100644 core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.down.sql create mode 100644 core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.up.sql create mode 100644 core/lib/dal/migrations/20220704121755_witness_inputs.down.sql create mode 100644 core/lib/dal/migrations/20220704121755_witness_inputs.up.sql create mode 100644 core/lib/dal/migrations/20220705133822_add_value_column.down.sql create mode 100644 core/lib/dal/migrations/20220705133822_add_value_column.up.sql create mode 100644 core/lib/dal/migrations/20220707151034_storage_logs_dedup.down.sql create mode 100644 
core/lib/dal/migrations/20220707151034_storage_logs_dedup.up.sql create mode 100644 core/lib/dal/migrations/20220708093726_make-type-not-null.down.sql create mode 100644 core/lib/dal/migrations/20220708093726_make-type-not-null.up.sql create mode 100644 core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.down.sql create mode 100644 core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.up.sql create mode 100644 core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.down.sql create mode 100644 core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.up.sql create mode 100644 core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.down.sql create mode 100644 core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.up.sql create mode 100644 core/lib/dal/migrations/20220713090226_remove-priority-mode.down.sql create mode 100644 core/lib/dal/migrations/20220713090226_remove-priority-mode.up.sql create mode 100644 core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.down.sql create mode 100644 core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.up.sql create mode 100644 core/lib/dal/migrations/20220801131413_add_paymaster_data.down.sql create mode 100644 core/lib/dal/migrations/20220801131413_add_paymaster_data.up.sql create mode 100644 core/lib/dal/migrations/20220807172230_block_metadata.down.sql create mode 100644 core/lib/dal/migrations/20220807172230_block_metadata.up.sql create mode 100644 core/lib/dal/migrations/20220815130828_support_eip1559.down.sql create mode 100644 core/lib/dal/migrations/20220815130828_support_eip1559.up.sql create mode 100644 core/lib/dal/migrations/20220816141002_add_prover_jobs.down.sql create mode 100644 core/lib/dal/migrations/20220816141002_add_prover_jobs.up.sql create mode 100644 core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.down.sql create mode 100644 
core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.up.sql create mode 100644 core/lib/dal/migrations/20220827110416_miniblocks.down.sql create mode 100644 core/lib/dal/migrations/20220827110416_miniblocks.up.sql create mode 100644 core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.down.sql create mode 100644 core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.up.sql create mode 100644 core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.down.sql create mode 100644 core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.up.sql create mode 100644 core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.down.sql create mode 100644 core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.up.sql create mode 100644 core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.down.sql create mode 100644 core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.up.sql create mode 100644 core/lib/dal/migrations/20220908094615_add_error_field_to_queues.down.sql create mode 100644 core/lib/dal/migrations/20220908094615_add_error_field_to_queues.up.sql create mode 100644 core/lib/dal/migrations/20220908103955_verifier-compilers-versions.down.sql create mode 100644 core/lib/dal/migrations/20220908103955_verifier-compilers-versions.up.sql create mode 100644 core/lib/dal/migrations/20220908145203_remove-redundant-fields.down.sql create mode 100644 core/lib/dal/migrations/20220908145203_remove-redundant-fields.up.sql create mode 100644 core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.down.sql create mode 100644 core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.up.sql create mode 100644 core/lib/dal/migrations/20220915125943_l1_batch_tx_number.down.sql create mode 100644 
core/lib/dal/migrations/20220915125943_l1_batch_tx_number.up.sql create mode 100644 core/lib/dal/migrations/20220930085018_add_proof_aggregations.down.sql create mode 100644 core/lib/dal/migrations/20220930085018_add_proof_aggregations.up.sql create mode 100644 core/lib/dal/migrations/20221001090302_add_proof_result.down.sql create mode 100644 core/lib/dal/migrations/20221001090302_add_proof_result.up.sql create mode 100644 core/lib/dal/migrations/20221001131821_add_static_artifact_storage.down.sql create mode 100644 core/lib/dal/migrations/20221001131821_add_static_artifact_storage.up.sql create mode 100644 core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.down.sql create mode 100644 core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.up.sql create mode 100644 core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.down.sql create mode 100644 core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.up.sql create mode 100644 core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.down.sql create mode 100644 core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.up.sql create mode 100644 core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.down.sql create mode 100644 core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.up.sql create mode 100644 core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.down.sql create mode 100644 core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.up.sql create mode 100644 core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.up.sql create mode 100644 core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.down.sql create mode 100644 core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.up.sql create mode 100644 
core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.down.sql create mode 100644 core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.up.sql create mode 100644 core/lib/dal/migrations/20221031110209_miniblocks-hash-index.down.sql create mode 100644 core/lib/dal/migrations/20221031110209_miniblocks-hash-index.up.sql create mode 100644 core/lib/dal/migrations/20221103104136_add-signed-raw-tx.down.sql create mode 100644 core/lib/dal/migrations/20221103104136_add-signed-raw-tx.up.sql create mode 100644 core/lib/dal/migrations/20221108190838_set-primary-keys.down.sql create mode 100644 core/lib/dal/migrations/20221108190838_set-primary-keys.up.sql create mode 100644 core/lib/dal/migrations/20221109094807_block-skip-proof.down.sql create mode 100644 core/lib/dal/migrations/20221109094807_block-skip-proof.up.sql create mode 100644 core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.down.sql create mode 100644 core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.up.sql create mode 100644 core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.up.sql create mode 100644 core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.up.sql create mode 100644 core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.up.sql create mode 100644 
core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.up.sql create mode 100644 core/lib/dal/migrations/20221202102801_events-tx-initiator-address.down.sql create mode 100644 core/lib/dal/migrations/20221202102801_events-tx-initiator-address.up.sql create mode 100644 core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.down.sql create mode 100644 core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.up.sql create mode 100644 core/lib/dal/migrations/20221215094205_prover-job-identity.down.sql create mode 100644 core/lib/dal/migrations/20221215094205_prover-job-identity.up.sql create mode 100644 core/lib/dal/migrations/20221227165603_fee_model.down.sql create mode 100644 core/lib/dal/migrations/20221227165603_fee_model.up.sql create mode 100644 core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.down.sql create mode 100644 core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.up.sql create mode 100644 core/lib/dal/migrations/20230105122559_protective_reads.down.sql create mode 100644 core/lib/dal/migrations/20230105122559_protective_reads.up.sql create mode 100644 core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.down.sql create mode 100644 core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.up.sql create mode 100644 core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.down.sql create mode 100644 core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.up.sql create mode 100644 core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.down.sql create mode 100644 core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.up.sql create mode 100644 
core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.down.sql create mode 100644 core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.up.sql create mode 100644 core/lib/dal/migrations/20230112111801_initial_writes.down.sql create mode 100644 core/lib/dal/migrations/20230112111801_initial_writes.up.sql create mode 100644 core/lib/dal/migrations/20230113113154_add_storage_logs_index.down.sql create mode 100644 core/lib/dal/migrations/20230113113154_add_storage_logs_index.up.sql create mode 100644 core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.down.sql create mode 100644 core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.up.sql create mode 100644 core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.down.sql create mode 100644 core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.up.sql create mode 100644 core/lib/dal/migrations/20230119182427_l1_refunds.down.sql create mode 100644 core/lib/dal/migrations/20230119182427_l1_refunds.up.sql create mode 100644 core/lib/dal/migrations/20230202142858_ergs_to_gas.down.sql create mode 100644 core/lib/dal/migrations/20230202142858_ergs_to_gas.up.sql create mode 100644 core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.down.sql create mode 100644 core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.up.sql create mode 100644 core/lib/dal/sqlx-data.json create mode 100644 core/lib/dal/src/blocks_dal.rs create mode 100644 core/lib/dal/src/blocks_web3_dal.rs create mode 100644 core/lib/dal/src/connection/holder.rs create mode 100644 core/lib/dal/src/connection/mod.rs create mode 100644 core/lib/dal/src/connection/test_pool.rs create mode 100644 core/lib/dal/src/eth_sender_dal.rs create mode 100644 core/lib/dal/src/events_dal.rs create mode 100644 
core/lib/dal/src/events_web3_dal.rs create mode 100644 core/lib/dal/src/explorer/contract_verification_dal.rs create mode 100644 core/lib/dal/src/explorer/explorer_accounts_dal.rs create mode 100644 core/lib/dal/src/explorer/explorer_blocks_dal.rs create mode 100644 core/lib/dal/src/explorer/explorer_events_dal.rs create mode 100644 core/lib/dal/src/explorer/explorer_misc_dal.rs create mode 100644 core/lib/dal/src/explorer/explorer_transactions_dal.rs create mode 100644 core/lib/dal/src/explorer/mod.rs create mode 100644 core/lib/dal/src/explorer/storage_contract_info.rs create mode 100644 core/lib/dal/src/fee_monitor_dal.rs create mode 100644 core/lib/dal/src/gpu_prover_queue_dal.rs create mode 100644 core/lib/dal/src/lib.rs create mode 100644 core/lib/dal/src/models/mod.rs create mode 100644 core/lib/dal/src/models/storage_block.rs create mode 100644 core/lib/dal/src/models/storage_contract.rs create mode 100644 core/lib/dal/src/models/storage_eth_tx.rs create mode 100644 core/lib/dal/src/models/storage_event.rs create mode 100644 core/lib/dal/src/models/storage_fee_monitor.rs create mode 100644 core/lib/dal/src/models/storage_log.rs create mode 100644 core/lib/dal/src/models/storage_prover_job_info.rs create mode 100644 core/lib/dal/src/models/storage_state_record.rs create mode 100644 core/lib/dal/src/models/storage_token.rs create mode 100644 core/lib/dal/src/models/storage_transaction.rs create mode 100644 core/lib/dal/src/models/storage_witness_job_info.rs create mode 100644 core/lib/dal/src/prover_dal.rs create mode 100644 core/lib/dal/src/storage_dal.rs create mode 100644 core/lib/dal/src/storage_load_dal.rs create mode 100644 core/lib/dal/src/storage_logs_dal.rs create mode 100644 core/lib/dal/src/storage_logs_dedup_dal.rs create mode 100644 core/lib/dal/src/storage_web3_dal.rs create mode 100644 core/lib/dal/src/tests/mod.rs create mode 100644 core/lib/dal/src/time_utils.rs create mode 100644 core/lib/dal/src/tokens_dal.rs create mode 100644 
core/lib/dal/src/tokens_web3_dal.rs create mode 100644 core/lib/dal/src/transactions_dal.rs create mode 100644 core/lib/dal/src/transactions_web3_dal.rs create mode 100644 core/lib/dal/src/witness_generator_dal.rs create mode 100644 core/lib/db_test_macro/Cargo.toml create mode 100644 core/lib/db_test_macro/src/lib.rs create mode 100644 core/lib/eth_client/Cargo.toml create mode 100644 core/lib/eth_client/src/clients/http_client.rs create mode 100644 core/lib/eth_client/src/clients/mock.rs create mode 100644 core/lib/eth_client/src/clients/mod.rs create mode 100644 core/lib/eth_client/src/lib.rs create mode 100644 core/lib/eth_signer/Cargo.toml create mode 100644 core/lib/eth_signer/src/error.rs create mode 100644 core/lib/eth_signer/src/json_rpc_signer.rs create mode 100644 core/lib/eth_signer/src/lib.rs create mode 100644 core/lib/eth_signer/src/pk_signer.rs create mode 100644 core/lib/eth_signer/src/raw_ethereum_tx.rs create mode 100644 core/lib/mempool/Cargo.toml create mode 100644 core/lib/mempool/src/lib.rs create mode 100644 core/lib/mempool/src/mempool_store.rs create mode 100644 core/lib/mempool/src/tests.rs create mode 100644 core/lib/mempool/src/types.rs create mode 100644 core/lib/merkle_tree/Cargo.toml create mode 100644 core/lib/merkle_tree/src/iter_ext/merge_join_with_max_predecessor.rs create mode 100644 core/lib/merkle_tree/src/iter_ext/mod.rs create mode 100644 core/lib/merkle_tree/src/lib.rs create mode 100644 core/lib/merkle_tree/src/patch.rs create mode 100644 core/lib/merkle_tree/src/storage.rs create mode 100644 core/lib/merkle_tree/src/tests.rs create mode 100644 core/lib/merkle_tree/src/tree_config.rs create mode 100644 core/lib/merkle_tree/src/types.rs create mode 100644 core/lib/merkle_tree/src/utils.rs create mode 100644 core/lib/merkle_tree/src/zksync_tree.rs create mode 100644 core/lib/mini_merkle_tree/Cargo.toml create mode 100644 core/lib/mini_merkle_tree/src/lib.rs create mode 100644 core/lib/object_store/Cargo.toml create mode 
100644 core/lib/object_store/src/file_backed_object_store.rs create mode 100644 core/lib/object_store/src/gcs_object_store.rs create mode 100644 core/lib/object_store/src/gcs_utils.rs create mode 100644 core/lib/object_store/src/lib.rs create mode 100644 core/lib/object_store/src/object_store.rs create mode 100644 core/lib/object_store/src/tests.rs create mode 100644 core/lib/prometheus_exporter/Cargo.toml create mode 100644 core/lib/prometheus_exporter/src/lib.rs create mode 100644 core/lib/prover_utils/Cargo.toml create mode 100644 core/lib/prover_utils/src/lib.rs create mode 100644 core/lib/queued_job_processor/Cargo.toml create mode 100644 core/lib/queued_job_processor/src/lib.rs create mode 100644 core/lib/state/Cargo.toml create mode 100644 core/lib/state/src/lib.rs create mode 100644 core/lib/state/src/secondary_storage.rs create mode 100644 core/lib/state/src/storage_view.rs create mode 100644 core/lib/storage/Cargo.toml create mode 100644 core/lib/storage/src/db.rs create mode 100644 core/lib/storage/src/lib.rs create mode 100644 core/lib/storage/src/util.rs create mode 100644 core/lib/types/Cargo.toml create mode 100644 core/lib/types/src/aggregated_operations.rs create mode 100644 core/lib/types/src/api.rs create mode 100644 core/lib/types/src/block.rs create mode 100644 core/lib/types/src/circuit.rs create mode 100644 core/lib/types/src/commitment.rs create mode 100644 core/lib/types/src/eth_sender.rs create mode 100644 core/lib/types/src/event.rs create mode 100644 core/lib/types/src/explorer_api.rs create mode 100644 core/lib/types/src/fee.rs create mode 100644 core/lib/types/src/helpers.rs create mode 100644 core/lib/types/src/l1/error.rs create mode 100644 core/lib/types/src/l1/mod.rs create mode 100644 core/lib/types/src/l2/error.rs create mode 100644 core/lib/types/src/l2/mod.rs create mode 100644 core/lib/types/src/l2_to_l1_log.rs create mode 100644 core/lib/types/src/lib.rs create mode 100644 core/lib/types/src/log_query_sorter.rs create mode 
100644 core/lib/types/src/priority_op_onchain_data.rs create mode 100644 core/lib/types/src/proofs.rs create mode 100644 core/lib/types/src/pubdata_packing.rs create mode 100644 core/lib/types/src/storage/log.rs create mode 100644 core/lib/types/src/storage/mod.rs create mode 100644 core/lib/types/src/storage/writes.rs create mode 100644 core/lib/types/src/system_contracts.rs create mode 100644 core/lib/types/src/tokens.rs create mode 100644 core/lib/types/src/transaction_request.rs create mode 100644 core/lib/types/src/tx/execute.rs create mode 100644 core/lib/types/src/tx/mod.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/member_types.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/mod.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/struct_builder.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/tests.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/typed_structure.rs create mode 100644 core/lib/types/src/tx/primitives/eip712_signature/utils.rs create mode 100644 core/lib/types/src/tx/primitives/mod.rs create mode 100644 core/lib/types/src/tx/primitives/packed_eth_signature.rs create mode 100644 core/lib/types/src/tx/tx_execution_info.rs create mode 100644 core/lib/types/src/utils.rs create mode 100644 core/lib/types/src/vm_trace.rs create mode 100644 core/lib/utils/Cargo.toml create mode 100644 core/lib/utils/src/bytecode.rs create mode 100644 core/lib/utils/src/convert.rs create mode 100644 core/lib/utils/src/env_tools.rs create mode 100644 core/lib/utils/src/format.rs create mode 100644 core/lib/utils/src/lib.rs create mode 100644 core/lib/utils/src/macros.rs create mode 100644 core/lib/utils/src/misc.rs create mode 100644 core/lib/utils/src/panic_extractor.rs create mode 100644 core/lib/utils/src/panic_notify.rs create mode 100644 core/lib/utils/src/serde_wrappers.rs create mode 100644 core/lib/utils/src/test_utils.rs create mode 100644 
core/lib/utils/src/time.rs create mode 100644 core/lib/vlog/Cargo.toml create mode 100644 core/lib/vlog/src/lib.rs create mode 100644 core/lib/vm/Cargo.toml create mode 100644 core/lib/vm/src/bootloader_state.rs create mode 100644 core/lib/vm/src/errors/bootloader_error.rs create mode 100644 core/lib/vm/src/errors/mod.rs create mode 100644 core/lib/vm/src/errors/tx_revert_reason.rs create mode 100644 core/lib/vm/src/errors/vm_revert_reason.rs create mode 100644 core/lib/vm/src/event_sink.rs create mode 100644 core/lib/vm/src/events.rs create mode 100644 core/lib/vm/src/history_recorder.rs create mode 100644 core/lib/vm/src/lib.rs create mode 100644 core/lib/vm/src/memory.rs create mode 100644 core/lib/vm/src/oracle_tools.rs create mode 100644 core/lib/vm/src/oracles/decommitter.rs create mode 100644 core/lib/vm/src/oracles/mod.rs create mode 100644 core/lib/vm/src/oracles/precompile.rs create mode 100644 core/lib/vm/src/oracles/storage.rs create mode 100644 core/lib/vm/src/oracles/tracer.rs create mode 100644 core/lib/vm/src/pubdata_utils.rs create mode 100644 core/lib/vm/src/storage.rs create mode 100644 core/lib/vm/src/test_utils.rs create mode 100644 core/lib/vm/src/tests/bootloader.rs create mode 100644 core/lib/vm/src/tests/mod.rs create mode 100644 core/lib/vm/src/transaction_data.rs create mode 100644 core/lib/vm/src/utils.rs create mode 100644 core/lib/vm/src/vm.rs create mode 100644 core/lib/vm/src/vm_with_bootloader.rs create mode 100644 core/lib/web3_decl/Cargo.toml create mode 100644 core/lib/web3_decl/src/error.rs create mode 100644 core/lib/web3_decl/src/lib.rs create mode 100644 core/lib/web3_decl/src/namespaces/eth.rs create mode 100644 core/lib/web3_decl/src/namespaces/eth_subscribe.rs create mode 100644 core/lib/web3_decl/src/namespaces/mod.rs create mode 100644 core/lib/web3_decl/src/namespaces/net.rs create mode 100644 core/lib/web3_decl/src/namespaces/web3.rs create mode 100644 core/lib/web3_decl/src/namespaces/zks.rs create mode 100644 
core/lib/web3_decl/src/types.rs create mode 100644 core/tests/loadnext/Cargo.toml create mode 100644 core/tests/loadnext/README.md create mode 100644 core/tests/loadnext/src/account/api_request_executor.rs create mode 100644 core/tests/loadnext/src/account/explorer_api_executor.rs create mode 100644 core/tests/loadnext/src/account/mod.rs create mode 100644 core/tests/loadnext/src/account/pubsub_executor.rs create mode 100644 core/tests/loadnext/src/account/tx_command_executor.rs create mode 100644 core/tests/loadnext/src/account_pool.rs create mode 100644 core/tests/loadnext/src/all.rs create mode 100644 core/tests/loadnext/src/command/api.rs create mode 100644 core/tests/loadnext/src/command/explorer_api.rs create mode 100644 core/tests/loadnext/src/command/mod.rs create mode 100644 core/tests/loadnext/src/command/pubsub.rs create mode 100644 core/tests/loadnext/src/command/tx_command.rs create mode 100644 core/tests/loadnext/src/config.rs create mode 100644 core/tests/loadnext/src/constants.rs create mode 100644 core/tests/loadnext/src/corrupted_tx.rs create mode 100644 core/tests/loadnext/src/executor.rs create mode 100644 core/tests/loadnext/src/fs_utils.rs create mode 100644 core/tests/loadnext/src/lib.rs create mode 100644 core/tests/loadnext/src/main.rs create mode 100644 core/tests/loadnext/src/report.rs create mode 100644 core/tests/loadnext/src/report_collector/metrics_collector.rs create mode 100644 core/tests/loadnext/src/report_collector/mod.rs create mode 100644 core/tests/loadnext/src/report_collector/operation_results_collector.rs create mode 100644 core/tests/loadnext/src/rng.rs create mode 100644 core/tests/revert-test/package.json create mode 100644 core/tests/revert-test/tests/revert-and-restart.test.ts create mode 100644 core/tests/revert-test/tests/tester.ts create mode 100644 core/tests/revert-test/tsconfig.json create mode 100644 core/tests/test_account/Cargo.toml create mode 100644 core/tests/test_account/src/lib.rs create mode 100644 
core/tests/testkit/Cargo.toml create mode 100644 core/tests/testkit/src/commands/gas_price/mod.rs create mode 100644 core/tests/testkit/src/commands/gas_price/types.rs create mode 100644 core/tests/testkit/src/commands/gas_price/utils.rs create mode 100644 core/tests/testkit/src/commands/mod.rs create mode 100644 core/tests/testkit/src/commands/revert_block.rs create mode 100644 core/tests/testkit/src/commands/upgrade_contract.rs create mode 100644 core/tests/testkit/src/commands/utils.rs create mode 100644 core/tests/testkit/src/eth_provider.rs create mode 100644 core/tests/testkit/src/external_commands.rs create mode 100644 core/tests/testkit/src/main.rs create mode 100644 core/tests/testkit/src/server_handler.rs create mode 100644 core/tests/testkit/src/tester.rs create mode 100644 core/tests/testkit/src/types.rs create mode 100644 core/tests/testkit/src/utils.rs create mode 100644 core/tests/ts-integration/README.md create mode 100644 core/tests/ts-integration/contracts/README.md create mode 100644 core/tests/ts-integration/contracts/basic-constructor/basic-constructor.sol create mode 100644 core/tests/ts-integration/contracts/context/context.sol create mode 100644 core/tests/ts-integration/contracts/counter/counter.sol create mode 100644 core/tests/ts-integration/contracts/create/Foo.sol create mode 100644 core/tests/ts-integration/contracts/create/create.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/Constants.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/RLPEncoder.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/SystemContext.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/SystemContractsCaller.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/TransactionHelper.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/Utils.sol create mode 100644 
core/tests/ts-integration/contracts/custom-account/custom-account.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/custom-paymaster.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/IAccount.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/IContractDeployer.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/IERC20.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/INonceHolder.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/IPaymaster.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/interfaces/IPaymasterFlow.sol create mode 100644 core/tests/ts-integration/contracts/custom-account/nonce-holder-test.sol create mode 100644 core/tests/ts-integration/contracts/error/error.sol create mode 100644 core/tests/ts-integration/contracts/events/events.sol create mode 100644 core/tests/ts-integration/contracts/events/sample-calldata create mode 100644 core/tests/ts-integration/contracts/expensive/expensive.sol create mode 100644 core/tests/ts-integration/contracts/infinite/infinite.sol create mode 100644 core/tests/ts-integration/contracts/writes-and-messages/writes-and-messages.sol create mode 100644 core/tests/ts-integration/hardhat.config.ts create mode 100644 core/tests/ts-integration/jest.config.json create mode 100644 core/tests/ts-integration/package.json create mode 100644 core/tests/ts-integration/src/context-owner.ts create mode 100644 core/tests/ts-integration/src/env.ts create mode 100644 core/tests/ts-integration/src/helpers.ts create mode 100644 core/tests/ts-integration/src/index.ts create mode 100644 core/tests/ts-integration/src/jest-setup/add-matchers.ts create mode 100644 core/tests/ts-integration/src/jest-setup/global-setup.ts create mode 100644 core/tests/ts-integration/src/jest-setup/global-teardown.ts create mode 100644 
core/tests/ts-integration/src/matchers/big-number.ts create mode 100644 core/tests/ts-integration/src/matchers/eth-primitives.ts create mode 100644 core/tests/ts-integration/src/matchers/fail.ts create mode 100644 core/tests/ts-integration/src/matchers/matcher-helpers.ts create mode 100644 core/tests/ts-integration/src/matchers/transaction.ts create mode 100644 core/tests/ts-integration/src/modifiers/balance-checker.ts create mode 100644 core/tests/ts-integration/src/modifiers/index.ts create mode 100644 core/tests/ts-integration/src/modifiers/receipt-check.ts create mode 100644 core/tests/ts-integration/src/prerequisites.ts create mode 100644 core/tests/ts-integration/src/reporter.ts create mode 100644 core/tests/ts-integration/src/retry-provider.ts create mode 100644 core/tests/ts-integration/src/system.ts create mode 100644 core/tests/ts-integration/src/test-master.ts create mode 100644 core/tests/ts-integration/src/types.ts create mode 100644 core/tests/ts-integration/tests/api/explorer.test.ts create mode 100644 core/tests/ts-integration/tests/api/web3.test.ts create mode 100644 core/tests/ts-integration/tests/contracts.test.ts create mode 100644 core/tests/ts-integration/tests/custom-account.test.ts create mode 100644 core/tests/ts-integration/tests/erc20.test.ts create mode 100644 core/tests/ts-integration/tests/ether.test.ts create mode 100644 core/tests/ts-integration/tests/l1.test.ts create mode 100644 core/tests/ts-integration/tests/mempool.test.ts create mode 100644 core/tests/ts-integration/tests/paymaster.test.ts create mode 100644 core/tests/ts-integration/tests/self-unit.test.ts create mode 100644 core/tests/ts-integration/tests/system.test.ts create mode 100644 core/tests/ts-integration/tsconfig.json create mode 100644 core/tests/ts-integration/typings/jest.d.ts create mode 100644 core/tests/ts-integration/yarn.lock create mode 100644 docker-compose-backup-test.yml create mode 100644 docker-compose-gpu-runner.yml create mode 100644 
docker-compose-runner.yml create mode 100644 docker-compose.yml create mode 100644 docker/circuit-synthesizer/Dockerfile create mode 100644 docker/contract-verifier/Dockerfile create mode 100644 docker/contract-verifier/install-all-solc.sh create mode 100644 docker/geth/Dockerfile create mode 100644 docker/geth/fast-dev.json create mode 100755 docker/geth/geth-entry.sh create mode 100644 docker/geth/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b create mode 100644 docker/geth/mainnet-dev.json create mode 100644 docker/geth/password.sec create mode 100644 docker/geth/standard-dev.json create mode 100644 docker/local-node/Dockerfile create mode 100755 docker/local-node/entrypoint.sh create mode 100644 docker/prover/Dockerfile create mode 100644 docker/runner/Dockerfile create mode 100644 docker/server-v2/Dockerfile create mode 100644 docker/zk-environment/Dockerfile create mode 100644 docker/zk-rust-nightly-environment/Dockerfile create mode 100644 docs/architecture.md create mode 100644 docs/development.md create mode 100644 docs/launch.md create mode 100644 docs/setup-dev.md create mode 100644 eraLogo.svg create mode 100644 etc/ERC20/contracts/ZkSyncERC20.sol create mode 100644 etc/ERC20/contracts/interfaces/Context.sol create mode 100644 etc/ERC20/contracts/interfaces/ERC20.sol create mode 100644 etc/ERC20/contracts/interfaces/IERC20.sol create mode 100644 etc/ERC20/contracts/interfaces/IERC20Metadata.sol create mode 100644 etc/ERC20/hardhat.config.ts create mode 100644 etc/ERC20/package.json create mode 100644 etc/commitment_tests/zksync_testharness_test.json create mode 100644 etc/contracts-test-data/README.md create mode 100644 etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol create mode 100644 etc/contracts-test-data/contracts/context/context.sol create mode 100644 etc/contracts-test-data/contracts/counter/counter.sol create mode 100644 etc/contracts-test-data/contracts/create/Foo.sol create mode 
100644 etc/contracts-test-data/contracts/create/create.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/Constants.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/SystemContext.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/Utils.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/custom-account.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol create mode 100644 etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol create mode 100644 etc/contracts-test-data/contracts/error/error.sol create mode 100644 etc/contracts-test-data/contracts/estimator/estimator.sol create mode 100644 etc/contracts-test-data/contracts/events/events.sol create mode 100644 etc/contracts-test-data/contracts/events/sample-calldata create mode 100644 etc/contracts-test-data/contracts/expensive/expensive.sol create mode 100644 etc/contracts-test-data/contracts/infinite/infinite.sol create mode 100644 etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol create mode 100644 etc/contracts-test-data/hardhat.config.ts 
create mode 100644 etc/contracts-test-data/package.json create mode 100644 etc/env/base/README.md create mode 100644 etc/env/base/api.toml create mode 100644 etc/env/base/chain.toml create mode 100644 etc/env/base/circuit_synthesizer.toml create mode 100644 etc/env/base/contract_verifier.toml create mode 100644 etc/env/base/contracts.toml create mode 100644 etc/env/base/database.toml create mode 100644 etc/env/base/eth_client.toml create mode 100644 etc/env/base/eth_sender.toml create mode 100644 etc/env/base/eth_watch.toml create mode 100644 etc/env/base/fetcher.toml create mode 100644 etc/env/base/misc.toml create mode 100644 etc/env/base/nfs.toml create mode 100644 etc/env/base/object_store.toml create mode 100644 etc/env/base/private.toml create mode 100644 etc/env/base/prover.toml create mode 100644 etc/env/base/prover_group.toml create mode 100644 etc/env/base/rust.toml create mode 100644 etc/env/base/witness_generator.toml create mode 100644 etc/env/docker.env create mode 100644 etc/lint-config/js.js create mode 100644 etc/lint-config/md.js create mode 100644 etc/lint-config/sol.js create mode 100644 etc/lint-config/ts.js create mode 160000 etc/openzeppelin-contracts create mode 100644 etc/prettier-config/js.js create mode 100644 etc/prettier-config/md.js create mode 100644 etc/prettier-config/sol.js create mode 100644 etc/prettier-config/ts.js create mode 100644 etc/prettier-config/vue.js create mode 100755 etc/scripts/prepare_bellman_cuda.sh create mode 160000 etc/system-contracts create mode 100644 etc/test_config/.gitignore create mode 100644 etc/test_config/README.md create mode 100644 etc/test_config/constant/api.json create mode 100644 etc/test_config/constant/eth.json create mode 100644 etc/test_config/volatile/.empty create mode 100644 etc/thread create mode 100644 etc/tokens/goerli.json create mode 100644 etc/tokens/mainnet.json create mode 100644 etc/tokens/rinkeby.json create mode 100644 etc/tokens/ropsten.json create mode 100644 
etc/tokens/test.json create mode 100644 infrastructure/local-setup-preparation/.gitignore create mode 100644 infrastructure/local-setup-preparation/README.md create mode 100644 infrastructure/local-setup-preparation/package.json create mode 100644 infrastructure/local-setup-preparation/src/index.ts create mode 100644 infrastructure/local-setup-preparation/src/utils.ts create mode 100644 infrastructure/local-setup-preparation/tsconfig.json create mode 100644 infrastructure/openzeppelin-tests-preparation/package.json create mode 100644 infrastructure/openzeppelin-tests-preparation/src/index.ts create mode 100644 infrastructure/openzeppelin-tests-preparation/tsconfig.json create mode 100644 infrastructure/reading-tool/.gitignore create mode 100644 infrastructure/reading-tool/README.md create mode 100644 infrastructure/reading-tool/package.json create mode 100644 infrastructure/reading-tool/src/index.ts create mode 100644 infrastructure/reading-tool/tsconfig.json create mode 100644 infrastructure/zk/.gitignore create mode 100644 infrastructure/zk/README.md create mode 100644 infrastructure/zk/package.json create mode 100644 infrastructure/zk/src/clean.ts create mode 100644 infrastructure/zk/src/compiler.ts create mode 100644 infrastructure/zk/src/completion.ts create mode 100644 infrastructure/zk/src/config.ts create mode 100644 infrastructure/zk/src/contract.ts create mode 100644 infrastructure/zk/src/contract_verifier.ts create mode 100644 infrastructure/zk/src/database/database.ts create mode 100644 infrastructure/zk/src/docker.ts create mode 100644 infrastructure/zk/src/down.ts create mode 100644 infrastructure/zk/src/dummy-prover.ts create mode 100644 infrastructure/zk/src/env.ts create mode 100644 infrastructure/zk/src/fmt.ts create mode 100644 infrastructure/zk/src/index.ts create mode 100644 infrastructure/zk/src/init.ts create mode 100644 infrastructure/zk/src/lint.ts create mode 100644 infrastructure/zk/src/prover.ts create mode 100644 
infrastructure/zk/src/run/data-restore.ts create mode 100644 infrastructure/zk/src/run/run.ts create mode 100644 infrastructure/zk/src/server.ts create mode 100644 infrastructure/zk/src/test/integration.ts create mode 100644 infrastructure/zk/src/test/test.ts create mode 100644 infrastructure/zk/src/up.ts create mode 100644 infrastructure/zk/src/utils.ts create mode 100644 infrastructure/zk/tsconfig.json create mode 100644 package.json create mode 100644 renovate.json create mode 100644 rust-toolchain create mode 100644 sdk/zksync-rs/Cargo.toml create mode 100644 sdk/zksync-rs/README.md create mode 100644 sdk/zksync-rs/src/abi/IERC20.json create mode 100644 sdk/zksync-rs/src/abi/IL1Bridge.json create mode 100644 sdk/zksync-rs/src/abi/IPaymasterFlow.json create mode 100644 sdk/zksync-rs/src/abi/IZkSync.json create mode 100755 sdk/zksync-rs/src/abi/update-abi.sh create mode 100644 sdk/zksync-rs/src/error.rs create mode 100644 sdk/zksync-rs/src/ethereum/DepositERC20GasLimit.json create mode 100644 sdk/zksync-rs/src/ethereum/mod.rs create mode 100644 sdk/zksync-rs/src/lib.rs create mode 100644 sdk/zksync-rs/src/operations/deploy_contract.rs create mode 100644 sdk/zksync-rs/src/operations/execute_contract.rs create mode 100644 sdk/zksync-rs/src/operations/mod.rs create mode 100644 sdk/zksync-rs/src/operations/transfer.rs create mode 100644 sdk/zksync-rs/src/operations/withdraw.rs create mode 100644 sdk/zksync-rs/src/signer.rs create mode 100644 sdk/zksync-rs/src/utils.rs create mode 100644 sdk/zksync-rs/src/wallet.rs create mode 100644 sdk/zksync-rs/tests/integration.rs create mode 100644 sdk/zksync-rs/tests/unit.rs create mode 100644 sdk/zksync-web3.js/.gitignore create mode 100644 sdk/zksync-web3.js/abi/ContractDeployer.json create mode 100644 sdk/zksync-web3.js/abi/IAllowList.json create mode 100644 sdk/zksync-web3.js/abi/IERC1271.json create mode 100644 sdk/zksync-web3.js/abi/IERC20.json create mode 100644 sdk/zksync-web3.js/abi/IEthToken.json create mode 100644 
sdk/zksync-web3.js/abi/IL1Bridge.json create mode 100644 sdk/zksync-web3.js/abi/IL1Messenger.json create mode 100644 sdk/zksync-web3.js/abi/IL2Bridge.json create mode 100644 sdk/zksync-web3.js/abi/IPaymasterFlow.json create mode 100644 sdk/zksync-web3.js/abi/IZkSync.json create mode 100755 sdk/zksync-web3.js/abi/update-abi.sh create mode 100644 sdk/zksync-web3.js/package.json create mode 100644 sdk/zksync-web3.js/src/adapters.ts create mode 100644 sdk/zksync-web3.js/src/calldata.ts create mode 100644 sdk/zksync-web3.js/src/contract.ts create mode 100644 sdk/zksync-web3.js/src/index.ts create mode 100644 sdk/zksync-web3.js/src/paymaster-utils.ts create mode 100644 sdk/zksync-web3.js/src/provider.ts create mode 100644 sdk/zksync-web3.js/src/signer.ts create mode 100644 sdk/zksync-web3.js/src/types.ts create mode 100644 sdk/zksync-web3.js/src/utils.ts create mode 100644 sdk/zksync-web3.js/src/wallet.ts create mode 100644 sdk/zksync-web3.js/tests/main.test.ts create mode 100644 sdk/zksync-web3.js/tsconfig.json create mode 100644 sdk/zksync-web3.js/typechain/IAllowList.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IAllowListFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/IERC20Metadata.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IERC20MetadataFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/IEthToken.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IEthTokenFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/IL1Bridge.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IL1BridgeFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/IL2Bridge.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IL2BridgeFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/IZkSync.d.ts create mode 100644 sdk/zksync-web3.js/typechain/IZkSyncFactory.ts create mode 100644 sdk/zksync-web3.js/typechain/index.ts create mode 100644 sdk/zksync-web3.js/typechain/update.sh create mode 100644 tarpaulin.toml create mode 100644 yarn.lock diff 
--git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 000000000000..c91c3f38b7b3 --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[net] +git-fetch-with-cli = true diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 000000000000..271d170e7d01 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,36 @@ +* +!docker/prover/prover-entry.sh +!docker/zk/entrypoint.sh +!docker/local-node/entrypoint.sh +!docker/contract-verifier/install-all-solc.sh +!etc/test_config +!etc/env/dev.env.example +!etc/env/docker.env +!etc/env/base +!etc/tokens +!etc/ERC20 +!artifacts +!keys +keys/setup +!bin/ +!db/ +!backups/ +!core/ +!yarn.lock +!package.json +!Cargo.lock +!Cargo.toml +!contracts/ +# It's required to remove .git from contracts, +# otherwise yarn tries to use .git parent directory that +# doesn't exist. +contracts/.git +!infrastructure/local-setup-preparation +!infrastructure/zk +!sdk/zksync-rs +!sdk/zksync-web3.js +!etc/system-contracts/bootloader/build/artifacts +!etc/system-contracts/artifacts-zk +!cargo +!bellman-cuda +!core/bin/verification_key_generator_and_server/data/ diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 000000000000..6773d535d8c4 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,8 @@ +./volumes/**/* +node_modules +**/node_modules/** +build/ +dist/ +volumes/ +.tslintrc.js +bellman-cuda \ No newline at end of file diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000000..ecee2a3f453c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +*.sol linguist-language=Solidity + diff --git a/.githooks/pre-commit b/.githooks/pre-commit new file mode 100755 index 000000000000..029294ab750a --- /dev/null +++ b/.githooks/pre-commit @@ -0,0 +1,14 @@ +#!/bin/sh +# +# Pre-commit hook verifying that inappropriate code will not be committed. + +# Colors for the terminal output +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Check that Rust formatting rules are not violated. +if ! 
cargo fmt -- --check; then + echo -e "${RED}Commit error!${NC}" + echo "Please format the code via 'cargo fmt', cannot commit unformatted code" + exit 1 +fi diff --git a/.githooks/pre-push b/.githooks/pre-push new file mode 100755 index 000000000000..eb1acbb693c1 --- /dev/null +++ b/.githooks/pre-push @@ -0,0 +1,14 @@ +#!/bin/sh +# +# Pre-push hook verifying that inappropriate code will not be pushed. + +# Colors for the terminal output +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Check that prettier formatting rules are not violated. +if ! zk fmt --check; then + echo -e "${RED}Commit error!${NC}" + echo "Please format the code via 'zk fmt', cannot push unformatted code" + exit 1 +fi diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000000..146a9c4642c6 --- /dev/null +++ b/.gitignore @@ -0,0 +1,65 @@ +# Editor directories and files +.idea +*.suo +*.ntvs* +*.njsproj +*.sln +.vscode +.DS_Store +*.bak +node_modules +*.log +target +a.out +.gitconfig +cobertura.xml +tags +*.orig + +zksync_pk.key +dist +todo + +Cargo.lock +!/Cargo.lock +!/core/bin/prover/Cargo.lock +!/core/bin/circuit_synthesizer/Cargo.lock +!/core/bin/setup_key_generator_and_server/Cargo.lock +!/core/bin/verification_key_generator_and_server/Cargo.lock +!/infrastructure/zksync-crypto/Cargo.lock + +/etc/env/* +!/etc/env/dev.env.example +!/etc/env/docker.env +!/etc/env/ci.env +!/etc/env/base +/etc/tokens/localhost.json +/etc/zksolc-bin/* +/etc/solc-bin/* +!/keys +/keys/* +!/keys/packed +/tmp +/volumes +/logs +/loadtest-config +/sdk/binaryen + +.ipynb_checkpoints + +loadtest_accounts_* + +go_to_env.sh + +core/lib/storage/.env + +.zcli-config.json + +db/ +db-ssd/ +backups/ +artifacts/ +artifacts-zk/ +cache-zk/ +zksolc +.github diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000000..db7040dd34b6 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,12 @@ +[submodule "sdk/binaryen"] + path = sdk/binaryen + url = git@github.com:WebAssembly/binaryen.git +[submodule 
"etc/system-contracts"] + path = etc/system-contracts + url = git@github.com:matter-labs/system-contracts.git +[submodule "etc/openzeppelin-contracts"] + path = etc/openzeppelin-contracts + url = git@github.com:matter-labs/openzeppelin-contracts.git +[submodule "contracts"] + path = contracts + url = https://github.com/matter-labs/zksync-2-contracts.git diff --git a/.markdownlintignore b/.markdownlintignore new file mode 100644 index 000000000000..05ba7370295a --- /dev/null +++ b/.markdownlintignore @@ -0,0 +1,2 @@ +# Ignore submodule +bellman-cuda diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000000..05ba7370295a --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +# Ignore submodule +bellman-cuda diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 000000000000..c126897f5de9 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @RomanBrodetski @perekopskiy diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000000..f129e606f7a5 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,9 @@ +# Contribution Guidelines + +Thank you for considering helping out with the source code! We are extremely grateful for any consideration of +contributions to this repository. However, at this time, we generally do not accept external contributions. This policy +will change in the future, so please check back regularly for updates. + +For security issues, please contact us at [security@matterlabs.dev](mailto:security@matterlabs.dev). + +Thank you for your support in accelerating the mass adoption of crypto for personal sovereignty! diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000000..d5c3b953a8a0 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,7552 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "actix-codec" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a7559404a7f3573127aab53c08ce37a6c6a315c374a31070f3c91cd1b4a7fe" +dependencies = [ + "bitflags", + "bytes 1.4.0", + "futures-core", + "futures-sink", + "log", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util 0.7.6", +] + +[[package]] +name = "actix-cors" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b340e9cfa5b08690aae90fb61beb44e9b06f44fe3d0f93781aaa58cfba86245e" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec", +] + +[[package]] +name = "actix-http" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-utils", + "ahash", + "base64 0.21.0", + "bitflags", + "brotli", + "bytes 1.4.0", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "futures-core", + "h2", + "http", + "httparse", + "httpdate", + "itoa 1.0.5", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite", + "rand 0.8.5", + "sha1", + "smallvec", + "tokio", + "tokio-util 0.7.6", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" +dependencies = [ + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "actix-router" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66ff4d247d2b160861fa2866457e85706833527840e4133f8f49aa423a38799" +dependencies = [ + "bytestring", + "http", + "regex", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" +dependencies = [ + "actix-macros", + "futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e8613a75dd50cc45f473cee3c34d59ed677c0f7b44480ce3b8247d7dc519327" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio 0.8.5", + "num_cpus", + "socket2", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" +dependencies = [ + "futures-core", + "paste", + "pin-project-lite", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite", +] + +[[package]] +name = "actix-web" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-utils", + "actix-web-codegen", + "ahash", + "bytes 1.4.0", + "bytestring", + "cfg-if 1.0.0", + "cookie", + "derive_more", + "encoding_rs", + "futures-core", + "futures-util", + "http", + "itoa 1.0.5", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite", + "regex", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2", + "time 0.3.17", + "url", +] + +[[package]] +name = "actix-web-codegen" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13" +dependencies = [ + "actix-router", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "admin-tools" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "clap 4.1.4", + "dotenvy", + "tokio", + "zksync_dal", + "zksync_types", +] + +[[package]] +name = "aes" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" +dependencies = [ + "aes-soft", + "aesni", + "cipher", +] + +[[package]] +name = "aes-ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7729c3cde54d67063be556aeac75a81330d802f0259500ca40cb52967f975763" +dependencies = [ + "aes-soft", + "aesni", + "cipher", + "ctr", +] + +[[package]] +name = "aes-soft" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" +dependencies = [ + "cipher", + "opaque-debug 0.3.0", +] + +[[package]] +name = "aesni" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" +dependencies = [ + "cipher", + "opaque-debug 0.3.0", +] + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.8", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "anyhow" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" + +[[package]] +name = "arr_macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a105bfda48707cf19220129e78fca01e9639433ffaef4163546ed8fb04120a5" +dependencies = [ + "arr_macro_impl", 
+ "proc-macro-hack", +] + +[[package]] +name = "arr_macro_impl" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" +dependencies = [ + "proc-macro-hack", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "assert_matches" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" + +[[package]] +name = "async-channel" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = 
"2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794" +dependencies = [ + "async-lock", + "autocfg 1.1.0", + "concurrent-queue", + "futures-lite", + "libc", + "log", + "parking", + "polling", + "slab", + "socket2", + "waker-fn", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-lock" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" +dependencies = [ + "event-listener", + "futures-lite", +] + +[[package]] +name = "async-native-tls" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9e7a929bd34c68a82d58a4de7f86fffdaf97fb2af850162a7bb19dd7269b33" +dependencies = [ + "async-std", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-process" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6381ead98388605d0d9ff86371043b5aa922a3905824244de40dc263a14fcba4" +dependencies = [ + "async-io", + "async-lock", + "autocfg 1.1.0", + "blocking", + "cfg-if 1.0.0", + "event-listener", + "futures-lite", + "libc", + "signal-hook", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils 0.8.14", + "futures-channel", + 
"futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "async-task" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" + +[[package]] +name = "async-trait" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "atoi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "autocfg" +version = "0.1.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "base64ct" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +dependencies = [ + "serde", +] + +[[package]] +name = "bellman_ce" +version = "0.3.2" +source = 
"git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayvec 0.7.2", + "bit-vec", + "blake2s_const", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.7.3", + "futures 0.3.26", + "hex", + "lazy_static", + "num_cpus", + "pairing_ce", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "bigdecimal" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc403c26e6b03005522e6e8053384c4e881dfe5b2bf041c0c2c49be33d64a539" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2 1.0.51", + "quote 1.0.23", + "regex", + "rustc-hash", + "shlex", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name 
= "blake2" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +dependencies = [ + "crypto-mac 0.8.0", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "blake2-rfc_bellman_edition" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" +dependencies = [ + "arrayvec 0.4.12", + "byteorder", + "constant_time_eq", +] + +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_const" +version = "0.6.0" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blob_purger" +version = "1.0.0" +dependencies = [ + "structopt", + "zksync_dal", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding 0.1.5", + "byte-tools", + "byteorder", + "generic-array 
0.12.4", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "block-padding 0.2.1", + "generic-array 0.14.6", +] + +[[package]] +name = "block-buffer" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +dependencies = [ + "generic-array 0.14.6", +] + +[[package]] +name = "block-modes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" +dependencies = [ + "block-padding 0.2.1", + "cipher", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", +] + +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + +[[package]] +name = "blocking" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", +] + +[[package]] +name = "brotli" +version = "3.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bstr" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" +dependencies = [ + "lazy_static", + "memchr", + "regex-automata", + "serde", +] + +[[package]] +name = "bstr" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" +dependencies = [ + "byteorder", + "iovec", +] + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "bytestring" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7f83e57d9154148e355404702e2694463241880b939570d7c97c014da7a69a1" +dependencies = [ + "bytes 1.4.0", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "rustc-serialize", + "serde", + "time 0.1.43", + "wasm-bindgen", + "winapi 0.3.9", +] + +[[package]] +name = "cipher" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +dependencies = [ + "generic-array 0.14.6", +] + +[[package]] +name = "circuit_testing" +version = "0.1.0" +source = "git+https://github.com/matter-labs/circuit_testing.git?branch=main#7160c45c844944748663c91b6860c77f5376d9e4" +dependencies = [ + "bellman_ce", +] + +[[package]] +name = "clang-sys" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "clap" +version = "4.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f13b9c79b5d1dd500d20ef541215a6423c75829ef43117e1b4d17fd8af0b5d76" +dependencies = [ + "bitflags", + "clap_derive", + "clap_lex", + "is-terminal", + "once_cell", + "strsim 0.10.0", + "termcolor", +] + +[[package]] +name = "clap_derive" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "684a277d672e91966334af371f1a7b5833f9aa00b07c84e92fbce95e00208ce8" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "clap_lex" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "783fe232adfca04f90f56201b26d79682d4cd2625e0bc7290b95123afe558ade" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "cloud-storage" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7602ac4363f68ac757d6b87dd5d850549a14d37489902ae639c06ecec06ad275" 
+dependencies = [ + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + "chrono", + "dotenv", + "futures-util", + "hex", + "jsonwebtoken", + "lazy_static", + "openssl", + "percent-encoding", + "reqwest", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + +[[package]] +name = "codegen" +version = "0.1.0" +source = "git+https://github.com/matter-labs/solidity_plonk_verifier.git?branch=dev#4fb6397f778a580c9207ec23661228f5da7e66b4" +dependencies = [ + "ethereum-types", + "franklin-crypto", + "handlebars", + "hex", + "paste", + "rescue_poseidon", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d" +dependencies = [ + "indexmap", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes 1.4.0", + "memchr", +] + +[[package]] +name = "concurrent-queue" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" +dependencies = [ + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "const-oid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "percent-encoding", + "time 0.3.17", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "1.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "criterion" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" +dependencies = [ + "atty", + "cast", + "clap 2.34.0", + "criterion-plot", + "csv", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_cbor", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel 0.4.4", + "crossbeam-deque 0.7.4", + "crossbeam-epoch 0.8.2", + "crossbeam-queue 0.2.3", + "crossbeam-utils 0.7.2", +] + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-epoch 0.9.13", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" +dependencies = [ + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "lazy_static", + "maybe-uninit", + "memoffset 0.5.6", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", + "memoffset 0.7.1", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + 
+[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array 0.14.6", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array 0.14.6", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array 0.14.6", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a" +dependencies = [ + "generic-array 0.14.6", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +dependencies = [ + "generic-array 0.14.6", + "subtle", +] + +[[package]] +name = "cs_derive" +version = "0.1.0" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "csv" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" +dependencies = [ + "bstr 0.2.17", + "csv-core", + "itoa 0.4.8", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + +[[package]] +name = "ctor" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +dependencies = [ + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" +dependencies = [ + "cipher", +] + +[[package]] +name = "ctrlc" +version = "3.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" +dependencies = [ + "nix", + "windows-sys 0.45.0", +] + +[[package]] +name = "cxx" +version = "1.0.90" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "scratch", + "syn 1.0.107", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "strsim 0.10.0", + "syn 1.0.107", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "dashmap" +version = "5.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown 0.12.3", + "lock_api", + "once_cell", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "db_test_macro" +version = "0.1.0" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustc_version", + "syn 1.0.107", +] + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.4", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array 0.14.6", +] + +[[package]] +name = "digest" 
+version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer 0.10.3", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" +dependencies = [ + "libc", + "redox_users 0.3.5", + "winapi 0.3.9", +] + +[[package]] +name = "dirs" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users 0.4.3", + "winapi 0.3.9", +] + +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + +[[package]] +name = "dotenvy" +version = "0.15.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03d8c417d7a8cb362e0c37e5d815f5eb7c37f79ff93707329d5a194e42e54ca0" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "eip712-signature" +version = "0.1.0" +source = "git+https://github.com/vladbochok/eip712-signature#30b11455e7d613313e8c12d2aad961fd4bf902fe" +dependencies = [ + "ethereum-types", + "parity-crypto", + "thiserror", +] + +[[package]] +name = "either" +version = "1.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint", + "der", + "digest 0.10.6", + "ff", + "generic-array 0.14.6", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "ethabi" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c98847055d934070b90e806e12d3936b787d0a115068981c1d8dfd5dfef5a5" +dependencies = [ + "ethereum-types", + "hex", + "serde", + "serde_json", + "sha3 0.9.1", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-rlp", + "impl-serde", + "tiny-keccak 2.0.2", +] + +[[package]] +name = "ethereum-types" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05136f7057fe789f06e6d41d07b34e6f70d8c86e5693b60f97aaa6553553bdaf" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-rlp", + "impl-serde", + "primitive-types", + "uint", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "events_tx_initiator_address_migration" +version = "0.1.0" +dependencies = [ + "tokio", + "zksync_dal", + "zksync_types", +] + +[[package]] +name = "expanduser" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14e0b79235da57db6b6c2beed9af6e5de867d63a973ae3e91910ddc33ba40bc0" +dependencies = [ + "dirs 1.0.5", + "lazy_static", + "pwd", +] + +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fastrand" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +dependencies = [ + "instant", +] + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" +dependencies = [ + "byteorder", + "ff_derive_ce", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "ff_derive_ce" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" +dependencies = [ + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+https://github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "fuchsia-zircon" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" +dependencies = [ + "bitflags", + "fuchsia-zircon-sys", +] + +[[package]] +name = "fuchsia-zircon-sys" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" + +[[package]] +name = "funty" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + +[[package]] +name = "futures" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608" + +[[package]] +name = "futures-executor" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-intrusive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot 0.11.2", +] + +[[package]] +name = "futures-io" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" + +[[package]] +name = "futures-lite" +version = "1.12.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "futures-sink" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364" + +[[package]] +name = "futures-task" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +dependencies = [ + "gloo-timers", + "send_wrapper", +] + +[[package]] +name = "futures-util" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1" +dependencies = [ + "futures 0.1.31", + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + +[[package]] +name = "generic-array" +version = "0.14.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "221996f774192f0f718773def8201c4ae31f02616a54ccfc2d358bb0e5cefdec" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +dependencies = [ + "aho-corasick", + "bstr 1.2.0", + "fnv", + "log", + "regex", +] + +[[package]] +name = "gloo-net" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" 
+dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8e8fc851e9c7b9852508bc6e3f690f452f474417e8545ec9857b7f7377036b5" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "governor" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19775995ee20209163239355bc3ad2f33f83da35d9ef72dea26e5af753552c87" +dependencies = [ + "dashmap", + "futures 0.3.26", + "futures-timer", + "no-std-compat", + "nonzero_ext", + "parking_lot 0.12.1", + "quanta 0.9.3", + "rand 0.8.5", + "smallvec", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +dependencies = [ + "bytes 1.4.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util 0.7.6", + "tracing", +] + +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "handlebars" +version = "4.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashlink" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +dependencies = [ + "hashbrown 0.11.2", +] + +[[package]] +name = "headers" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +dependencies = [ + "base64 0.13.1", + "bitflags", + "bytes 1.4.0", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +dependencies = [ + "crypto-mac 0.10.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +dependencies = [ + "crypto-mac 0.11.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi 0.3.9", +] + +[[package]] +name = "http" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +dependencies = [ + "bytes 1.4.0", + "fnv", + "itoa 1.0.5", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes 
1.4.0", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c" +dependencies = [ + "bytes 1.4.0", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 1.0.5", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "log", + "rustls", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "webpki-roots", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.4.0", + "hyper", + "native-tls", + "tokio", + 
"tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi 0.3.9", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg 1.1.0", + "hashbrown 0.12.3", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +dependencies = [ + "libc", + "windows-sys 0.45.0", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + "libc", +] + +[[package]] +name = "ipnet" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" + +[[package]] +name = "ipnetwork" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02c3eaab3ac0ede60ffa41add21970a7df7d91772c03383aac6c2c3d53cc716b" + +[[package]] +name = "is-terminal" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef" +dependencies = [ + "hermit-abi 
0.3.1", + "io-lifetimes", + "rustix", + "windows-sys 0.45.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" + +[[package]] +name = "jobserver" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpc-client-transports" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "derive_more", + "futures 0.3.26", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "jsonrpc-pubsub", + "log", + "serde", + "serde_json", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.26", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = 
"git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "futures 0.3.26", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpc-core-client" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "futures 0.3.26", + "jsonrpc-client-transports", +] + +[[package]] +name = "jsonrpc-derive" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "proc-macro-crate 0.1.5", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "jsonrpc-http-server" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "futures 0.3.26", + "hyper", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "jsonrpc-server-utils", + "log", + "net2", + "parking_lot 0.11.2", + "unicase", +] + +[[package]] +name = "jsonrpc-pubsub" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "futures 0.3.26", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "lazy_static", + "log", + "parking_lot 0.11.2", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "jsonrpc-server-utils" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "bytes 1.4.0", + "futures 0.3.26", + "globset", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "lazy_static", + "log", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "unicase", +] + +[[package]] 
+name = "jsonrpc-ws-server" +version = "18.0.0" +source = "git+https://github.com/matter-labs/jsonrpc.git?branch=master#12c53e3e20c09c2fb9966a4ef1b0ea63de172540" +dependencies = [ + "futures 0.3.26", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "jsonrpc-server-utils", + "log", + "parity-ws", + "parking_lot 0.11.2", + "slab", +] + +[[package]] +name = "jsonrpsee" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965de52763f2004bc91ac5bcec504192440f0b568a5d621c59d9dbd6f886c3fb" +dependencies = [ + "anyhow", + "futures-channel", + "futures-timer", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util 0.7.6", + "tracing", + "webpki-roots", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e70b4439a751a5de7dd5ed55eacff78ebf4ffe0fc009cb1ebb11417f5b536b" +dependencies = [ + "anyhow", + "arrayvec 0.7.2", + "async-lock", + "async-trait", + "beef", + "futures-channel", + "futures-timer", + "futures-util", + "globset", + "hyper", + "jsonrpsee-types", + "parking_lot 0.12.1", + "rand 0.8.5", + "rustc-hash", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.16.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc345b0a43c6bc49b947ebeb936e886a419ee3d894421790c969cc56040542ad" +dependencies = [ + "async-trait", + "hyper", + "hyper-rustls", + "jsonrpsee-core", + "jsonrpsee-types", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baa6da1e4199c10d7b1d0a6e5e8bd8e55f351163b6f4b3cbb044672a69bd4c1c" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate 1.3.0", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb69dad85df79527c019659a992498d03f8495390496da2f07e6c24c2b356fc" +dependencies = [ + "futures-channel", + "futures-util", + "http", + "hyper", + "jsonrpsee-core", + "jsonrpsee-types", + "serde", + "serde_json", + "soketto", + "tokio", + "tokio-stream", + "tokio-util 0.7.6", + "tower", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bd522fe1ce3702fd94812965d7bb7a3364b1c9aba743944c5a00529aae80f8c" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" +dependencies = [ + "http", + "jsonrpsee-client-transport", + 
"jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonwebtoken" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32" +dependencies = [ + "base64 0.12.3", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if 1.0.0", + "ecdsa", + "elliptic-curve", + "sha2 0.10.6", +] + +[[package]] +name = "keccak" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = 
"0.2.139" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if 1.0.0", + "winapi 0.3.9", +] + +[[package]] +name = "libm" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" + +[[package]] +name = "librocksdb-sys" +version = "0.6.1+6.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bc587013734dadb7cf23468e531aa120788b87243648be42e2d3a072186291" +dependencies = [ + "bindgen", + "bzip2-sys", + "cc", + "glob", + "libc", + "libz-sys", +] + +[[package]] +name = "libz-sys" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "loadnext" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "envy", + "futures 0.3.26", + "hex", + "num 0.3.1", + "once_cell", + "rand 0.8.5", + "rand_distr", + "regex", + "reqwest", + "serde", + "serde_json", + "static_assertions", + "thiserror", + "tokio", + "vlog", + "zksync", + "zksync_config", + "zksync_eth_client", + "zksync_eth_signer", + 
"zksync_types", + "zksync_utils", + "zksync_web3_decl", +] + +[[package]] +name = "local-channel" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +dependencies = [ + "futures-core", + "futures-sink", + "futures-util", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" + +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg 1.1.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", + "value-bag", +] + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "md-5" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5a279bb9607f9f53c22d496eade00d138d1bdcccd07d74650387cf94942a15" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "metrics" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b9b8653cec6897f73b519a43fba5ee3d50f62fe9af80b428accdcc093b4a849" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8603921e1f54ef386189335f288441af761e0fc61bcb552168d9cedfe63ebc70" +dependencies = [ + "hyper", + "indexmap", + "ipnet", + "metrics", + "metrics-util", + "parking_lot 0.12.1", + "portable-atomic", + "quanta 0.10.1", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-macros" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" +dependencies = [ + 
"proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "metrics-util" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d24dc2dbae22bff6f1f9326ffce828c9f07ef9cc1e8002e5279f845432a30a" +dependencies = [ + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", + "hashbrown 0.12.3", + "metrics", + "num_cpus", + "parking_lot 0.12.1", + "portable-atomic", + "quanta 0.10.1", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.6.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" +dependencies = [ + "cfg-if 0.1.10", + "fuchsia-zircon", + "fuchsia-zircon-sys", + "iovec", + "kernel32-sys", + "libc", + "log", + "miow", + "net2", + "slab", + "winapi 0.2.8", +] + +[[package]] +name = "mio" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.42.0", +] + +[[package]] +name = "mio-extras" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" +dependencies = [ + 
"lazycell", + "log", + "mio 0.6.23", + "slab", +] + +[[package]] +name = "miow" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" +dependencies = [ + "kernel32-sys", + "net2", + "winapi 0.2.8", + "ws2_32-sys", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "net2" +version = "0.2.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "libc", + "static_assertions", +] + +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi 0.3.9", +] + +[[package]] +name = "num" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b7a8e9be5e039e2ff869df49155f1c06bd01ade2117ec783e56ab0932b67a8f" +dependencies = [ + "num-bigint 0.3.3", + "num-complex 0.3.1", + "num-integer", + "num-iter", + "num-rational 0.3.2", + "num-traits", +] + +[[package]] +name = "num" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +dependencies = [ + "num-bigint 0.4.3", + "num-complex 0.4.3", + "num-integer", + "num-iter", + "num-rational 0.4.1", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + 
"num-traits", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +dependencies = [ + "num-traits", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafd0b45c5537c3ba526f79d3e75120036502bebacbb3f3220914067ce39dbf2" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg 1.1.0", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg 1.1.0", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "openssl" +version = "0.10.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7" +dependencies = [ + "autocfg 1.1.0", + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6105e89802af13fdf48c49d7646d3b533a70e536d818aae7e78ba0433d01acb8" +dependencies = [ + "async-trait", + "crossbeam-channel 0.5.6", + "futures-channel", + "futures-executor", + "futures-util", + "js-sys", + "lazy_static", + "percent-encoding", + "pin-project", + "rand 0.8.5", + "thiserror", +] + +[[package]] +name = "opentelemetry-http" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449048140ee61e28f57abe6e9975eedc1f3a29855c7407bd6c12b18578863379" +dependencies = [ + "async-trait", + "bytes 1.4.0", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1a6ca9de4c8b00aa7f1a153bd76cb263287155cec642680d79d98706f3d28a" +dependencies = [ + "async-trait", + "futures 0.3.26", + "futures-util", + "http", + "opentelemetry", + "opentelemetry-http", + "prost", + "prost-build", + "reqwest", + "thiserror", + "tokio", + "tonic", + "tonic-build", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "985cc35d832d412224b2cffe2f9194b1b89b6aa5d0bef76d080dce09d90e62bd" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "os_info" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c424bc68d15e0778838ac013b5b3449544d8133633d8016319e7e05a820b8c0" +dependencies = [ + "log", + "serde", + "winapi 0.3.9", +] + +[[package]] +name = "os_str_bytes" +version = "6.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db007b21259660d025918e653508f03050bf23fb96a88601f9936329faadc597" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "parity-crypto" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b92ea9ddac0d6e1db7c49991e7d397d34a9fd814b4c93cda53788e8eef94e35" +dependencies = [ + "aes", + "aes-ctr", + "block-modes", + "digest 0.9.0", + "ethereum-types", + "hmac 0.10.1", + "lazy_static", + "pbkdf2 0.7.5", + "ripemd160", + "rustc-hex", + "scrypt", + "secp256k1 0.20.3", + "sha2 
0.9.9", + "subtle", + "tiny-keccak 2.0.2", + "zeroize", +] + +[[package]] +name = "parity-scale-codec" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +dependencies = [ + "arrayvec 0.7.2", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +dependencies = [ + "proc-macro-crate 1.3.0", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "parity-ws" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5983d3929ad50f12c3eb9a6743f19d691866ecd44da74c0a3308c3f8a56df0c6" +dependencies = [ + "byteorder", + "bytes 0.4.12", + "httparse", + "log", + "mio 0.6.23", + "mio-extras", + "rand 0.7.3", + "sha-1 0.8.2", + "slab", + "url", +] + +[[package]] +name = "parking" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi 0.3.9", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "password-hash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54986aa4bfc9b98c6a5f40184223658d187159d7b3c6af33f2b2aa25ae1db0fa" +dependencies = [ + "base64ct", + "rand_core 0.6.4", +] + +[[package]] +name = "paste" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" + +[[package]] +name = "pbkdf2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3b8c0d71734018084da0c0354193a5edfb81b20d2d57a92c5b154aefc554a4a" +dependencies = [ + "crypto-mac 0.10.1", +] + +[[package]] +name = "pbkdf2" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf916dd32dd26297907890d99dc2740e33f6bd9073965af4ccff2967962f5508" +dependencies = [ + "base64ct", + "crypto-mac 0.10.1", + "hmac 0.10.1", + "password-hash", + "sha2 0.9.9", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +dependencies = [ + "base64 0.13.1", + "once_cell", + "regex", +] + +[[package]] +name = 
"percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ac3922aac69a40733080f53c1ce7f91dcf57e1a5f6c52f421fadec7fbdc4b69" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06646e185566b5961b4058dd107e0a7f56e77c3f484549fb119867773c0f202" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pest_meta" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6f60b2ba541577e2a0c307c8f39d1439108120eb7903adeb6497fa880c59616" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.6", +] + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2 
1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "plotters" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" + +[[package]] +name = "plotters-svg" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "polling" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "libc", + "log", + "wepoll-ffi", + "windows-sys 0.42.0", +] + +[[package]] +name = "portable-atomic" 
+version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +dependencies = [ + "once_cell", + "toml_edit 0.18.1", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proc-macro2" +version = "1.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus_exporter" +version = "1.0.0" +dependencies = [ + "metrics", + "metrics-exporter-prometheus", + "tokio", + "vlog", + "zksync_config", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes 1.4.0", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes 1.4.0", + "heck 0.3.3", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prost", + "prost-types", + "regex", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes 1.4.0", + "prost", +] + +[[package]] +name = "pwd" +version = "1.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c71c0c79b9701efe4e1e4b563b2016dd4ee789eb99badcb09d61ac4b92e4a2" +dependencies = [ + "libc", + "thiserror", +] + +[[package]] +name = "quanta" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20afe714292d5e879d8b12740aa223c6a88f118af41870e8b6196e39a02238a8" +dependencies = [ + "crossbeam-utils 0.8.14", + "libc", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi 0.3.9", +] + +[[package]] +name = "quanta" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e31331286705f455e56cca62e0e717158474ff02b7936c1fa596d983f4ae27" +dependencies = [ + "crossbeam-utils 0.8.14", + "libc", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi 0.3.9", +] + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +dependencies = [ + "proc-macro2 1.0.51", +] + +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi 0.3.9", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg", + "rand_xorshift", + "winapi 0.3.9", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.8", +] + +[[package]] +name = "rand_distr" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi 0.3.9", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi 0.3.9", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "raw-cpuid" +version = "10.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c307f7aacdbab3f0adee67d52739a1d71112cc068d6fab169ddeb18e48877fad" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rayon" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +dependencies = [ + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-utils 0.8.14", + "num_cpus", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom 0.1.16", + "redox_syscall 0.1.57", + "rust-argon2", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom 0.2.8", + "redox_syscall 0.2.16", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "reqwest" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9" +dependencies = [ + "base64 0.21.0", + "bytes 1.4.0", + "encoding_rs", + "futures-core", + 
"futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util 0.7.6", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rescue_poseidon" +version = "0.4.1" +source = "git+https://github.com/matter-labs/rescue-poseidon#fbb3882b8f1e63dff769a1f1a59211d0e0838351" +dependencies = [ + "addchain", + "arrayvec 0.7.2", + "blake2 0.10.6", + "byteorder", + "franklin-crypto", + "num-bigint 0.3.3", + "num-integer", + "num-iter", + "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint", + "hmac 0.12.1", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi 0.3.9", +] + +[[package]] +name = "ripemd160" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes 1.4.0", + 
"rustc-hex", +] + +[[package]] +name = "rocksdb" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "620f4129485ff1a7128d184bc687470c21c7951b64779ebc9cfdad3dcd920290" +dependencies = [ + "libc", + "librocksdb-sys", +] + +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64 0.13.1", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustversion" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" + +[[package]] +name = "ryu" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" + +[[package]] +name = "salsa20" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "399f290ffc409596022fce5ea5d4138184be4784f2b28c62c59f0d8389059a15" +dependencies = [ + "cipher", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "scratch" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" + +[[package]] +name = "scrypt" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da492dab03f925d977776a0b7233d7b934d6dc2b94faead48928e2e9bacedb9" +dependencies = [ + "base64 0.13.1", + "hmac 0.10.1", + "pbkdf2 0.6.0", + "rand 0.7.3", + "rand_core 0.5.1", + "salsa20", + "sha2 0.9.9", + "subtle", +] + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array 0.14.6", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" +dependencies = [ + "rand 0.6.5", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" + +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + +[[package]] +name = "sentry" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6097dc270a9c4555c5d6222ed243eaa97ff38e29299ed7c5cb36099033c604e" +dependencies = [ + "httpdate", + "native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-panic", + "tokio", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d92d1e4d591534ae4f872d6142f3b500f4ffc179a6aed8a3e86c7cc96d10a6a" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afa877b1898ff67dd9878cf4bec4e53cef7d3be9f14b1fc9e4fcdf36f8e4259" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.29.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc43eb7e4e3a444151a0fe8a0e9ce60eabd905dae33d66e257fa26f1b509c1bd" +dependencies = [ + "once_cell", + "rand 0.8.5", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-panic" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccab4fab11e3e63c45f4524bee2e75cde39cdf164cb0b0cbe6ccd1948ceddf66" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-types" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63708ec450b6bdcb657af760c447416d69c38ce421f34e5e2e9ce8118410bc7" +dependencies = [ + "debugid", + "getrandom 0.2.8", + "hex", + "serde", + "serde_json", + "thiserror", + "time 0.3.17", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "serde_json" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +dependencies = [ + "indexmap", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "hex", + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "set_correct_tx_format_for_priority_ops" +version = "0.1.0" +dependencies = [ + "tokio", + "zksync_dal", + "zksync_types", +] + +[[package]] +name = "sha-1" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug 0.3.0", +] + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug 0.3.0", +] + +[[package]] +name = "sha3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +dependencies = [ + "digest 0.10.6", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + +[[package]] +name = "simple_asn1" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" +dependencies = [ + "chrono", + "num-bigint 0.2.6", + "num-traits", +] + +[[package]] +name = "sketches-ddsketch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ceb945e54128e09c43d8e4f1277851bd5044c6fc540bbaa2ad888f60b3da9ae7" + +[[package]] +name = "slab" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "socket2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes 1.4.0", + "futures 0.3.26", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha-1 0.9.8", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spki" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "splitmut" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" + +[[package]] +name = "sqlformat" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7911b0031a0247af40095838002999c7a52fba29d9739e93326e71a5a1bc9d43" +dependencies = [ + "sqlx-core", + "sqlx-macros", +] + +[[package]] +name = "sqlx-core" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aec89bfaca8f7737439bad16d52b07f1ccd0730520d3bf6ae9d069fe4b641fb1" +dependencies = [ + "ahash", + "atoi", + "base64 0.13.1", + "bigdecimal", + "bitflags", + "byteorder", + "bytes 1.4.0", + "chrono", + "crc", + "crossbeam-channel 0.5.6", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", + "dirs 3.0.2", + "either", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-util", + "hashlink", + "hex", + "hmac 0.11.0", + "indexmap", + "ipnetwork", + "itoa 0.4.8", + "libc", + "log", + "md-5", + "memchr", + "num-bigint 0.3.3", + "once_cell", + "parking_lot 0.11.2", + "percent-encoding", + "rand 0.8.5", + "serde", + "serde_json", + "sha-1 0.9.8", + "sha2 0.9.9", + "smallvec", + "sqlformat", + "sqlx-rt", + "stringprep", + "thiserror", + "url", + "whoami", +] + +[[package]] +name = "sqlx-macros" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"584866c833511b1a152e87a7ee20dee2739746f60c858b3c5209150bc4b466f5" +dependencies = [ + "dotenv", + "either", + "heck 0.3.3", + "hex", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "serde_json", + "sha2 0.9.9", + "sqlx-core", + "sqlx-rt", + "syn 1.0.107", + "url", +] + +[[package]] +name = "sqlx-rt" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4db708cd3e459078f85f39f96a00960bd841f66ee2a669e90bf36907f5a79aae" +dependencies = [ + "async-native-tls", + "async-std", + "native-tls", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "storage_logs_migration" +version = "0.1.0" +dependencies = [ + "tokio", + "zksync_dal", + "zksync_types", +] + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap 2.34.0", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustversion", + "syn 1.0.107", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid", +] + +[[package]] +name = "syn" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "unicode-ident", +] + +[[package]] +name = "sync_vm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "arrayvec 0.7.2", + "cs_derive", + "derivative", + "eip712-signature", + "franklin-crypto", + "hex", + "itertools", + "num-bigint 0.4.3", + "num-derive 0.3.3", + "num-integer", + "num-traits", + "once_cell", + "rand 0.4.6", + "rescue_poseidon", + "serde", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "zk_evm", + 
"zkevm_opcode_defs", +] + +[[package]] +name = "system-constants-generator" +version = "0.1.0" +dependencies = [ + "codegen 0.2.0", + "num 0.3.1", + "once_cell", + "rand 0.7.3", + "serde", + "serde_json", + "tempfile", + "vm", + "zksync_contracts", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "libc", + "redox_syscall 0.2.16", + "remove_dir_all", + "winapi 0.3.9", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-log" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa 1.0.5", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +dependencies = [ + "autocfg 1.1.0", + "bytes 1.4.0", + "libc", + "memchr", + "mio 0.8.5", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.42.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + 
"native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6a3b08b64e6dfad376fa2432c7b1f01522e37a623c3050bc95db2d3ff21583" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" + +[[package]] +name = "toml_edit" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5376256e44f2443f8896ac012507c19a012df0fe8758b55246ae51a2279db51f" +dependencies = [ + "combine", + "indexmap", + "itertools", +] + +[[package]] +name = "toml_edit" +version = "0.18.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +dependencies = [ + "indexmap", + "nom8", + "toml_datetime", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "prost-derive", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2 1.0.51", + "prost-build", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util 0.7.6", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbbe89715c1dbbb790059e2565353978564924ee85017b5fff365c872ff6721f" +dependencies = [ + "once_cell", + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time 0.3.17", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" + +[[package]] +name = "unicode-ident" +version = "1.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +dependencies = [ + "base64 0.13.1", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna 0.3.0", + "percent-encoding", + "serde", +] + +[[package]] +name = 
"uuid" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79" +dependencies = [ + "getrandom 0.2.8", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +dependencies = [ + "ctor", + "version_check", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vlog" +version = "1.0.0" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "sentry", + "serde_json", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "vm" +version = "0.1.0" +dependencies = [ + "hex", + "itertools", + "metrics", + "once_cell", + "serde", + "serde_json", + "tempfile", + "thiserror", + "tracing", + "vlog", + "zk_evm", + "zkevm-assembly", + "zksync_config", + "zksync_contracts", + "zksync_crypto", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "walkdir" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +dependencies = [ + "same-file", + "winapi 0.3.9", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote 1.0.23", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web3" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f258e254752d210b84fe117b31f1e3cc9cbf04c0d747eb7f8cf7cf5e370f6d" +dependencies = [ + "arrayvec 0.7.2", + "base64 0.13.1", + "bytes 1.4.0", + "derive_more", + "ethabi", + "ethereum-types", + "futures 0.3.26", + "futures-timer", + "headers", + "hex", + 
"idna 0.2.3", + "jsonrpc-core 18.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "log", + "once_cell", + "parking_lot 0.12.1", + "pin-project", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_json", + "tiny-keccak 2.0.2", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "wepoll-ffi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" +dependencies = [ + "cc", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "whoami" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45dbc71f0cdca27dc261a9bd37ddec174e4a0af2b900b890f378460f745426e3" +dependencies = [ + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-build" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "zeroize" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" + +[[package]] +name = "zk_evm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zk_evm.git?branch=v1.3.1#76a3877f5a0b7449bcca73d35bae3ae226996fdd" +dependencies = [ + "blake2 0.10.6", + "k256", + "lazy_static", + "num 0.4.0", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "static_assertions", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm-assembly" +version = "1.3.0" +source = "git+https://github.com/matter-labs/zkEVM-assembly.git?branch=v1.3.1#e2a2145a90ceeb54407df1be5254291a9f693422" +dependencies = [ + "env_logger 0.9.3", + "hex", + "lazy_static", + "log", + "nom", + "num-bigint 0.4.3", + "num-traits", + "regex", + "smallvec", + "structopt", + "thiserror", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm_opcode_defs" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_opcode_defs.git?branch=v1.3.1#dc5b5c463d867855514f03c179992acbde74face" +dependencies = [ + "bitflags", + "ethereum-types", + "lazy_static", + "sha2 0.10.6", +] + +[[package]] +name = "zkevm_test_harness" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_test_harness.git?branch=v1.3.1#2100f00f9cb79e851bf11ec21391672244e382fc" +dependencies = [ + "bincode", + "blake2 0.10.6", + "circuit_testing", + "codegen 0.2.0", + "crossbeam 0.8.2", + "derivative", + "env_logger 0.10.0", + "hex", + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "rayon", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "structopt", + "sync_vm", + "test-log", + "tracing", + "zk_evm", + "zkevm-assembly", +] + +[[package]] +name = "zksync" +version = "0.3.0" +dependencies = [ + "anyhow", + "hex", + "num 0.3.1", + "serde_json", + 
"thiserror", + "tokio", + "zksync_config", + "zksync_eth_client", + "zksync_eth_signer", + "zksync_types", + "zksync_utils", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_basic_types" +version = "1.0.0" +dependencies = [ + "serde", + "web3", +] + +[[package]] +name = "zksync_circuit_breaker" +version = "1.0.0" +dependencies = [ + "async-trait", + "convert_case 0.6.0", + "futures 0.3.26", + "hex", + "serde", + "serde_json", + "thiserror", + "tokio", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_eth_client", + "zksync_types", + "zksync_utils", + "zksync_verification_key_generator_and_server", +] + +[[package]] +name = "zksync_config" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "envy", + "num 0.3.1", + "once_cell", + "serde", + "serde_json", + "url", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_contract_verifier" +version = "0.1.0" +dependencies = [ + "chrono", + "ctrlc", + "ethabi", + "futures 0.3.26", + "hex", + "lazy_static", + "metrics", + "prometheus_exporter", + "serde", + "serde_json", + "structopt", + "thiserror", + "tokio", + "vlog", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_queued_job_processor", + "zksync_types", +] + +[[package]] +name = "zksync_contracts" +version = "1.0.0" +dependencies = [ + "ethabi", + "hex", + "once_cell", + "serde_json", + "zksync_utils", +] + +[[package]] +name = "zksync_core" +version = "1.0.0" +dependencies = [ + "actix-cors", + "actix-rt", + "actix-web", + "anyhow", + "assert_matches", + "async-std", + "async-trait", + "bigdecimal", + "bincode", + "chrono", + "ctrlc", + "db_test_macro", + "futures 0.3.26", + "governor", + "hex", + "itertools", + "jsonrpc-core 18.0.0 (git+https://github.com/matter-labs/jsonrpc.git?branch=master)", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-http-server", + "jsonrpc-pubsub", + "jsonrpc-ws-server", + "metrics", + "num 0.3.1", + "once_cell", + "prometheus_exporter", + "rand 0.8.5", + 
"reqwest", + "serde", + "serde_json", + "structopt", + "tempfile", + "thiserror", + "tokio", + "tracing", + "vlog", + "vm", + "zksync_circuit_breaker", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_eth_client", + "zksync_eth_signer", + "zksync_mempool", + "zksync_merkle_tree", + "zksync_mini_merkle_tree", + "zksync_object_store", + "zksync_prover_utils", + "zksync_queued_job_processor", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", + "zksync_verification_key_generator_and_server", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_crypto" +version = "1.0.0" +dependencies = [ + "base64 0.13.1", + "blake2 0.10.6", + "hex", + "once_cell", + "rand 0.4.6", + "serde", + "serde_json", + "sha2 0.9.9", + "thiserror", + "zksync_basic_types", +] + +[[package]] +name = "zksync_dal" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-std", + "bigdecimal", + "bincode", + "db_test_macro", + "hex", + "itertools", + "metrics", + "num 0.3.1", + "once_cell", + "serde_json", + "sqlx", + "thiserror", + "tokio", + "vlog", + "vm", + "zksync_config", + "zksync_contracts", + "zksync_object_store", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_eth_client" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-trait", + "hex", + "jsonrpc-core 18.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "metrics", + "parity-crypto", + "serde", + "thiserror", + "tokio", + "vlog", + "zksync_config", + "zksync_contracts", + "zksync_eth_signer", + "zksync_types", +] + +[[package]] +name = "zksync_eth_signer" +version = "1.0.0" +dependencies = [ + "actix-rt", + "actix-web", + "async-trait", + "futures 0.3.26", + "hex", + "jsonrpc-core 18.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-crypto", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_derive", + "serde_json", + "thiserror", + "tokio", + "zksync_types", 
+] + +[[package]] +name = "zksync_mempool" +version = "1.0.0" +dependencies = [ + "metrics", + "vlog", + "zksync_types", +] + +[[package]] +name = "zksync_merkle_tree" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-trait", + "bincode", + "byteorder", + "criterion", + "fnv", + "futures 0.3.26", + "itertools", + "metrics", + "once_cell", + "rand 0.4.6", + "rayon", + "serde", + "serde_json", + "tempfile", + "thiserror", + "tokio", + "vlog", + "zksync_config", + "zksync_crypto", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_mini_merkle_tree" +version = "1.0.0" +dependencies = [ + "once_cell", + "rayon", + "zksync_basic_types", + "zksync_crypto", +] + +[[package]] +name = "zksync_object_store" +version = "1.0.0" +dependencies = [ + "cloud-storage", + "expanduser", + "metrics", + "tempdir", + "tokio", + "vlog", + "zksync_config", + "zksync_types", +] + +[[package]] +name = "zksync_prover_utils" +version = "1.0.0" +dependencies = [ + "metrics", + "reqwest", + "vlog", +] + +[[package]] +name = "zksync_queued_job_processor" +version = "1.0.0" +dependencies = [ + "async-trait", + "tokio", + "vlog", + "zksync_dal", + "zksync_utils", +] + +[[package]] +name = "zksync_state" +version = "1.0.0" +dependencies = [ + "tempfile", + "vlog", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_storage" +version = "1.0.0" +dependencies = [ + "bincode", + "byteorder", + "num_cpus", + "once_cell", + "rocksdb", + "serde", + "vlog", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_test_account" +version = "1.0.0" +dependencies = [ + "num 0.3.1", + "zksync_basic_types", + "zksync_crypto", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_testkit" +version = "1.0.0" +dependencies = [ + "anyhow", + "futures 0.3.26", + "num 0.3.1", + "once_cell", + "rand 0.7.3", + "serde", + "serde_json", + "structopt", + "tempfile", + "tokio", + "vlog", + "vm", + 
"zksync_config", + "zksync_contracts", + "zksync_core", + "zksync_crypto", + "zksync_dal", + "zksync_eth_client", + "zksync_eth_signer", + "zksync_mempool", + "zksync_prover_utils", + "zksync_state", + "zksync_storage", + "zksync_test_account", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_types" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "blake2 0.10.6", + "chrono", + "codegen 0.1.0", + "ethbloom", + "hex", + "metrics", + "num 0.3.1", + "once_cell", + "parity-crypto", + "rayon", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror", + "tiny-keccak 1.5.0", + "tokio", + "zk_evm", + "zkevm-assembly", + "zkevm_test_harness", + "zksync_basic_types", + "zksync_config", + "zksync_contracts", + "zksync_mini_merkle_tree", + "zksync_utils", +] + +[[package]] +name = "zksync_utils" +version = "1.0.0" +dependencies = [ + "anyhow", + "bigdecimal", + "envy", + "futures 0.3.26", + "hex", + "num 0.3.1", + "serde", + "serde_json", + "thiserror", + "tokio", + "zk_evm", + "zksync_basic_types", +] + +[[package]] +name = "zksync_verification_key_generator_and_server" +version = "1.0.0" +dependencies = [ + "bincode", + "circuit_testing", + "ff_ce", + "hex", + "itertools", + "serde_json", + "structopt", + "toml_edit 0.14.4", + "vlog", + "zksync_types", +] + +[[package]] +name = "zksync_web3_decl" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "itertools", + "jsonrpsee", + "rlp", + "serde", + "serde_json", + "thiserror", + "zksync_types", +] + +[[package]] +name = "zstd" +version = "0.12.3+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "6.0.4+zstd.1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543" 
+dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.7+zstd.1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5" +dependencies = [ + "cc", + "libc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000000..97b46e9d50af --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,63 @@ +[workspace] +members = [ + # Prover + # We don't include `prove` image here since it cannot be built with stable rust + # and we don't want to use nightly for the whole workspace + # "core/bin/prover", + # Server + "core/bin/zksync_core", + "core/bin/admin-tools", + "core/bin/system-constants-generator", + # Contract verifier + "core/bin/contract-verifier", + # Setup key generator and server: its commented as it cannot be built with stable rust. + # "core/bin/setup_key_generator_and_server", + # Verification key generator and server + "core/bin/verification_key_generator_and_server", + # Migration to fill events.tx_initiator_address table for old events + "core/bin/events_tx_initiator_address_migration", + "core/bin/set_correct_tx_format_for_priority_ops", + "core/bin/storage_logs_migration", + # Tool for removing blobs from database + "core/bin/blob_purger", + # circuit synthesizer: its commented as it cannot be built with stable rust. 
+ # "core/bin/circuit_synthesizer", + # Libraries + "core/lib/basic_types", + "core/lib/config", + "core/lib/contracts", + "core/lib/crypto", + "core/lib/circuit_breaker", + "core/lib/dal", + "core/lib/db_test_macro", + "core/lib/eth_client", + "core/lib/eth_signer", + "core/lib/mempool", + "core/lib/merkle_tree", + "core/lib/object_store", + "core/lib/mini_merkle_tree", + "core/lib/prometheus_exporter", + "core/lib/queued_job_processor", + "core/lib/state", + "core/lib/storage", + "core/lib/types", + "core/lib/prover_utils", + "core/lib/utils", + "core/lib/vlog", + "core/lib/vm", + "core/lib/web3_decl", + + # Test infrastructure + "core/tests/loadnext", + "core/tests/testkit", + + # SDK section + "sdk/zksync-rs", +] +resolver = "2" + +exclude = [ "core/bin/prover", "core/bin/setup_key_generator_and_server", "core/bin/circuit_synthesizer"] + +[profile.test.package.zksync_merkle_tree] +opt-level = 3 + diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 000000000000..d9a10c0d8e86 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 000000000000..2739ea6e22a9 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Matter Labs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 000000000000..2ca6fddd8c8e --- /dev/null +++ b/README.md @@ -0,0 +1,37 @@ +# zkSync Era: A ZK Rollup For Scaling Ethereum + +[![Logo](eraLogo.svg)](https://zksync.io/) + +zkSync Era is a layer 2 rollup that uses zero-knowledge proofs to scale Ethereum without compromising on security or +decentralization. Since it's EVM compatible (Solidity/Vyper), 99% of Ethereum projects can redeploy without refactoring +or re-auditing a single line of code. zkSync Era also uses an LLVM-based compiler that will eventually let developers +write smart contracts in C++, Rust and other popular languages. 
+ +## Knowledge Index + +The following questions will be answered by the following resources: + +| Question | Resource | +| ------------------------------------------------------- | --------------------------------------- | +| What do I need to develop the project locally? | [development.md](docs/development.md) | +| How can I set up my dev environment? | [setup-dev.md](docs/setup-dev.md) | +| How can I run the project? | [launch.md](docs/launch.md) | +| What is the logical project structure and architecture? | [architecture.md](docs/architecture.md) | +| Where can I find developer docs? | [docs](https://v2-docs.zksync.io/dev/) | + +## License + +zkSync Era is distributed under the terms of either + +- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or ) +- MIT license ([LICENSE-MIT](LICENSE-MIT) or ) + +at your option. + +## Official Links + +- [Website](https://zksync.io/) +- [GitHub](https://github.com/matter-labs) +- [Twitter](https://twitter.com/zksync) +- [Twitter for Devs](https://twitter.com/zkSyncDevs) +- [Discord](https://discord.gg/nMaPGrDDwk) diff --git a/bin/ci_run b/bin/ci_run new file mode 100755 index 000000000000..ea9604acdb2e --- /dev/null +++ b/bin/ci_run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Runs the command from within CI docker-compose environment. 
+cd $ZKSYNC_HOME + +docker-compose -f docker-compose-runner.yml exec -T zk $@ diff --git a/bin/run_loadtest_from_github_actions b/bin/run_loadtest_from_github_actions new file mode 100755 index 000000000000..ee7b024b284a --- /dev/null +++ b/bin/run_loadtest_from_github_actions @@ -0,0 +1,32 @@ +#!/bin/bash + +# Prepare environments +IFS=',' +read -ra weights <<<"$TRANSACTIONS_WEIGHTS" #reading $TRANSACTIONS_WEIGHTS as an array as tokens separated by IFS +export TRANSACTION_WEIGHTS_DEPOSIT=${weights[0]} +export TRANSACTION_WEIGHTS_L1_TRANSACTIONS=${weights[1]} +export TRANSACTION_WEIGHTS_L2_TRANSACTIONS=${weights[2]} +export TRANSACTION_WEIGHTS_WITHDRAWAL=${weights[3]} + + +read -ra execution_params <<<"$CONTRACT_EXECUTION_PARAMS" #reading $CONTRACT_EXECUTION_PARAMS as an array as tokens separated by IFS +export CONTRACT_EXECUTION_PARAMS_READS=${execution_params[0]} +export CONTRACT_EXECUTION_PARAMS_WRITES=${execution_params[1]} +export CONTRACT_EXECUTION_PARAMS_EVENTS=${execution_params[2]} +export CONTRACT_EXECUTION_PARAMS_HASHES=${execution_params[3]} +export CONTRACT_EXECUTION_PARAMS_RECURSIVE_CALLS=${execution_params[4]} +export CONTRACT_EXECUTION_PARAMS_DEPLOYS=${execution_params[5]} + +read -ra execution_params <<<"$EXPLORER_API_REQUESTS_WEIGHTS" #reading $EXPLORER_API_REQUESTS_WEIGHTS as an array as tokens separated by IFS +export EXPLORER_API_REQUESTS_WEIGHTS_NETWORK_STATS=${execution_params[0]} +export EXPLORER_API_REQUESTS_WEIGHTS_BLOCKS=${execution_params[1]} +export EXPLORER_API_REQUESTS_WEIGHTS_BLOCK=${execution_params[2]} +export EXPLORER_API_REQUESTS_WEIGHTS_TRANSACTIONS=${execution_params[3]} +export EXPLORER_API_REQUESTS_WEIGHTS_ACCOUNT=${execution_params[4]} +export EXPLORER_API_REQUESTS_WEIGHTS_TOKEN=${execution_params[5]} +export EXPLORER_API_REQUESTS_WEIGHTS_CONTRACT=${execution_params[6]} +export EXPLORER_API_REQUESTS_WEIGHTS_TRANSACTION=${execution_params[7]} +export EXPLORER_API_REQUESTS_WEIGHTS_ACCOUNT_TRANSACTIONS=${execution_params[8]} + 
+# Run the test +cargo run --bin loadnext diff --git a/bin/zk b/bin/zk new file mode 100755 index 000000000000..a6f51fd2c6dd --- /dev/null +++ b/bin/zk @@ -0,0 +1,9 @@ +#!/bin/bash + +if [ -z "$1" ]; then + cd $ZKSYNC_HOME + yarn && yarn zk build +else + # can't start this with yarn since it has quirks with `--` as an argument + node -- $ZKSYNC_HOME/infrastructure/zk/build/index.js "$@" +fi diff --git a/bors.toml b/bors.toml new file mode 100644 index 000000000000..339bbc08f476 --- /dev/null +++ b/bors.toml @@ -0,0 +1,17 @@ +# Set bors's timeout to 4 hours +# https://ddg.gg/?q=4+hours+in+seconds +timeout-sec = 14400 + +# If expected statuses are not specified explicitly, bors tries to "guess" and apperently does it wrong sometimes +status = [ + "codecov/patch", + "codecov/project", + "generate", + "integration", + "loadtest", + "lint", + "testkit", + "Build images / Build and Push Docker Images" +] + +use_squash_merge = true diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000000..1cd130ab48c5 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,17 @@ +coverage: + range: "10...90" + status: + # We might gate PRs based on coverage diffs, + # but we don't want that for now + project: + default: + informational: true + patch: + default: + informational: true + +comment: + require_changes: true + +github_checks: + annotations: false diff --git a/contracts b/contracts new file mode 160000 index 000000000000..496e98cd6457 --- /dev/null +++ b/contracts @@ -0,0 +1 @@ +Subproject commit 496e98cd645716ae7799e688d063c4be3cac80d6 diff --git a/core/bin/admin-tools/Cargo.toml b/core/bin/admin-tools/Cargo.toml new file mode 100644 index 000000000000..23c3bdcb9b25 --- /dev/null +++ b/core/bin/admin-tools/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "admin-tools" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] 
+description = "Admin tools CLI for zkSync" +publish = false # We don't want to publish our binaries. + +[dependencies] +anyhow = "1.0" +chrono = "^0.4" +clap = { version = "4.0", features = ["derive"] } +dotenvy = "^0.15" +tokio = { version = "1", features = ["full"] } + +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } + + diff --git a/core/bin/admin-tools/src/application.rs b/core/bin/admin-tools/src/application.rs new file mode 100644 index 000000000000..34b49eef71ea --- /dev/null +++ b/core/bin/admin-tools/src/application.rs @@ -0,0 +1,70 @@ +use std::path::Path; + +pub struct TerminalSize { + pub height: u32, + pub width: u32, +} + +pub struct App<'a> { + pub terminal: TerminalSize, + pub tokio: tokio::runtime::Runtime, + pub db: zksync_dal::StorageProcessor<'a>, +} + +pub fn create_app<'a>(profile: &Option) -> Result, AppError> { + if profile.is_some() { + let home = std::env::var("ZKSYNC_HOME").map_err(|x| AppError::Init(InitError::Env(x)))?; + + let path = + Path::new(home.as_str()).join(format!("etc/env/{}.env", profile.as_ref().unwrap())); + + dotenvy::from_filename(path) + .map_err(|x| AppError::Init(InitError::Generic(x.to_string())))?; + } + + let tokio = tokio::runtime::Runtime::new().map_err(|x| AppError::Init(InitError::IO(x)))?; + + let db = + tokio.block_on(async { zksync_dal::StorageProcessor::establish_connection(true).await }); + + let invocation = std::process::Command::new("stty") + .arg("-f") + .arg("/dev/stderr") + .arg("size") + .output(); + + let terminal = match invocation { + Result::Ok(x) if x.stderr.is_empty() => { + let mut split = std::str::from_utf8(&x.stdout).unwrap().split_whitespace(); + + TerminalSize { + height: split.next().unwrap().parse().unwrap(), + width: split.next().unwrap().parse().unwrap(), + } + } + _ => TerminalSize { + height: 60, + width: 80, + }, + }; + + Ok(App { + tokio, + db, + terminal, + }) +} + +#[derive(Debug)] +pub enum InitError { + 
Env(std::env::VarError), + IO(std::io::Error), + Generic(String), +} + +#[derive(Debug)] +pub enum AppError { + Db(String), + Command(String), + Init(InitError), +} diff --git a/core/bin/admin-tools/src/blocks.rs b/core/bin/admin-tools/src/blocks.rs new file mode 100644 index 000000000000..7a585b0a5d54 --- /dev/null +++ b/core/bin/admin-tools/src/blocks.rs @@ -0,0 +1,309 @@ +use std::cmp::{max, min}; +use std::ops::Add; +use std::{collections::HashMap, convert::TryFrom, iter}; + +use std::convert::AsRef; + +use chrono::{DateTime, Datelike, Duration, Timelike, Utc}; +use zksync_dal::prover_dal::GetProverJobsParams; +use zksync_dal::witness_generator_dal::GetWitnessJobsParams; +use zksync_types::proofs::{ProverJobStatus, WitnessJobStatus}; + +use zksync_types::{ + proofs::{AggregationRound, ProverJobInfo, WitnessJobInfo}, + L1BatchNumber, +}; + +use crate::application::{App, AppError}; + +pub struct RoundWitnessStats { + job: WitnessJobInfo, + created_at: DateTime, + updated_at: DateTime, +} + +pub struct RoundProverStats { + jobs: Vec, + updated_at: DateTime, +} + +pub struct AggregationRoundInfo { + prover: Option, + witness: Option, + round_number: AggregationRound, + created_at: DateTime, + updated_at: DateTime, +} + +pub struct BlockInfo { + id: L1BatchNumber, + aggregations: Vec>, +} + +pub fn get_block_info(id: L1BatchNumber, app: &mut App) -> Result { + /// Assumes that all provided jobs are from the same aggregation round. 
+ fn jobs_to_round_stats( + witness: Option, + proofs: Vec, + ) -> (Option, Option) { + let witness = witness.map(move |x| RoundWitnessStats { + created_at: x.created_at, + updated_at: x.updated_at, + job: x, + }); + + let prover = proofs + .iter() + .map(|x| x.updated_at) + .reduce(max) + .map(move |updated_at| RoundProverStats { + jobs: proofs, + updated_at, + }); + + (witness, prover) + } + + fn compose_round( + round: AggregationRound, + witness: Option, + prover: Option, + ) -> Option { + witness.map(move |witness| AggregationRoundInfo { + round_number: round, + created_at: witness.created_at, + updated_at: prover + .as_ref() + .map(|x| x.updated_at) + .unwrap_or_else(|| witness.updated_at), + witness: Some(witness), + prover, + }) + } + + let witness_jobs = app + .db + .witness_generator_dal() + .get_jobs(GetWitnessJobsParams { + blocks: Some(id..id), + }) + .map_err(|x| AppError::Db(x.to_string()))?; + + let proof_jobs = app + .db + .prover_dal() + .get_jobs(GetProverJobsParams::blocks(id..id)) + .map_err(|x| AppError::Db(x.to_string()))?; + + let mut proof_groups = + proof_jobs + .into_iter() + .fold(HashMap::<_, Vec<_>>::new(), |mut aggr, cur| { + aggr.entry(cur.position.aggregation_round) + .or_default() + .push(cur); + aggr + }); + + let mut witns_groups = witness_jobs + .into_iter() + .fold(HashMap::new(), |mut aggr, cur| { + if aggr.insert(cur.position.aggregation_round, cur).is_some() { + panic!("Single witness job expected per generation round") + } + aggr + }); + + let aggregations = (0..3) + .map(|x| AggregationRound::try_from(x).unwrap()) + .map(|ar| { + ( + ar, + witns_groups.remove(&ar), + proof_groups.remove(&ar).unwrap_or_default(), + ) + }) + .map(|(ar, wtns, prfs)| (ar, jobs_to_round_stats(wtns, prfs))) + .map(|(ar, (w, p))| compose_round(ar, w, p)) + .collect::>(); + + Ok(BlockInfo { id, aggregations }) +} + +pub fn print_block_info(block: &BlockInfo) -> Result<(), AppError> { + fn indent(s: &str, i: usize) -> String { + let width = " 
".repeat(i); + + String::new() + .add(width.as_str()) + .add(&s.replace('\n', &String::from("\n").add(&width))) + } + + fn timef(t: DateTime) -> String { + format!( + "{:02}:{:02}:{:02} {}-{:02}-{:02}", + t.hour(), + t.minute(), + t.second(), + t.year(), + t.month(), + t.day() + ) + } + + fn durf(d: Duration) -> String { + format!("{}:{:02}m", d.num_minutes(), d.num_seconds() % 60) + } + + fn print_existing_block( + round: &AggregationRoundInfo, + round_prev: &Option, + ) { + let (duration_reference, reference_name) = match round_prev { + Some(round_prev) => ( + round_prev.updated_at, + format!("U{}", round_prev.round_number as u32), + ), + None => (round.created_at, "C".to_string()), + }; + + let witness = match &round.witness { + Some(witness) => { + let status = match &witness.job.status { + WitnessJobStatus::Successful(x) => { + format!( + "Started: {} {}+{} +Elapsed: {}", + timef(x.started_at), + reference_name, + durf(x.started_at - duration_reference), + durf(x.time_taken) + ) + } + + _ => String::new(), + }; + + format!( + " Witness job: {}\n{}", + witness.job.status, + indent(status.as_str(), 6) + ) + } + None => "No witness job.".to_string(), + }; + + let prover = match &round.prover { + Some(prover) if !prover.jobs.is_empty() => { + let statuses = prover + .jobs + .iter() + .fold(HashMap::new(), |mut aggr, cur| { + aggr.entry(cur.status.as_ref()) + .and_modify(|x| *x += 1) + .or_insert(1); + aggr + }) + .iter() + .map(|(status, count)| format!("{}: {}", status, count)) + .collect::>() + .join(", "); + + fn map(f: fn(T, T) -> T, x: Option, y: T) -> Option + where + T: Copy, + { + x.map(|x| f(x, y)).or(Some(y)) + } + + let (started_min, started_max, elapsed_min, elapsed_max) = prover.jobs.iter().fold( + (None, None, None, None), + |(started_min, started_max, elapsed_min, elapsed_max), cur| match &cur.status { + ProverJobStatus::InProgress(s) => ( + map(min, started_min, s.started_at), + map(max, started_max, s.started_at), + elapsed_min, + elapsed_max, + 
), + ProverJobStatus::Successful(s) => ( + map(min, started_min, s.started_at), + map(max, started_max, s.started_at), + map(min, elapsed_min, cur.updated_at - s.started_at), + map(max, elapsed_max, cur.updated_at - s.started_at), + ), + ProverJobStatus::Failed(s) => ( + map(min, started_min, s.started_at), + map(max, started_max, s.started_at), + elapsed_min, + elapsed_max, + ), + _ => (started_min, started_max, elapsed_min, elapsed_max), + }, + ); + + fn format_time( + t: Option>, + reference: DateTime, + reference_name: &str, + ) -> String { + match t { + Some(t) => { + format!("{} {}+{}", timef(t), reference_name, durf(t - reference)) + } + None => "N/A".to_owned(), + } + } + + let stats = format!( + "Started: {} (min) + {} (max) +Elapsed: {} (min), + {} (max)", + format_time(started_min, duration_reference, &reference_name), + format_time(started_max, duration_reference, &reference_name), + elapsed_min.map_or("N/A".to_owned(), durf), + elapsed_max.map_or("N/A".to_owned(), durf) + ); + + format!(" Prover jobs: {}\n{}", statuses, indent(&stats, 6)) + } + _ => "No prover jobs.".to_string(), + }; + println!("Round {}", round.round_number as u32); + println!("[C]reated {}", timef(round.created_at)); + println!( + "[U]pdated {} C+{}", + timef(round.updated_at), + durf(round.updated_at - round.created_at) + ); + + println!("{}\n{}", witness, prover); + } + + fn print_missing_block(round_ix: usize) { + println!("Round {} missing jobs", round_ix); + } + + println!("Block {}", block.id); + + let prevs = iter::once(&None).chain(block.aggregations.iter()); // No need to map into Some, cause previous round must to exist. + + block + .aggregations + .iter() + .zip(prevs) + .enumerate() + .for_each(|(i, (cur, prev))| { + // Option in `cur` refers to conceptual existence of block data. + // Option in `prev` signifies whether there is a previous block for `cur` block, and + // must only be None for first element. 
+ assert!(i == 0 || prev.is_some()); + + match cur { + Some(x) => print_existing_block(x, prev), + None => print_missing_block(i), + } + }); + + Ok(()) +} diff --git a/core/bin/admin-tools/src/main.rs b/core/bin/admin-tools/src/main.rs new file mode 100644 index 000000000000..350c2665f88f --- /dev/null +++ b/core/bin/admin-tools/src/main.rs @@ -0,0 +1,170 @@ +use std::convert::TryFrom; + +use application::{App, AppError}; +use blocks::print_block_info; +use clap::{Args, Parser, Subcommand}; +use zksync_dal::prover_dal::GetProverJobsParams; +use zksync_types::proofs::AggregationRound; +use zksync_types::L1BatchNumber; + +use crate::application::create_app; + +mod application; +mod blocks; +mod prover; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)] +enum JobStatus { + Queued, + Failed, + InProgress, + Successful, +} + +#[derive(Parser)] +struct Cli { + #[command(subcommand)] + command: Command, + #[arg(long)] + /// Can be used to load environment from ./etc/env/.env file. + profile: Option, +} + +#[derive(Subcommand)] +enum Command { + #[command(subcommand)] + Prover(ProverCommand), + #[command(subcommand)] + Blocks(BlockCommand), +} + +#[derive(Subcommand)] +enum ProverCommand { + /// Show general prover jobs statistics. + Stats, + /// List specific jobs + Ls(ProverLsCommand), +} + +#[derive(Subcommand)] +enum BlockCommand { + Show(BlockShowCommand), +} + +type AppFnBox<'a> = Box Result<(), AppError> + 'a>; +type CmdMatch<'a> = Result, AppError>; + +fn prover_stats<'a>() -> AppFnBox<'a> { + Box::new(|app| { + let stats = prover::get_stats(app)?; + prover::print_stats(&stats, app.terminal.width) + }) +} + +#[derive(Args)] +struct ProverLsCommand { + #[arg(long, short)] + /// Statuses to include. + status: Option>, + #[arg(long, short, default_value_t = 10)] + /// Limits the amount of returned results. + limit: u32, + #[arg(long)] + desc: bool, + #[arg(long)] + /// Block range. Format: `x` or `x..y`. 
+ range: Option, + #[arg(long)] + round: Option, +} + +fn prover_ls<'a>(cmd: &ProverLsCommand) -> Result, AppError> { + let range = match &cmd.range { + Some(input) => { + let split = input + .split("..") + .map(|x| x.parse::()) + .collect::, _>>() + .map_err(|_| AppError::Command("Wrong range format".to_owned()))?; + + match split.as_slice() { + [] => Ok(None), + [id] => Ok(Some(L1BatchNumber(*id)..L1BatchNumber(*id))), + [s, e] => Ok(Some(L1BatchNumber(*s)..L1BatchNumber(*e))), + _ => Err(AppError::Command("Wrong range format".to_owned())), + } + } + None => Ok(None), + }?; + + let opts = GetProverJobsParams { + blocks: range, + statuses: cmd.status.as_ref().map(|x| { + x.iter() + .map(|x| { + clap::ValueEnum::to_possible_value(x) + .unwrap() + .get_name() + .replace('-', "_") + }) + .collect() + }), + limit: Some(cmd.limit), + desc: cmd.desc, + round: cmd + .round + .map_or(Ok(None), |x| AggregationRound::try_from(x).map(Some)) + .map_err(|_| AppError::Command("Wrong aggregation round value.".to_owned()))?, + }; + + Ok(Box::new(move |app| { + let jobs = prover::get_jobs(app, opts)?; + prover::print_jobs(&jobs) + })) +} + +#[derive(Args)] +struct BlockShowCommand { + id: u32, +} + +fn block_show<'a>(id: L1BatchNumber) -> AppFnBox<'a> { + Box::new(move |app| { + let block = blocks::get_block_info(id, app)?; + print_block_info(&block) + }) +} + +fn match_prover_cmd(cmd: &ProverCommand) -> CmdMatch { + match cmd { + ProverCommand::Stats => Ok(prover_stats()), + ProverCommand::Ls(x) => prover_ls(x), + } +} + +fn match_block_cmd(cmd: &BlockCommand) -> CmdMatch { + match cmd { + BlockCommand::Show(cmd) => Ok(block_show(L1BatchNumber(cmd.id))), + } +} + +fn match_cmd(cmd: &Command) -> CmdMatch { + match cmd { + Command::Prover(cmd) => match_prover_cmd(cmd), + Command::Blocks(cmd) => match_block_cmd(cmd), + } +} + +fn main() -> Result<(), AppError> { + let cli = Cli::parse(); + + let exec_fn = match_cmd(&cli.command)?; + + let mut app = create_app(&cli.profile)?; + + 
println!(); + let result = exec_fn(&mut app); + println!(); + + result +} diff --git a/core/bin/admin-tools/src/prover.rs b/core/bin/admin-tools/src/prover.rs new file mode 100644 index 000000000000..a0bc09026d43 --- /dev/null +++ b/core/bin/admin-tools/src/prover.rs @@ -0,0 +1,170 @@ +use crate::application::AppError; + +use super::application::App; +use std::string::ToString; +use zksync_dal::{self, prover_dal::GetProverJobsParams}; +use zksync_types::{ + proofs::{ProverJobInfo, ProverJobStatus}, + L1BatchNumber, +}; + +pub struct ProverStats { + pub successful: usize, + pub successful_padding: L1BatchNumber, + pub in_progress: usize, + pub queued: usize, + pub queued_padding: L1BatchNumber, + pub failed: usize, + pub jobs: Vec, +} + +pub fn get_stats(app: &mut App) -> Result { + let stats = app.db.prover_dal().get_prover_jobs_stats(); + let stats_extended = app + .db + .prover_dal() + .get_extended_stats() + .map_err(|x| AppError::Db(x.to_string()))?; + + Ok(ProverStats { + successful: stats.successful, + successful_padding: stats_extended.successful_padding, + in_progress: stats.in_progress, + queued: stats.queued, + queued_padding: stats_extended.queued_padding, + failed: stats.failed, + jobs: stats_extended.active_area, + }) +} + +pub fn print_stats(stats: &ProverStats, term_width: u32) -> Result<(), AppError> { + struct Map { + //width: u32, + successful: Vec, + in_progress: Vec, + queued: Vec, + failed: Vec, + skipped: Vec, + } + + impl Map { + fn new( + display_w: u32, + active_area_start: u32, + active_area_size: u32, + jobs: &[ProverJobInfo], + ) -> Map { + let mut s: Vec<_> = (0..display_w).map(|_| false).collect(); + let mut i: Vec<_> = (0..display_w).map(|_| false).collect(); + let mut q: Vec<_> = (0..display_w).map(|_| false).collect(); + let mut f: Vec<_> = (0..display_w).map(|_| false).collect(); + let mut sk: Vec<_> = (0..display_w).map(|_| false).collect(); + + let area_term_ratio = active_area_size as f32 / display_w as f32; + + for j in jobs + 
.iter() + .filter(|x| !matches!(x.status, ProverJobStatus::Ignored)) + { + let ix = ((j.block_number.0 - active_area_start) as f32 / area_term_ratio) as usize; + + (match j.status { + ProverJobStatus::Successful(_) => &mut s, + ProverJobStatus::InProgress(_) => &mut i, + ProverJobStatus::Queued => &mut q, + ProverJobStatus::Failed(_) => &mut f, + ProverJobStatus::Skipped => &mut sk, + _ => unreachable!(), + })[ix] = true; + } + + Map { + successful: s, + failed: f, + in_progress: i, + queued: q, + skipped: sk, + } + } + } + + let active_area_start = stats.successful_padding.0 + 1; + let active_area_size = stats.queued_padding.0 - active_area_start; + + let display_w = std::cmp::min(term_width, active_area_size); + + let map = Map::new(display_w, active_area_start, active_area_size, &stats.jobs); + + let map_fn = |x: &bool| match x { + true => "+", + false => ".", + }; + + let to_str_fn = |v: Vec<_>| v.iter().map(map_fn).collect::(); + + println!("Prover jobs: "); + println!(" Queued: {}", stats.queued); + println!(" In progress: {}", stats.in_progress); + println!( + " Successful: {}, block reach: {}", + stats.successful, stats.successful_padding + ); + println!(" Failed: {}", stats.failed); + + if stats.failed > 0 { + println!(" [id:block] circuit type") + } + + for x in stats + .jobs + .iter() + .filter(|x| matches!(x.status, ProverJobStatus::Failed(_))) + { + println!(" - [{}:{}] {}", x.id, x.block_number, x.circuit_type) + } + + println!(); + println!( + "Active area [{} - {}] ({})", + stats.successful_padding.0 + 1, + stats.queued_padding.0 - 1, + stats.queued_padding.0 - stats.successful_padding.0 - 2, + ); + println!("q: --|{}|--", to_str_fn(map.queued)); + println!("i: --|{}|--", to_str_fn(map.in_progress)); + println!("s: --|{}|--", to_str_fn(map.successful)); + println!("f: --|{}|--", to_str_fn(map.failed)); + println!("x: --|{}|--", to_str_fn(map.skipped)); + + Ok(()) +} + +pub fn get_jobs(app: &mut App, opts: GetProverJobsParams) -> Result, AppError> { 
+ app.db + .prover_dal() + .get_jobs(opts) + .map_err(|x| AppError::Db(x.to_string())) +} + +pub fn print_jobs(jobs: &[ProverJobInfo]) -> Result<(), AppError> { + fn pji2string(job: &ProverJobInfo) -> String { + format!( + "Id: {} +Block: {} +Circuit type: {} +Aggregation round: {} +Status: {}", + job.id, + job.block_number, + job.circuit_type, + job.position.aggregation_round as u32, + job.status + ) + } + + let results = jobs.iter().map(pji2string).collect::>(); + + println!("{}\n\n{} results", results.join("\n\n"), results.len()); + + Ok(()) +} diff --git a/core/bin/blob_purger/Cargo.toml b/core/bin/blob_purger/Cargo.toml new file mode 100644 index 000000000000..4452cc5e4007 --- /dev/null +++ b/core/bin/blob_purger/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "blob_purger" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + + +[[bin]] +name = "zksync_blob_purger" +path = "src/main.rs" + +[dependencies] +zksync_dal = { path = "../../lib/dal", version = "1.0" } +structopt = "0.3.26" diff --git a/core/bin/blob_purger/src/main.rs b/core/bin/blob_purger/src/main.rs new file mode 100644 index 000000000000..2cb9d1f79092 --- /dev/null +++ b/core/bin/blob_purger/src/main.rs @@ -0,0 +1,143 @@ +use std::str::FromStr; +use std::thread::sleep; +use std::time::Duration; + +use structopt::StructOpt; + +use zksync_dal::ConnectionPool; +const WAIT_TIME_MILLI_SECONDS: u64 = 2500; + +#[derive(Debug)] +enum BlobTable { + WitnessInputs, + LeafAggregationWitnessJobs, + NodeAggregationWitnessJobs, + SchedulerWitnessJobs, + ProverJobs, +} + +impl FromStr for BlobTable { + type Err = String; + fn from_str(table_name: &str) -> Result { + match table_name { + "witness_inputs" => Ok(BlobTable::WitnessInputs), + "leaf_aggregation_witness_jobs" => Ok(BlobTable::LeafAggregationWitnessJobs), 
+ "node_aggregation_witness_jobs" => Ok(BlobTable::NodeAggregationWitnessJobs), + "scheduler_witness_jobs" => Ok(BlobTable::SchedulerWitnessJobs), + "prover_jobs" => Ok(BlobTable::ProverJobs), + _ => Err("Could not parse table name".to_string()), + } + } +} + +#[derive(Debug, StructOpt)] +#[structopt( + name = "Tool for purging blob from database", + about = "Tool to delete blob for individual tables from db" +)] +struct Opt { + /// Name of the table from which blobs would be deleted. + #[structopt(short = "t", long = "table_name", default_value = "witness_inputs")] + table: BlobTable, + /// Number of blobs purged in each batch. + #[structopt(short = "b", long = "batch_size", default_value = "20")] + batch_size: u8, +} + +fn purge_witness_inputs(pool: ConnectionPool, batch_size: u8) -> bool { + let l1_batches = pool + .access_storage_blocking() + .blocks_dal() + .get_l1_batches_with_blobs_in_db(batch_size); + if l1_batches.is_empty() { + return false; + } + println!("purging witness_inputs: {:?}", l1_batches); + pool.access_storage_blocking() + .blocks_dal() + .purge_blobs_from_db(l1_batches); + true +} + +fn purge_leaf_aggregation_witness_jobs(pool: ConnectionPool, batch_size: u8) -> bool { + let l1_batches = pool + .access_storage_blocking() + .witness_generator_dal() + .get_leaf_aggregation_l1_batches_with_blobs_in_db(batch_size); + if l1_batches.is_empty() { + return false; + } + println!("purging leaf_aggregation_witness_jobs: {:?}", l1_batches); + pool.access_storage_blocking() + .witness_generator_dal() + .purge_leaf_aggregation_blobs_from_db(l1_batches); + true +} + +fn purge_node_aggregation_witness_jobs(pool: ConnectionPool, batch_size: u8) -> bool { + let l1_batches = pool + .access_storage_blocking() + .witness_generator_dal() + .get_node_aggregation_l1_batches_with_blobs_in_db(batch_size); + if l1_batches.is_empty() { + return false; + } + println!("purging node_aggregation_witness_jobs: {:?}", l1_batches); + pool.access_storage_blocking() + 
.witness_generator_dal() + .purge_node_aggregation_blobs_from_db(l1_batches); + true +} + +fn purge_scheduler_witness_jobs(pool: ConnectionPool, batch_size: u8) -> bool { + let l1_batches = pool + .access_storage_blocking() + .witness_generator_dal() + .get_scheduler_l1_batches_with_blobs_in_db(batch_size); + if l1_batches.is_empty() { + return false; + } + println!("purging scheduler_witness_jobs: {:?}", l1_batches); + pool.access_storage_blocking() + .witness_generator_dal() + .purge_scheduler_blobs_from_db(l1_batches); + true +} + +fn purge_prover_jobs(pool: ConnectionPool, batch_size: u8) -> bool { + let job_ids = pool + .access_storage_blocking() + .prover_dal() + .get_l1_batches_with_blobs_in_db(batch_size); + if job_ids.is_empty() { + return false; + } + println!("purging prover_jobs: {:?}", job_ids); + pool.access_storage_blocking() + .prover_dal() + .purge_blobs_from_db(job_ids); + true +} + +fn main() { + let opt = Opt::from_args(); + println!("processing table: {:?}", opt.table); + let pool = ConnectionPool::new(Some(1), true); + let mut shall_purge = true; + while shall_purge { + shall_purge = match opt.table { + BlobTable::WitnessInputs => purge_witness_inputs(pool.clone(), opt.batch_size), + BlobTable::LeafAggregationWitnessJobs => { + purge_leaf_aggregation_witness_jobs(pool.clone(), opt.batch_size) + } + BlobTable::NodeAggregationWitnessJobs => { + purge_node_aggregation_witness_jobs(pool.clone(), opt.batch_size) + } + BlobTable::SchedulerWitnessJobs => { + purge_scheduler_witness_jobs(pool.clone(), opt.batch_size) + } + BlobTable::ProverJobs => purge_prover_jobs(pool.clone(), opt.batch_size), + }; + sleep(Duration::from_millis(WAIT_TIME_MILLI_SECONDS)); + } +} diff --git a/core/bin/circuit_synthesizer/Cargo.lock b/core/bin/circuit_synthesizer/Cargo.lock new file mode 100644 index 000000000000..44712097815a --- /dev/null +++ b/core/bin/circuit_synthesizer/Cargo.lock @@ -0,0 +1,6177 @@ +# This file is automatically @generated by Cargo. 
+# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aes" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" +dependencies = [ + "aes-soft", + "aesni", + "cipher", +] + +[[package]] +name = "aes-ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7729c3cde54d67063be556aeac75a81330d802f0259500ca40cb52967f975763" +dependencies = [ + "aes-soft", + "aesni", + "cipher", + "ctr", +] + +[[package]] +name = "aes-soft" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "aesni" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 
0.2.8", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" + +[[package]] +name = "api" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bellman_ce", + "cfg-if 1.0.0", + "num_cpus", + "serde", +] + +[[package]] +name = "arr_macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a105bfda48707cf19220129e78fca01e9639433ffaef4163546ed8fb04120a5" +dependencies = [ + "arr_macro_impl", + "proc-macro-hack", +] + +[[package]] +name = "arr_macro_impl" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" +dependencies = [ + "proc-macro-hack", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = 
"0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "async-channel" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794" +dependencies = [ + "async-lock", + "autocfg 1.1.0", + "concurrent-queue", + "futures-lite", + "libc", + "log", + "parking", + "polling", + "slab", + "socket2", + "waker-fn", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-lock" +version = "2.6.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" +dependencies = [ + "event-listener", + "futures-lite", +] + +[[package]] +name = "async-native-tls" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9e7a929bd34c68a82d58a4de7f86fffdaf97fb2af850162a7bb19dd7269b33" +dependencies = [ + "async-std", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-process" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6381ead98388605d0d9ff86371043b5aa922a3905824244de40dc263a14fcba4" +dependencies = [ + "async-io", + "async-lock", + "autocfg 1.1.0", + "blocking", + "cfg-if 1.0.0", + "event-listener", + "futures-lite", + "libc", + "signal-hook", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils 0.8.14", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] 
+name = "async-task" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" + +[[package]] +name = "async-trait" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "atoi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "base64ct" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +dependencies = [ + "serde", +] + +[[package]] +name = "bellman_ce" +version = "0.3.2" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayvec 0.7.2", + "bit-vec", + "blake2s_const", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.7.3", + "futures 0.3.26", + "hex", + "lazy_static", + "num_cpus", + "pairing_ce", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "bigdecimal" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc403c26e6b03005522e6e8053384c4e881dfe5b2bf041c0c2c49be33d64a539" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "lazy_static", + "lazycell", + "peeking_take_while", + "proc-macro2 1.0.51", + "quote 1.0.23", + "regex", + "rustc-hash", + "shlex", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +dependencies = [ + "crypto-mac 0.8.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "blake2-rfc_bellman_edition" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" +dependencies = [ + 
"arrayvec 0.4.12", + "byteorder", + "constant_time_eq", +] + +[[package]] +name = "blake2s_const" +version = "0.6.0" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-modes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" +dependencies = [ + "block-padding", + "cipher", +] + +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + +[[package]] +name = "blocking" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", +] + +[[package]] +name = "bstr" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "rustc-serialize", + "serde", + "time 0.1.43", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "cipher" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +dependencies = [ + "generic-array", +] + +[[package]] +name = "circuit_testing" +version = "0.1.0" +source = "git+https://github.com/matter-labs/circuit_testing.git?branch=main#7160c45c844944748663c91b6860c77f5376d9e4" +dependencies = [ + "bellman_ce", +] + +[[package]] +name = "clang-sys" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "cloud-storage" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7602ac4363f68ac757d6b87dd5d850549a14d37489902ae639c06ecec06ad275" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bytes", + "chrono", + "dotenv", + "futures-util", + "hex", + "jsonwebtoken", + "lazy_static", + "openssl", + "percent-encoding", + "reqwest", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + +[[package]] +name = "codegen" +version = "0.1.0" +source = "git+https://github.com/matter-labs/solidity_plonk_verifier.git?branch=dev#4fb6397f778a580c9207ec23661228f5da7e66b4" +dependencies = [ + "ethereum-types", + "franklin-crypto", + "handlebars", + "hex", + "paste", + "rescue_poseidon", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d" +dependencies = [ + "indexmap", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "concurrent-queue" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" +dependencies = [ + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "const-oid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel 0.4.4", + "crossbeam-deque 0.7.4", + "crossbeam-epoch 0.8.2", + "crossbeam-queue 0.2.3", + "crossbeam-utils 0.7.2", +] + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-epoch 0.9.13", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + 
"crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" +dependencies = [ + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "lazy_static", + "maybe-uninit", + "memoffset 0.5.6", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", + "memoffset 0.7.1", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = 
"crypto-mac" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "cs_derive" +version = "0.1.0" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "ctor" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +dependencies = [ + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" +dependencies = [ + "cipher", +] + +[[package]] +name = "ctrlc" +version = "3.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" +dependencies = [ + "nix", + "windows-sys 0.45.0", +] + +[[package]] +name = "cxx" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "scratch", + "syn 1.0.107", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "strsim 0.10.0", + "syn 1.0.107", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = 
"derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustc_version", + "syn 1.0.107", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer 0.10.3", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "eip712-signature" +version = "0.1.0" +source = "git+https://github.com/vladbochok/eip712-signature#30b11455e7d613313e8c12d2aad961fd4bf902fe" +dependencies = [ + "ethereum-types", + "parity-crypto", + 
"thiserror", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint", + "der", + "digest 0.10.6", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "ethabi" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c98847055d934070b90e806e12d3936b787d0a115068981c1d8dfd5dfef5a5" +dependencies = [ + "ethereum-types", + "hex", + "serde", + "serde_json", + "sha3 0.9.1", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-rlp", + "impl-serde", + "tiny-keccak 2.0.2", +] + +[[package]] +name = "ethereum-types" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05136f7057fe789f06e6d41d07b34e6f70d8c86e5693b60f97aaa6553553bdaf" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-rlp", + "impl-serde", + "primitive-types", + "uint", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fastrand" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +dependencies = [ + "instant", +] + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" 
+dependencies = [ + "byteorder", + "ff_derive_ce", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "ff_derive_ce" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" +dependencies = [ + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = 
"git+https://github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + +[[package]] +name = "futures" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608" + +[[package]] +name = "futures-executor" +version = "0.3.26" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-intrusive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot 0.11.2", +] + +[[package]] +name = "futures-io" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" + +[[package]] +name = "futures-lite" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "futures-sink" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364" + +[[package]] +name = "futures-task" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +dependencies = [ + "gloo-timers", + "send_wrapper", +] + 
+[[package]] +name = "futures-util" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1" +dependencies = [ + "futures 0.1.31", + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "221996f774192f0f718773def8201c4ae31f02616a54ccfc2d358bb0e5cefdec" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +dependencies = [ + "aho-corasick", + "bstr", + "fnv", + "log", + "regex", +] + +[[package]] +name = "gloo-net" +version = "0.2.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8e8fc851e9c7b9852508bc6e3f690f452f474417e8545ec9857b7f7377036b5" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util 0.7.6", + "tracing", +] + +[[package]] +name = "handlebars" +version = "4.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashlink" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +dependencies = [ + "hashbrown 0.11.2", +] + +[[package]] +name = "headers" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +dependencies = [ + "base64 0.13.1", + "bitflags", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" 
+dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +dependencies = [ + "crypto-mac 0.10.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +dependencies = [ + "crypto-mac 0.11.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.5", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 1.0.5", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "log", + "rustls", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "webpki-roots", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.53" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" +dependencies = [ + "serde", +] + +[[package]] +name = 
"impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg 1.1.0", + "hashbrown 0.12.3", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +dependencies = [ + "libc", + "windows-sys 0.45.0", +] + +[[package]] +name = "ipnet" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" + +[[package]] +name = "ipnetwork" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02c3eaab3ac0ede60ffa41add21970a7df7d91772c03383aac6c2c3d53cc716b" + +[[package]] +name = "is-terminal" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes", + "rustix", + "windows-sys 0.45.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" + +[[package]] +name = "jobserver" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.26", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpsee" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965de52763f2004bc91ac5bcec504192440f0b568a5d621c59d9dbd6f886c3fb" +dependencies = [ + "anyhow", + "futures-channel", + "futures-timer", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "rustls-native-certs", + 
"soketto", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util 0.7.6", + "tracing", + "webpki-roots", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e70b4439a751a5de7dd5ed55eacff78ebf4ffe0fc009cb1ebb11417f5b536b" +dependencies = [ + "anyhow", + "arrayvec 0.7.2", + "async-lock", + "async-trait", + "beef", + "futures-channel", + "futures-timer", + "futures-util", + "globset", + "hyper", + "jsonrpsee-types", + "parking_lot 0.12.1", + "rand 0.8.5", + "rustc-hash", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc345b0a43c6bc49b947ebeb936e886a419ee3d894421790c969cc56040542ad" +dependencies = [ + "async-trait", + "hyper", + "hyper-rustls", + "jsonrpsee-core", + "jsonrpsee-types", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baa6da1e4199c10d7b1d0a6e5e8bd8e55f351163b6f4b3cbb044672a69bd4c1c" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb69dad85df79527c019659a992498d03f8495390496da2f07e6c24c2b356fc" +dependencies = [ + "futures-channel", + "futures-util", + "http", + "hyper", + "jsonrpsee-core", + "jsonrpsee-types", + "serde", + "serde_json", + "soketto", + "tokio", + "tokio-stream", + "tokio-util 0.7.6", + "tower", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5bd522fe1ce3702fd94812965d7bb7a3364b1c9aba743944c5a00529aae80f8c" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" +dependencies = [ + "http", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonwebtoken" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32" +dependencies = [ + "base64 0.12.3", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if 1.0.0", + "ecdsa", + "elliptic-curve", + "sha2 0.10.6", +] + +[[package]] +name = "keccak" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.139" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if 1.0.0", + "winapi", +] + +[[package]] +name = "librocksdb-sys" +version = "0.6.1+6.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bc587013734dadb7cf23468e531aa120788b87243648be42e2d3a072186291" +dependencies = [ + "bindgen", + "bzip2-sys", + "cc", + "glob", + "libc", + "libz-sys", +] + +[[package]] +name = "libz-sys" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "local-ip-address" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa9d02443a1741e9f51dafdfcbffb3863b2a89c457d762b40337d6c5153ef81" +dependencies = [ + "libc", + "neli", + "thiserror", + "windows-sys 0.42.0", +] + 
+[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg 1.1.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", + "value-bag", +] + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "md-5" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5a279bb9607f9f53c22d496eade00d138d1bdcccd07d74650387cf94942a15" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "metrics" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b9b8653cec6897f73b519a43fba5ee3d50f62fe9af80b428accdcc093b4a849" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8603921e1f54ef386189335f288441af761e0fc61bcb552168d9cedfe63ebc70" +dependencies = [ + "hyper", + "indexmap", + "ipnet", + "metrics", + "metrics-util", + "parking_lot 0.12.1", + "portable-atomic", + "quanta", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-macros" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "metrics-util" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d24dc2dbae22bff6f1f9326ffce828c9f07ef9cc1e8002e5279f845432a30a" +dependencies = [ + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", + "hashbrown 0.12.3", + "metrics", + "num_cpus", + "parking_lot 0.12.1", + "portable-atomic", + "quanta", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.42.0", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "neli" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9053554eb5dcb7e10d9cdab1206965bde870eed5d0d341532ca035e3ba221508" +dependencies = [ + "byteorder", + "libc", +] + +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "libc", + "static_assertions", +] + +[[package]] +name = 
"nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b7a8e9be5e039e2ff869df49155f1c06bd01ade2117ec783e56ab0932b67a8f" +dependencies = [ + "num-bigint 0.3.3", + "num-complex 0.3.1", + "num-integer", + "num-iter", + "num-rational 0.3.2", + "num-traits", +] + +[[package]] +name = "num" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +dependencies = [ + "num-bigint 0.4.3", + "num-complex 0.4.3", + "num-integer", + "num-iter", + "num-rational 0.4.1", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafd0b45c5537c3ba526f79d3e75120036502bebacbb3f3220914067ce39dbf2" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg 1.1.0", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + 
"num-traits", +] + +[[package]] +name = "num-rational" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "openssl" +version = "0.10.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + 
"foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7" +dependencies = [ + "autocfg 1.1.0", + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6105e89802af13fdf48c49d7646d3b533a70e536d818aae7e78ba0433d01acb8" +dependencies = [ + "async-trait", + "crossbeam-channel 0.5.6", + "futures-channel", + "futures-executor", + "futures-util", + "js-sys", + "lazy_static", + "percent-encoding", + "pin-project", + "rand 0.8.5", + "thiserror", +] + +[[package]] +name = "opentelemetry-http" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449048140ee61e28f57abe6e9975eedc1f3a29855c7407bd6c12b18578863379" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1a6ca9de4c8b00aa7f1a153bd76cb263287155cec642680d79d98706f3d28a" +dependencies = [ + "async-trait", + "futures 0.3.26", + "futures-util", + "http", + "opentelemetry", + "opentelemetry-http", + "prost", + "prost-build", + "reqwest", + "thiserror", + "tokio", + "tonic", 
+ "tonic-build", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "985cc35d832d412224b2cffe2f9194b1b89b6aa5d0bef76d080dce09d90e62bd" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "os_info" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c424bc68d15e0778838ac013b5b3449544d8133633d8016319e7e05a820b8c0" +dependencies = [ + "log", + "serde", + "winapi", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db007b21259660d025918e653508f03050bf23fb96a88601f9936329faadc597" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "parity-crypto" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b92ea9ddac0d6e1db7c49991e7d397d34a9fd814b4c93cda53788e8eef94e35" +dependencies = [ + "aes", + "aes-ctr", + "block-modes", + "digest 0.9.0", + "ethereum-types", + "hmac 0.10.1", + "lazy_static", + "pbkdf2 0.7.5", + "ripemd160", + "rustc-hex", + "scrypt", + "secp256k1 0.20.3", + "sha2 0.9.9", + "subtle", + "tiny-keccak 2.0.2", + "zeroize", +] + +[[package]] +name = "parity-scale-codec" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +dependencies = [ + "arrayvec 0.7.2", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +dependencies = [ + "proc-macro-crate", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "parking" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "password-hash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54986aa4bfc9b98c6a5f40184223658d187159d7b3c6af33f2b2aa25ae1db0fa" +dependencies = [ + "base64ct", + "rand_core 0.6.4", +] + +[[package]] +name = "paste" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" + +[[package]] +name = "pbkdf2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3b8c0d71734018084da0c0354193a5edfb81b20d2d57a92c5b154aefc554a4a" +dependencies = [ + "crypto-mac 0.10.1", +] + +[[package]] +name = "pbkdf2" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf916dd32dd26297907890d99dc2740e33f6bd9073965af4ccff2967962f5508" +dependencies = [ + "base64ct", + "crypto-mac 0.10.1", + "hmac 0.10.1", + "password-hash", + "sha2 0.9.9", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +dependencies = [ + "base64 0.13.1", + "once_cell", + "regex", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ac3922aac69a40733080f53c1ce7f91dcf57e1a5f6c52f421fadec7fbdc4b69" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06646e185566b5961b4058dd107e0a7f56e77c3f484549fb119867773c0f202" +dependencies 
= [ + "pest", + "pest_meta", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pest_meta" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6f60b2ba541577e2a0c307c8f39d1439108120eb7903adeb6497fa880c59616" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.6", +] + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] 
+name = "polling" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "libc", + "log", + "wepoll-ffi", + "windows-sys 0.42.0", +] + +[[package]] +name = "portable-atomic" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proc-macro2" +version = "1.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus_exporter" +version = "1.0.0" +dependencies = [ + "metrics", + "metrics-exporter-prometheus", + "tokio", + "vlog", + "zksync_config", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes", + "heck 0.3.3", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prost", + "prost-types", + "regex", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes", + "prost", +] + +[[package]] +name = 
"prover-service" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "api", + "bincode", + "crossbeam-utils 0.8.14", + "log", + "num_cpus", + "rand 0.4.6", + "serde", + "serde_json", + "zkevm_test_harness", +] + +[[package]] +name = "quanta" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e31331286705f455e56cca62e0e717158474ff02b7936c1fa596d983f4ae27" +dependencies = [ + "crossbeam-utils 0.8.14", + "libc", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + +[[package]] +name = "queues" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1475abae4f8ad4998590fe3acfe20104f0a5d48fc420c817cd2c09c3f56151f0" + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +dependencies = [ + "proc-macro2 1.0.51", +] + +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg", + "rand_xorshift", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.8", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "raw-cpuid" +version = "10.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c307f7aacdbab3f0adee67d52739a1d71112cc068d6fab169ddeb18e48877fad" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rayon" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +dependencies = [ + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-utils 0.8.14", + "num_cpus", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom 0.2.8", + "redox_syscall", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "reqwest" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9" +dependencies = [ + "base64 0.21.0", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util 0.7.6", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rescue_poseidon" +version = "0.4.1" +source = "git+https://github.com/matter-labs/rescue-poseidon.git#fbb3882b8f1e63dff769a1f1a59211d0e0838351" +dependencies = [ + "addchain", + "arrayvec 0.7.2", + "blake2 0.10.6", + "byteorder", + "franklin-crypto", + "num-bigint 0.3.3", + "num-integer", + "num-iter", 
+ "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint", + "hmac 0.12.1", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "ripemd160" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rustc-hex", +] + +[[package]] +name = "rocksdb" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "620f4129485ff1a7128d184bc687470c21c7951b64779ebc9cfdad3dcd920290" +dependencies = [ + "libc", + "librocksdb-sys", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustversion" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" + +[[package]] +name = "ryu" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" + +[[package]] +name = "salsa20" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "399f290ffc409596022fce5ea5d4138184be4784f2b28c62c59f0d8389059a15" +dependencies = [ + "cipher", +] + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "scratch" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" + +[[package]] +name = "scrypt" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da492dab03f925d977776a0b7233d7b934d6dc2b94faead48928e2e9bacedb9" +dependencies = [ + "base64 0.13.1", + "hmac 0.10.1", + "pbkdf2 0.6.0", + "rand 0.7.3", + "rand_core 0.5.1", + "salsa20", + "sha2 0.9.9", + "subtle", +] + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" +dependencies = [ + "rand 0.6.5", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" + +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + +[[package]] +name = "sentry" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6097dc270a9c4555c5d6222ed243eaa97ff38e29299ed7c5cb36099033c604e" +dependencies = [ + "httpdate", + "native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-panic", + "tokio", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version 
= "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d92d1e4d591534ae4f872d6142f3b500f4ffc179a6aed8a3e86c7cc96d10a6a" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afa877b1898ff67dd9878cf4bec4e53cef7d3be9f14b1fc9e4fcdf36f8e4259" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc43eb7e4e3a444151a0fe8a0e9ce60eabd905dae33d66e257fa26f1b509c1bd" +dependencies = [ + "once_cell", + "rand 0.8.5", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-panic" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccab4fab11e3e63c45f4524bee2e75cde39cdf164cb0b0cbe6ccd1948ceddf66" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-types" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63708ec450b6bdcb657af760c447416d69c38ce421f34e5e2e9ce8118410bc7" +dependencies = [ + "debugid", + "getrandom 0.2.8", + "hex", + "serde", + "serde_json", + "thiserror", + "time 0.3.17", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = 
"serde_json" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +dependencies = [ + "indexmap", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] 
+name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug", +] + +[[package]] +name = "sha3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +dependencies = [ + "digest 0.10.6", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + +[[package]] 
+name = "simple_asn1" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" +dependencies = [ + "chrono", + "num-bigint 0.2.6", + "num-traits", +] + +[[package]] +name = "sketches-ddsketch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ceb945e54128e09c43d8e4f1277851bd5044c6fc540bbaa2ad888f60b3da9ae7" + +[[package]] +name = "slab" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "socket2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes", + "futures 0.3.26", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha-1", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "splitmut" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" + +[[package]] +name = "sqlformat" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7911b0031a0247af40095838002999c7a52fba29d9739e93326e71a5a1bc9d43" +dependencies = [ + "sqlx-core", + "sqlx-macros", +] + +[[package]] +name = "sqlx-core" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aec89bfaca8f7737439bad16d52b07f1ccd0730520d3bf6ae9d069fe4b641fb1" +dependencies = [ + "ahash", + "atoi", + "base64 0.13.1", + "bigdecimal", + "bitflags", + "byteorder", + "bytes", + "chrono", + "crc", + "crossbeam-channel 0.5.6", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", + "dirs", + "either", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-util", + "hashlink", + "hex", + "hmac 0.11.0", + "indexmap", + "ipnetwork", + "itoa 0.4.8", + "libc", + "log", + "md-5", + "memchr", + "num-bigint 0.3.3", + "once_cell", + "parking_lot 0.11.2", + "percent-encoding", + "rand 0.8.5", + "serde", + "serde_json", + "sha-1", + "sha2 0.9.9", + "smallvec", + "sqlformat", + "sqlx-rt", + "stringprep", + "thiserror", + "url", + "whoami", +] + +[[package]] +name = "sqlx-macros" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "584866c833511b1a152e87a7ee20dee2739746f60c858b3c5209150bc4b466f5" +dependencies = [ + "dotenv", + "either", + "heck 0.3.3", + "hex", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "serde_json", + "sha2 0.9.9", + "sqlx-core", + "sqlx-rt", + "syn 1.0.107", + "url", +] + +[[package]] +name = 
"sqlx-rt" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4db708cd3e459078f85f39f96a00960bd841f66ee2a669e90bf36907f5a79aae" +dependencies = [ + "async-native-tls", + "async-std", + "native-tls", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = 
"0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustversion", + "syn 1.0.107", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid", +] + +[[package]] +name = "syn" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "unicode-ident", +] + +[[package]] +name = "sync_vm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "arrayvec 0.7.2", + "cs_derive", + "derivative", + "eip712-signature", + "franklin-crypto", + "hex", + "itertools", + "num-bigint 0.4.3", + "num-derive 0.3.3", + "num-integer", + "num-traits", + "once_cell", + "rand 0.4.6", + "rescue_poseidon", + "serde", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "zk_evm", + "zkevm_opcode_defs", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "libc", + 
"redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-log" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa 1.0.5", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +dependencies = [ + "autocfg 1.1.0", + "bytes", + "libc", + "memchr", + "mio", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.42.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6a3b08b64e6dfad376fa2432c7b1f01522e37a623c3050bc95db2d3ff21583" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml_datetime" +version = "0.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" + +[[package]] +name = "toml_edit" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +dependencies = [ + "indexmap", + "nom8", + "toml_datetime", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64 0.13.1", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "prost-derive", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2 1.0.51", + "prost-build", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util 0.7.6", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbbe89715c1dbbb790059e2565353978564924ee85017b5fff365c872ff6721f" +dependencies = [ + "once_cell", + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time 0.3.17", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" + +[[package]] +name = "unicode-ident" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +dependencies = [ + "base64 0.13.1", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna 0.3.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "uuid" +version = "1.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79" +dependencies = [ + "getrandom 0.2.8", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +dependencies = [ + "ctor", + "version_check", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vlog" +version = "1.0.0" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "sentry", + "serde_json", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "vm" +version = "0.1.0" +dependencies = [ + "hex", + "itertools", + "metrics", + "once_cell", + "thiserror", + "tracing", + "vlog", + "zk_evm", + "zkevm-assembly", + "zksync_config", + "zksync_contracts", + "zksync_crypto", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote 1.0.23", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web3" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f258e254752d210b84fe117b31f1e3cc9cbf04c0d747eb7f8cf7cf5e370f6d" +dependencies = [ + "arrayvec 0.7.2", + "base64 0.13.1", + "bytes", + "derive_more", + "ethabi", + "ethereum-types", + "futures 0.3.26", + "futures-timer", + "headers", + "hex", + "idna 0.2.3", + "jsonrpc-core", + "log", + "once_cell", + "parking_lot 0.12.1", + "pin-project", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_json", + "tiny-keccak 2.0.2", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "wepoll-ffi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" +dependencies = [ + "cc", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "whoami" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45dbc71f0cdca27dc261a9bd37ddec174e4a0af2b900b890f378460f745426e3" +dependencies = [ + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" + +[[package]] +name = "windows_x86_64_gnu" 
+version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "zeroize" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" + +[[package]] +name = "zk_evm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zk_evm.git?branch=v1.3.1#76a3877f5a0b7449bcca73d35bae3ae226996fdd" +dependencies = [ + "blake2 0.10.6", + "k256", + "lazy_static", + "num 0.4.0", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "static_assertions", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm-assembly" +version = "1.3.0" +source = "git+https://github.com/matter-labs/zkEVM-assembly.git?branch=v1.3.1#e2a2145a90ceeb54407df1be5254291a9f693422" +dependencies = [ + "env_logger 0.9.3", + "hex", + "lazy_static", + "log", + "nom", + "num-bigint 0.4.3", + "num-traits", + "regex", + "smallvec", + "structopt", + "thiserror", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm_opcode_defs" +version = 
"1.3.1" +source = "git+https://github.com/matter-labs/zkevm_opcode_defs.git?branch=v1.3.1#dc5b5c463d867855514f03c179992acbde74face" +dependencies = [ + "bitflags", + "ethereum-types", + "lazy_static", + "sha2 0.10.6", +] + +[[package]] +name = "zkevm_test_harness" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_test_harness.git?branch=v1.3.1#2100f00f9cb79e851bf11ec21391672244e382fc" +dependencies = [ + "bincode", + "blake2 0.10.6", + "circuit_testing", + "codegen 0.2.0", + "crossbeam 0.8.2", + "derivative", + "env_logger 0.10.0", + "hex", + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "rayon", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "structopt", + "sync_vm", + "test-log", + "tracing", + "zk_evm", + "zkevm-assembly", +] + +[[package]] +name = "zksync_basic_types" +version = "1.0.0" +dependencies = [ + "serde", + "web3", +] + +[[package]] +name = "zksync_circuit_synthesizer" +version = "0.1.0" +dependencies = [ + "bincode", + "ctrlc", + "futures 0.3.26", + "local-ip-address", + "metrics", + "prometheus_exporter", + "prover-service", + "queues", + "structopt", + "tokio", + "vlog", + "zkevm_test_harness", + "zksync_config", + "zksync_dal", + "zksync_object_store", + "zksync_prover_utils", + "zksync_queued_job_processor", + "zksync_types", +] + +[[package]] +name = "zksync_config" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "envy", + "num 0.3.1", + "once_cell", + "serde", + "serde_json", + "url", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_contracts" +version = "1.0.0" +dependencies = [ + "ethabi", + "hex", + "once_cell", + "serde_json", + "zksync_utils", +] + +[[package]] +name = "zksync_crypto" +version = "1.0.0" +dependencies = [ + "base64 0.13.1", + "blake2 0.10.6", + "hex", + "once_cell", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "thiserror", + "zksync_basic_types", +] + +[[package]] +name = "zksync_dal" +version = "1.0.0" +dependencies = [ + 
"anyhow", + "async-std", + "bigdecimal", + "bincode", + "hex", + "itertools", + "metrics", + "num 0.3.1", + "once_cell", + "serde_json", + "sqlx", + "thiserror", + "vlog", + "vm", + "zksync_config", + "zksync_contracts", + "zksync_object_store", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_mini_merkle_tree" +version = "1.0.0" +dependencies = [ + "once_cell", + "rayon", + "zksync_basic_types", + "zksync_crypto", +] + +[[package]] +name = "zksync_object_store" +version = "1.0.0" +dependencies = [ + "cloud-storage", + "metrics", + "tokio", + "vlog", + "zksync_config", + "zksync_types", +] + +[[package]] +name = "zksync_prover_utils" +version = "1.0.0" +dependencies = [ + "metrics", + "reqwest", + "vlog", +] + +[[package]] +name = "zksync_queued_job_processor" +version = "1.0.0" +dependencies = [ + "async-trait", + "tokio", + "vlog", + "zksync_dal", + "zksync_utils", +] + +[[package]] +name = "zksync_state" +version = "1.0.0" +dependencies = [ + "vlog", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_storage" +version = "1.0.0" +dependencies = [ + "bincode", + "byteorder", + "num_cpus", + "once_cell", + "rocksdb", + "serde", + "vlog", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_types" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "blake2 0.10.6", + "chrono", + "codegen 0.1.0", + "ethbloom", + "hex", + "metrics", + "num 0.3.1", + "once_cell", + "parity-crypto", + "rayon", + "rlp", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror", + "tiny-keccak 1.5.0", + "zk_evm", + "zkevm-assembly", + "zkevm_test_harness", + "zksync_basic_types", + "zksync_config", + "zksync_contracts", + "zksync_mini_merkle_tree", + "zksync_utils", +] + +[[package]] +name = "zksync_utils" +version = "1.0.0" +dependencies = [ + "anyhow", + "bigdecimal", + "envy", + "futures 0.3.26", + "hex", + "num 0.3.1", + "serde", + 
"thiserror", + "tokio", + "zk_evm", + "zksync_basic_types", +] + +[[package]] +name = "zksync_web3_decl" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "itertools", + "jsonrpsee", + "rlp", + "serde", + "serde_json", + "thiserror", + "zksync_types", +] diff --git a/core/bin/circuit_synthesizer/Cargo.toml b/core/bin/circuit_synthesizer/Cargo.toml new file mode 100644 index 000000000000..f97a37f85f3c --- /dev/null +++ b/core/bin/circuit_synthesizer/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "zksync_circuit_synthesizer" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "zksync_circuit_synthesizer" +path = "src/main.rs" + + +[dependencies] +zksync_dal = { path = "../../lib/dal", version = "1.0" } +zksync_queued_job_processor = { path = "../../lib/queued_job_processor", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_object_store = { path = "../../lib/object_store", version = "1.0" } +zksync_types = { path = "../../lib/types", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +prometheus_exporter = { path = "../../lib/prometheus_exporter", version = "1.0" } +zksync_prover_utils = {path = "../../lib/prover_utils", version = "1.0" } + +zkevm_test_harness = { git = "https://github.com/matter-labs/zkevm_test_harness.git", branch = "v1.3.1"} +prover-service = { git = "https://github.com/matter-labs/heavy-ops-service.git", branch = "cleanup", features=["legacy"], default-features=false} + +structopt = "0.3.26" +queues = "1.1.0" +tokio = { version = "1.23.0", features = ["full"] } +futures = { version = "0.3", features = ["compat"] } +ctrlc = { version = "3.1", features = ["termination"] } +local-ip-address = "0.5.0" +bincode = "1.3.2" +metrics = "0.20" diff --git a/core/bin/circuit_synthesizer/src/circuit_synthesizer.rs b/core/bin/circuit_synthesizer/src/circuit_synthesizer.rs new file mode 100644 index 000000000000..5122abb6ef4d --- /dev/null +++ 
b/core/bin/circuit_synthesizer/src/circuit_synthesizer.rs @@ -0,0 +1,246 @@ +use std::io::copy; +use std::net::SocketAddr; +use std::net::TcpStream; +use std::time::Instant; + +use local_ip_address::local_ip; +use prover_service::prover::{Prover, ProvingAssembly}; +use prover_service::remote_synth::serialize_job; +use tokio::task::JoinHandle; +use tokio::time::sleep; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zkevm_test_harness::bellman::plonk::better_better_cs::cs::Circuit; +use zkevm_test_harness::pairing::bn256::Bn256; +use zkevm_test_harness::witness::oracle::VmWitnessOracle; + +use zksync_config::configs::CircuitSynthesizerConfig; +use zksync_config::configs::prover_group::ProverGroupConfig; +use zksync_config::ProverConfigs; +use zksync_dal::ConnectionPool; +use zksync_dal::gpu_prover_queue_dal::{GpuProverInstanceStatus, SocketAddress}; +use zksync_object_store::gcs_utils::prover_circuit_input_blob_url; +use zksync_object_store::object_store::{create_object_store_from_env, PROVER_JOBS_BUCKET_PATH}; +use zksync_prover_utils::numeric_index_to_circuit_name; +use zksync_queued_job_processor::{async_trait, JobProcessor}; +use zksync_types::proofs::ProverJobMetadata; + +pub struct CircuitSynthesizer { + config: CircuitSynthesizerConfig, +} + +impl CircuitSynthesizer { + pub fn new(config: CircuitSynthesizerConfig) -> Self { + Self { config } + } + + pub fn synthesize( + circuit: ZkSyncCircuit>, + ) -> (ProvingAssembly, u8) { + let circuit_synthesis_started_at = Instant::now(); + let mut assembly = Prover::new_proving_assembly(); + circuit + .synthesize(&mut assembly) + .expect("circuit synthesize failed"); + let circuit_type = numeric_index_to_circuit_name(circuit.numeric_circuit_type()).unwrap(); + vlog::info!( + "Finished circuit synthesis for circuit: {} took {:?} seconds", + circuit_type, + circuit_synthesis_started_at.elapsed().as_secs(), + ); + metrics::histogram!( + "server.circuit_synthesizer.synthesize", + 
circuit_synthesis_started_at.elapsed().as_secs() as f64, + "circuit_type" => circuit_type, + ); + + // we don't perform assembly finalization here since it increases the assembly size significantly due to padding. + (assembly, circuit.numeric_circuit_type()) + } +} + +fn get_circuit( + prover_job_metadata: ProverJobMetadata, +) -> ZkSyncCircuit> { + let circuit_input_blob_url = prover_circuit_input_blob_url( + prover_job_metadata.block_number, + prover_job_metadata.sequence_number, + prover_job_metadata.circuit_type.clone(), + prover_job_metadata.aggregation_round, + ); + let object_store = create_object_store_from_env(); + let circuit_input = object_store + .get(PROVER_JOBS_BUCKET_PATH, circuit_input_blob_url) + .expect("Failed fetching prover jobs from GCS"); + + bincode::deserialize::>>(&circuit_input) + .expect("Failed to deserialize circuit input") +} + +#[async_trait] +impl JobProcessor for CircuitSynthesizer { + type Job = ZkSyncCircuit>; + type JobId = u32; + type JobArtifacts = (ProvingAssembly, u8); + const SERVICE_NAME: &'static str = "CircuitSynthesizer"; + + async fn get_next_job( + &self, + connection_pool: ConnectionPool, + ) -> Option<(Self::JobId, Self::Job)> { + let prover_job = connection_pool + .access_storage_blocking() + .prover_dal() + .get_next_prover_job(self.config.generation_timeout(), self.config.max_attempts)?; + let job_id = prover_job.id; + Some((job_id, get_circuit(prover_job))) + } + + async fn save_failure( + pool: ConnectionPool, + job_id: Self::JobId, + _started_at: Instant, + error: String, + ) -> () { + let config: CircuitSynthesizerConfig = CircuitSynthesizerConfig::from_env(); + pool.access_storage_blocking() + .prover_dal() + .save_proof_error(job_id, error, config.max_attempts); + } + + async fn process_job( + _connection_pool: ConnectionPool, + job: Self::Job, + _started_at: Instant, + ) -> JoinHandle { + tokio::task::spawn_blocking(move || Self::synthesize(job)) + } + + async fn save_result( + pool: ConnectionPool, + 
job_id: Self::JobId, + _started_at: Instant, + artifacts: Self::JobArtifacts, + ) { + vlog::info!("Finished circuit synthesis for job: {}", job_id); + let config: CircuitSynthesizerConfig = CircuitSynthesizerConfig::from_env(); + let (assembly, circuit_id) = artifacts; + let now = Instant::now(); + let specialized_prover_group_id = ProverGroupConfig::from_env() + .get_group_id_for_circuit_id(circuit_id) + .unwrap_or_else(|| panic!("No specialized prover running for circuit: {}", circuit_id)); + while now.elapsed() < config.prover_instance_wait_timeout() { + let optional_prover_instance = pool + .clone() + .access_storage_blocking() + .gpu_prover_queue_dal() + .get_free_prover_instance(config.gpu_prover_queue_timeout(), specialized_prover_group_id); + match optional_prover_instance { + Some(address) => { + vlog::info!( + "Found a free prover instance: {:?} to send assembly for job: {}", + address, + job_id + ); + send_assembly(job_id, circuit_id, assembly, address, pool); + return; + } + None => { + sleep(config.prover_instance_poll_time()).await; + } + } + } + vlog::info!( + "Not able to get any free prover instance for sending assembly for job: {}", + job_id + ); + } +} + +fn send_assembly( + job_id: u32, + circuit_id: u8, + assembly: ProvingAssembly, + address: SocketAddress, + pool: ConnectionPool, +) { + let socket_address = SocketAddr::new(address.host, address.port); + vlog::info!( + "Sending assembly to host: {}, port: {}", + address.host, + address.port + ); + match TcpStream::connect(socket_address) { + Ok(stream) => { + serialize_and_send(job_id, circuit_id, address, stream, assembly, pool); + } + Err(e) => { + vlog::info!( + "Failed sending assembly to address: {:?}, socket not reachable reason: {:?}", + address, + e + ); + handle_unreachable_prover_instance(job_id, address, pool); + } + } +} + +fn serialize_and_send( + job_id: u32, + circuit_id: u8, + address: SocketAddress, + mut stream: TcpStream, + assembly: ProvingAssembly, + pool: ConnectionPool, 
+) { + let started_at = Instant::now(); + let mut serialized: Vec = vec![]; + serialize_job::<_>(&assembly, job_id as usize, circuit_id, &mut serialized); + let blob_size_in_gb = serialized.len() / (1024 * 1024 * 1024); + copy(&mut serialized.as_slice(), &mut stream) + .unwrap_or_else(|_| panic!("failed sending assembly to address: {:?}", address)); + let local_ip = local_ip().expect("Failed obtaining local IP address"); + vlog::info!( + "Sent assembly of size: {}GB successfully, took: {} seconds for job: {} by: {:?} to: {:?}", + blob_size_in_gb, + started_at.elapsed().as_secs(), + job_id, + local_ip, + address + ); + metrics::histogram!( + "server.circuit_synthesizer.blob_sending_time", + started_at.elapsed().as_secs() as f64, + "blob_size_in_gb" => blob_size_in_gb.to_string(), + ); + handle_successful_sent_assembly(job_id, pool); +} + +fn handle_successful_sent_assembly(job_id: u32, pool: ConnectionPool) { + // releasing prover instance in gpu_prover_queue by marking it available is done by prover itself. + // we don't do it here to avoid race condition. 
+ + // mark the job as `in_gpu_proof` + pool.clone() + .access_storage_blocking() + .prover_dal() + .update_status(job_id, "in_gpu_proof"); +} + +fn handle_unreachable_prover_instance(job_id: u32, address: SocketAddress, pool: ConnectionPool) { + // mark prover instance in gpu_prover_queue dead + pool.clone() + .access_storage_blocking() + .gpu_prover_queue_dal() + .update_prover_instance_status(address, GpuProverInstanceStatus::Dead, 0); + + let prover_config = ProverConfigs::from_env().non_gpu; + // mark the job as failed + pool.clone() + .access_storage_blocking() + .prover_dal() + .save_proof_error( + job_id, + "prover instance unreachable".to_string(), + prover_config.max_attempts, + ); +} diff --git a/core/bin/circuit_synthesizer/src/main.rs b/core/bin/circuit_synthesizer/src/main.rs new file mode 100644 index 000000000000..76be71623906 --- /dev/null +++ b/core/bin/circuit_synthesizer/src/main.rs @@ -0,0 +1,85 @@ +extern crate core; + +use std::cell::RefCell; + +use futures::{future, SinkExt, StreamExt}; +use futures::channel::mpsc; +use futures::executor::block_on; +use structopt::StructOpt; +use tokio::sync::watch; +use tokio::task::JoinHandle; + +use prometheus_exporter::run_prometheus_exporter; +use zksync_config::configs::CircuitSynthesizerConfig; +use zksync_config::configs::utils::Prometheus; +use zksync_dal::ConnectionPool; +use zksync_queued_job_processor::JobProcessor; + +use crate::circuit_synthesizer::CircuitSynthesizer; + +mod circuit_synthesizer; + +#[derive(Debug, StructOpt)] +#[structopt(name = "TODO", about = "TODO")] +struct Opt { + /// Number of times circuit_synthesizer should be run. 
+ #[structopt(short = "n", long = "n_iterations")] + number_of_iterations: Option, +} + +pub async fn wait_for_tasks(task_futures: Vec>) { + match future::select_all(task_futures).await.0 { + Ok(_) => { + vlog::info!("One of the actors finished its run, while it wasn't expected to do it"); + } + Err(error) => { + vlog::info!( + "One of the tokio actors unexpectedly finished with error: {:?}", + error + ); + } + } +} + +#[tokio::main] +async fn main() { + let opt = Opt::from_args(); + vlog::init(); + let config: CircuitSynthesizerConfig = CircuitSynthesizerConfig::from_env(); + let pool = ConnectionPool::new(Some(1), true); + let circuit_synthesizer = CircuitSynthesizer::new(config.clone()); + + let (stop_sender, stop_receiver) = watch::channel(false); + + let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); + { + let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); + ctrlc::set_handler(move || { + let mut sender = stop_signal_sender.borrow_mut(); + block_on(sender.send(true)).expect("Ctrl+C signal send"); + }) + .expect("Error setting Ctrl+C handler"); + } + vlog::info!("Starting circuit synthesizer"); + let prometheus_config = Prometheus { + listener_port: config.prometheus_listener_port, + pushgateway_url: config.prometheus_pushgateway_url, + push_interval_ms: config.prometheus_push_interval_ms, + }; + let tasks = vec![ + run_prometheus_exporter(prometheus_config, true), + tokio::spawn(circuit_synthesizer.run( + pool, + stop_receiver, + opt.number_of_iterations, + ))]; + + tokio::select! 
{ + _ = async { wait_for_tasks(tasks).await } => {}, + _ = async { stop_signal_receiver.next().await } => { + vlog::info!("Stop signal received, shutting down"); + }, + } + ; + let _ = stop_sender.send(true); +} diff --git a/core/bin/contract-verifier/Cargo.toml b/core/bin/contract-verifier/Cargo.toml new file mode 100644 index 000000000000..6c354256cb0b --- /dev/null +++ b/core/bin/contract-verifier/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "zksync_contract_verifier" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +description = "The zkEVM contract verifier" +publish = false # We don't want to publish our binaries. + +[dependencies] +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_contracts = { path = "../../lib/contracts", version = "1.0" } +zksync_queued_job_processor = { path = "../../lib/queued_job_processor", version = "1.0" } +prometheus_exporter = { path = "../../lib/prometheus_exporter", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } + +tokio = { version = "1", features = ["time"] } +futures = { version = "0.3", features = ["compat"] } +ctrlc = { version = "3.1", features = ["termination"] } +thiserror = "1.0" +chrono = "0.4" +serde_json = "1.0" +ethabi = "16.0.0" +metrics = "0.20" +hex = "0.4" +serde = { version = "1.0", features = ["derive"] } +structopt = "0.3.20" +lazy_static = "1.4" diff --git a/core/bin/contract-verifier/src/error.rs b/core/bin/contract-verifier/src/error.rs new file mode 100644 index 000000000000..d38c82389ef5 --- /dev/null +++ b/core/bin/contract-verifier/src/error.rs @@ -0,0 +1,27 @@ +#[derive(Debug, Clone, thiserror::Error)] +pub enum ContractVerifierError { + #[error("Internal error")] + InternalError, + 
#[error("Deployed bytecode is not equal to generated one from given source")] + BytecodeMismatch, + #[error("Constructor arguments are not correct")] + IncorrectConstructorArguments, + #[error("Compilation takes too much time")] + CompilationTimeout, + #[error("ZkSolc error: {0}")] + ZkSolcError(String), + #[error("Compilation error")] + CompilationError(serde_json::Value), + #[error("Unknown zksolc version: {0}")] + UnknownZkSolcVersion(String), + #[error("Unknown solc version: {0}")] + UnknownSolcVersion(String), + #[error("Contract with {0} name is missing in sources")] + MissingContract(String), + #[error("There is no {0} source file")] + MissingSource(String), + #[error("Contract with {0} name is an abstract and thus is not verifiable")] + AbstractContract(String), + #[error("Failed to deserialize standard JSON input")] + FailedToDeserializeInput, +} diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs new file mode 100644 index 000000000000..ed625028f83b --- /dev/null +++ b/core/bin/contract-verifier/src/main.rs @@ -0,0 +1,136 @@ +use std::cell::RefCell; + +use zksync_config::{ + configs::utils::Prometheus as PrometheusConfig, ApiConfig, ContractVerifierConfig, +}; +use zksync_dal::ConnectionPool; +use zksync_queued_job_processor::JobProcessor; + +use futures::{channel::mpsc, executor::block_on, future, SinkExt, StreamExt}; +use tokio::sync::watch; +use tokio::task::JoinHandle; + +use crate::verifier::ContractVerifier; + +pub mod error; +pub mod verifier; +pub mod zksolc_utils; + +pub async fn wait_for_tasks(task_futures: Vec>) { + match future::select_all(task_futures).await.0 { + Ok(_) => { + vlog::info!("One of the actors finished its run, while it wasn't expected to do it"); + } + Err(error) => { + vlog::info!( + "One of the tokio actors unexpectedly finished with error: {:?}", + error + ); + } + } +} + +async fn update_compiler_versions(connection_pool: &ConnectionPool) { + let mut storage = 
connection_pool.access_storage().await; + let mut transaction = storage.start_transaction().await; + + let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into()); + + let zksolc_path = format!("{}/etc/zksolc-bin/", zksync_home); + let zksolc_versions: Vec = std::fs::read_dir(zksolc_path) + .unwrap() + .filter_map(|file| { + let file = file.unwrap(); + if file.file_type().unwrap().is_dir() { + Some(file.file_name().into_string().unwrap()) + } else { + None + } + }) + .collect(); + transaction + .explorer() + .contract_verification_dal() + .set_zksolc_versions(zksolc_versions) + .unwrap(); + + let solc_path = format!("{}/etc/solc-bin/", zksync_home); + let solc_versions: Vec = std::fs::read_dir(solc_path) + .unwrap() + .filter_map(|file| { + let file = file.unwrap(); + if file.file_type().unwrap().is_dir() { + Some(file.file_name().into_string().unwrap()) + } else { + None + } + }) + .collect(); + transaction + .explorer() + .contract_verification_dal() + .set_solc_versions(solc_versions) + .unwrap(); + + transaction.commit().await; +} + +use structopt::StructOpt; + +#[derive(StructOpt)] +#[structopt(name = "zkSync contract code verifier", author = "Matter Labs")] +struct Opt { + /// Number of jobs to process. If None, runs indefinitely. 
+ #[structopt(long)] + jobs_number: Option, +} + +#[tokio::main] +async fn main() { + let opt = Opt::from_args(); + + let verifier_config = ContractVerifierConfig::from_env(); + let prometheus_config = PrometheusConfig { + listener_port: verifier_config.prometheus_port, + ..ApiConfig::from_env().prometheus + }; + let pool = ConnectionPool::new(Some(1), true); + + let sentry_guard = vlog::init(); + match sentry_guard { + Some(_) => vlog::info!( + "Starting Sentry url: {}", + std::env::var("MISC_SENTRY_URL").unwrap(), + ), + None => vlog::info!("No sentry url configured"), + } + + let (stop_sender, stop_receiver) = watch::channel(false); + let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); + { + let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); + ctrlc::set_handler(move || { + let mut sender = stop_signal_sender.borrow_mut(); + block_on(sender.send(true)).expect("Ctrl+C signal send"); + }) + .expect("Error setting Ctrl+C handler"); + } + + update_compiler_versions(&pool).await; + + let contract_verifier = ContractVerifier::new(verifier_config); + let tasks = vec![ + tokio::spawn(contract_verifier.run(pool, stop_receiver, opt.jobs_number)), + prometheus_exporter::run_prometheus_exporter(prometheus_config, false), + ]; + tokio::select! { + _ = async { wait_for_tasks(tasks).await } => {}, + _ = async { stop_signal_receiver.next().await } => { + vlog::info!("Stop signal received, shutting down"); + }, + }; + let _ = stop_sender.send(true); + + // Sleep for some time to let verifier gracefully stop. 
+ tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; +} diff --git a/core/bin/contract-verifier/src/verifier.rs b/core/bin/contract-verifier/src/verifier.rs new file mode 100644 index 000000000000..e160fe4b93f8 --- /dev/null +++ b/core/bin/contract-verifier/src/verifier.rs @@ -0,0 +1,337 @@ +use std::collections::HashMap; +use std::env; +use std::path::Path; +use std::time::{Duration, Instant}; + +use chrono::Utc; +use ethabi::Function; +use lazy_static::lazy_static; +use tokio::time; + +use zksync_config::ContractVerifierConfig; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_queued_job_processor::{async_trait, JobProcessor}; +use zksync_types::explorer_api::{ + CompilationArtifacts, DeployContractCalldata, SourceCodeData, VerificationInfo, + VerificationRequest, +}; + +use crate::error::ContractVerifierError; +use crate::zksolc_utils::{CompilerInput, Optimizer, Settings, Source, ZkSolc}; + +lazy_static! { + static ref CREATE_CONTRACT_FUNCTION: Function = zksync_contracts::deployer_contract() + .function("create") + .unwrap() + .clone(); +} + +#[derive(Debug)] +pub struct ContractVerifier { + config: ContractVerifierConfig, +} + +impl ContractVerifier { + pub fn new(config: ContractVerifierConfig) -> Self { + Self { config } + } + + async fn verify( + storage: &mut StorageProcessor<'_>, + mut request: VerificationRequest, + config: ContractVerifierConfig, + ) -> Result { + let artifacts = Self::compile(request.clone(), config).await?; + + // Bytecode should be present because it is checked when accepting request. + let (deployed_bytecode, creation_tx_calldata) = storage + .explorer() + .contract_verification_dal() + .get_contract_info_for_verification(request.req.contract_address) + .unwrap() + .ok_or_else(|| { + vlog::warn!("Contract is missing in DB for already accepted verification request. 
Contract address: {:#?}", request.req.contract_address); + ContractVerifierError::InternalError + })?; + let (constructor_arguments, to_ignore) = + Self::decode_constructor_arguments_from_calldata(creation_tx_calldata); + + if artifacts.bytecode == deployed_bytecode + && (to_ignore || request.req.constructor_arguments.0 == constructor_arguments) + { + if to_ignore { + request.req.constructor_arguments = Vec::new().into(); + } + + Ok(VerificationInfo { + request, + artifacts, + verified_at: Utc::now(), + }) + } else if artifacts.bytecode != deployed_bytecode { + Err(ContractVerifierError::BytecodeMismatch) + } else { + Err(ContractVerifierError::IncorrectConstructorArguments) + } + } + + async fn compile( + request: VerificationRequest, + config: ContractVerifierConfig, + ) -> Result { + // Users may provide either just contract name or + // source file name and contract name joined with ":". + let (file_name, contract_name) = + if let Some((file_name, contract_name)) = request.req.contract_name.rsplit_once(':') { + (file_name.to_string(), contract_name.to_string()) + } else { + ( + format!("{}.sol", request.req.contract_name), + request.req.contract_name.clone(), + ) + }; + let input = Self::build_compiler_input(request.clone(), file_name.clone())?; + + let zksync_home = env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into()); + let zksolc_path = Path::new(&zksync_home) + .join("etc") + .join("zksolc-bin") + .join(request.req.compiler_zksolc_version.as_str()) + .join("zksolc"); + if !zksolc_path.exists() { + return Err(ContractVerifierError::UnknownZkSolcVersion( + request.req.compiler_zksolc_version, + )); + } + + let solc_path = Path::new(&zksync_home) + .join("etc") + .join("solc-bin") + .join(request.req.compiler_solc_version.as_str()) + .join("solc"); + if !solc_path.exists() { + return Err(ContractVerifierError::UnknownSolcVersion( + request.req.compiler_solc_version, + )); + } + + let zksolc = ZkSolc::new(zksolc_path, solc_path); + + let output = 
time::timeout(config.compilation_timeout(), zksolc.async_compile(&input)) + .await + .map_err(|_| ContractVerifierError::CompilationTimeout)??; + + if let Some(errors) = output.get("errors") { + let errors = errors.as_array().unwrap().clone(); + if errors + .iter() + .any(|err| err["severity"].as_str().unwrap() == "error") + { + let error_messages = errors + .into_iter() + .map(|err| err["formattedMessage"].clone()) + .collect(); + return Err(ContractVerifierError::CompilationError( + serde_json::Value::Array(error_messages), + )); + } + } + + let contracts = output["contracts"] + .get(file_name.as_str()) + .cloned() + .ok_or(ContractVerifierError::MissingSource(file_name))?; + let contract = contracts + .get(&contract_name) + .cloned() + .ok_or(ContractVerifierError::MissingContract(contract_name))?; + let bytecode_str = contract["evm"]["bytecode"]["object"].as_str().ok_or( + ContractVerifierError::AbstractContract(request.req.contract_name), + )?; + let bytecode = hex::decode(bytecode_str).unwrap(); + let abi = contract["abi"].clone(); + if !abi.is_array() { + vlog::error!( + "zksolc returned unexpected value for ABI: {}", + serde_json::to_string_pretty(&abi).unwrap() + ); + return Err(ContractVerifierError::InternalError); + } + + Ok(CompilationArtifacts { bytecode, abi }) + } + + fn build_compiler_input( + request: VerificationRequest, + file_name: String, + ) -> Result { + let default_output_selection = serde_json::json!( + { + "*": { + "*": [ "abi" ], + "": [ "abi" ] + } + } + ); + + match request.req.source_code_data { + SourceCodeData::SingleFile(source_code) => { + let source = Source { + content: source_code, + }; + let sources: HashMap = + vec![(file_name, source)].into_iter().collect(); + let optimizer = Optimizer::new(request.req.optimization_used); + + let settings = Settings { + libraries: None, + output_selection: Some(default_output_selection), + optimizer, + }; + + Ok(CompilerInput { + language: "Solidity".to_string(), + sources, + settings, + }) 
+ } + SourceCodeData::StandardJsonInput(map) => { + let mut compiler_input: CompilerInput = + serde_json::from_value(serde_json::Value::Object(map)) + .map_err(|_| ContractVerifierError::FailedToDeserializeInput)?; + // Set default output selection even if it is different in request. + compiler_input.settings.output_selection = Some(default_output_selection); + Ok(compiler_input) + } + } + } + + fn decode_constructor_arguments_from_calldata( + calldata: DeployContractCalldata, + ) -> (Vec, bool) { + match calldata { + DeployContractCalldata::Deploy(calldata) => { + // `calldata` is abi encoded call of `function create(bytes32 _salt, bytes32 _bytecodeHash, bytes _input)`. + // Constructor arguments are in the third parameter. + let tokens = CREATE_CONTRACT_FUNCTION + .decode_input(&calldata[4..]) + .expect("Failed to decode constructor arguments"); + ( + tokens[2] + .clone() + .into_bytes() + .expect("The third parameter of `create` should be of type `bytes`"), + false, + ) + } + DeployContractCalldata::Ignore => (Vec::new(), true), + } + } + + fn process_result( + storage: &mut StorageProcessor<'_>, + request_id: usize, + verification_result: Result, + ) { + match verification_result { + Ok(info) => { + storage + .explorer() + .contract_verification_dal() + .save_verification_info(info) + .unwrap(); + vlog::info!("Successfully processed request with id = {}", request_id); + } + Err(error) => { + let error_message = error.to_string(); + let compilation_errors = match error { + ContractVerifierError::CompilationError(compilation_errors) => { + compilation_errors + } + _ => serde_json::Value::Array(Vec::new()), + }; + storage + .explorer() + .contract_verification_dal() + .save_verification_error(request_id, error_message, compilation_errors, None) + .unwrap(); + vlog::info!("Request with id = {} was failed", request_id); + } + } + } +} + +#[async_trait] +impl JobProcessor for ContractVerifier { + type Job = VerificationRequest; + type JobId = usize; + type 
JobArtifacts = (); + + const SERVICE_NAME: &'static str = "contract_verifier"; + + async fn get_next_job( + &self, + connection_pool: ConnectionPool, + ) -> Option<(Self::JobId, Self::Job)> { + let mut connection = connection_pool.access_storage().await; + + // Time overhead for all operations except for compilation. + const TIME_OVERHEAD: Duration = Duration::from_secs(10); + + // Considering that jobs that reach compilation timeout will be executed in + // `compilation_timeout` + `non_compilation_time_overhead` (which is significantly less than `compilation_timeout`), + // we re-pick up jobs that are being executed for a bit more than `compilation_timeout`. + let job = connection + .explorer() + .contract_verification_dal() + .get_next_queued_verification_request(self.config.compilation_timeout() + TIME_OVERHEAD) + .unwrap(); + + job.map(|job| (job.id, job)) + } + + async fn save_failure( + connection_pool: ConnectionPool, + job_id: usize, + _started_at: Instant, + error: String, + ) -> () { + let mut connection = connection_pool.access_storage().await; + + connection + .explorer() + .contract_verification_dal() + .save_verification_error( + job_id, + "Internal error".to_string(), + serde_json::Value::Array(Vec::new()), + Some(error), + ) + .unwrap(); + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + connection_pool: ConnectionPool, + job: VerificationRequest, + started_at: Instant, + ) -> tokio::task::JoinHandle<()> { + tokio::task::spawn(async move { + vlog::info!("Started to process request with id = {}", job.id); + + let config: ContractVerifierConfig = ContractVerifierConfig::from_env(); + let mut connection = connection_pool.access_storage_blocking(); + + let job_id = job.id; + let verification_result = Self::verify(&mut connection, job, config).await; + Self::process_result(&mut connection, job_id, verification_result); + + metrics::histogram!( + "api.contract_verifier.request_processing_time", + started_at.elapsed() + ); + }) + } + + 
async fn save_result(_: ConnectionPool, _: Self::JobId, _: Instant, _: Self::JobArtifacts) {} +} diff --git a/core/bin/contract-verifier/src/zksolc_utils.rs b/core/bin/contract-verifier/src/zksolc_utils.rs new file mode 100644 index 000000000000..badc709c396e --- /dev/null +++ b/core/bin/contract-verifier/src/zksolc_utils.rs @@ -0,0 +1,104 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::PathBuf; +use std::process::Stdio; + +use crate::error::ContractVerifierError; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CompilerInput { + /// The input language. + pub language: String, + /// The input source code files hashmap. + pub sources: HashMap, + /// The compiler settings. + pub settings: Settings, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Source { + /// The source code file content. + pub content: String, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Settings { + /// The linker library addresses. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub libraries: Option>>, + /// The output selection filters. + pub output_selection: Option, + /// The optimizer settings. + pub optimizer: Optimizer, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Optimizer { + /// Whether the optimizer is enabled. + pub enabled: bool, +} + +impl Optimizer { + /// + /// A shortcut constructor. 
+ /// + pub fn new(enabled: bool) -> Self { + Self { enabled } + } +} + +pub struct ZkSolc { + zksolc_path: PathBuf, + solc_path: PathBuf, +} + +impl ZkSolc { + pub fn new(zksolc_path: impl Into, solc_path: impl Into) -> Self { + ZkSolc { + zksolc_path: zksolc_path.into(), + solc_path: solc_path.into(), + } + } + + pub async fn async_compile( + &self, + input: &CompilerInput, + ) -> Result { + use tokio::io::AsyncWriteExt; + let content = serde_json::to_vec(input).unwrap(); + let mut child = tokio::process::Command::new(&self.zksolc_path) + .arg("--standard-json") + .arg("--solc") + .arg(self.solc_path.to_str().unwrap()) + .stdin(Stdio::piped()) + .stderr(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn() + .map_err(|_err| ContractVerifierError::InternalError)?; + let stdin = child.stdin.as_mut().unwrap(); + stdin + .write_all(&content) + .await + .map_err(|_err| ContractVerifierError::InternalError)?; + stdin + .flush() + .await + .map_err(|_err| ContractVerifierError::InternalError)?; + + let output = child + .wait_with_output() + .await + .map_err(|_err| ContractVerifierError::InternalError)?; + if output.status.success() { + Ok(serde_json::from_slice(&output.stdout).expect("Compiler output must be valid JSON")) + } else { + Err(ContractVerifierError::ZkSolcError( + String::from_utf8_lossy(&output.stderr).to_string(), + )) + } + } +} diff --git a/core/bin/events_tx_initiator_address_migration/Cargo.toml b/core/bin/events_tx_initiator_address_migration/Cargo.toml new file mode 100644 index 000000000000..fffbe2f35b78 --- /dev/null +++ b/core/bin/events_tx_initiator_address_migration/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "events_tx_initiator_address_migration" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +tokio = { version = "1" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" 
} \ No newline at end of file diff --git a/core/bin/events_tx_initiator_address_migration/src/main.rs b/core/bin/events_tx_initiator_address_migration/src/main.rs new file mode 100644 index 000000000000..9eac8a81d7b6 --- /dev/null +++ b/core/bin/events_tx_initiator_address_migration/src/main.rs @@ -0,0 +1,24 @@ +use zksync_dal::ConnectionPool; +use zksync_types::MiniblockNumber; + +#[tokio::main] +async fn main() { + let pool = ConnectionPool::new(Some(1), true); + let mut storage = pool.access_storage().await; + let last_sealed_miniblock = storage.blocks_dal().get_sealed_miniblock_number(); + + let mut current_miniblock_number = MiniblockNumber(0); + let block_range = 10000u32; + while current_miniblock_number <= last_sealed_miniblock { + let to_miniblock_number = current_miniblock_number + block_range - 1; + storage + .events_dal() + .set_tx_initiator_address(current_miniblock_number, to_miniblock_number); + println!( + "Processed miniblocks {}-{}", + current_miniblock_number, to_miniblock_number + ); + + current_miniblock_number += block_range; + } +} diff --git a/core/bin/prover/Cargo.lock b/core/bin/prover/Cargo.lock new file mode 100644 index 000000000000..9d8a38dfdb96 --- /dev/null +++ b/core/bin/prover/Cargo.lock @@ -0,0 +1,6393 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aes" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" +dependencies = [ + "aes-soft", + "aesni", + "cipher", +] + +[[package]] +name = "aes-ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7729c3cde54d67063be556aeac75a81330d802f0259500ca40cb52967f975763" +dependencies = [ + "aes-soft", + "aesni", + "cipher", + "ctr", +] + +[[package]] +name = "aes-soft" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "aesni" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.8", + "once_cell", + "version_check", +] + 
+[[package]] +name = "aho-corasick" +version = "0.7.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" + +[[package]] +name = "api" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bellman_ce", + "cfg-if 1.0.0", + "gpu-prover", + "num_cpus", + "serde", +] + +[[package]] +name = "arr_macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a105bfda48707cf19220129e78fca01e9639433ffaef4163546ed8fb04120a5" +dependencies = [ + "arr_macro_impl", + "proc-macro-hack", +] + +[[package]] +name = "arr_macro_impl" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" +dependencies = [ + "proc-macro-hack", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" +version = "0.4.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "async-channel" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-executor" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c374dda1ed3e7d8f0d9ba58715f924862c63eae6849c92d3a18e7fbde9e2794" +dependencies = [ + "async-lock", + "autocfg 1.1.0", + "concurrent-queue", + "futures-lite", + "libc", + "log", + "parking", + "polling", + "slab", + "socket2", + "waker-fn", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-lock" +version = "2.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8101efe8695a6c17e02911402145357e718ac92d3ff88ae8419e84b1707b685" +dependencies = [ + "event-listener", + "futures-lite", +] + +[[package]] +name = "async-native-tls" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9e7a929bd34c68a82d58a4de7f86fffdaf97fb2af850162a7bb19dd7269b33" +dependencies = [ + "async-std", + "native-tls", + "thiserror", + "url", +] + +[[package]] +name = "async-process" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6381ead98388605d0d9ff86371043b5aa922a3905824244de40dc263a14fcba4" +dependencies = [ + "async-io", + "async-lock", + "autocfg 1.1.0", + "blocking", + "cfg-if 1.0.0", + "event-listener", + "futures-lite", + "libc", + "signal-hook", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils 0.8.14", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite 0.2.9", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + 
+[[package]] +name = "async-task" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" + +[[package]] +name = "async-trait" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "atoi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" +dependencies = [ + "num-traits", +] + +[[package]] +name = "atomic-waker" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "base64ct" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +dependencies = [ + "serde", +] + +[[package]] +name = "bellman_ce" +version = "0.3.2" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayvec 0.7.2", + "bit-vec", + "blake2s_const", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.7.3", + "futures 0.3.26", + "hex", + "lazy_static", + "num_cpus", + "pairing_ce", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "bigdecimal" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc403c26e6b03005522e6e8053384c4e881dfe5b2bf041c0c2c49be33d64a539" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "clap", + "env_logger 0.9.3", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2 1.0.51", + "quote 1.0.23", + "regex", + "rustc-hash", + "shlex", + "which", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +dependencies = [ + "crypto-mac 0.8.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "blake2-rfc_bellman_edition" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" +dependencies = [ + "arrayvec 0.4.12", + "byteorder", + "constant_time_eq", +] + +[[package]] +name = "blake2s_const" +version = "0.6.0" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-modes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" +dependencies = [ + "block-padding", + "cipher", +] + +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + +[[package]] +name = "blocking" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", +] + +[[package]] +name = "bstr" +version = "1.2.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7f0778972c64420fdedc63f09919c8a88bda7b25135357fd25a5d9f3257e832" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "bzip2-sys" +version = "0.1.11+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" +dependencies = [ + "cc", + "libc", + "pkg-config", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "rustc-serialize", + "serde", + "time 0.1.43", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "cipher" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +dependencies = [ + "generic-array", +] + +[[package]] +name = "circuit_testing" +version = "0.1.0" +source = "git+https://github.com/matter-labs/circuit_testing.git?branch=main#7160c45c844944748663c91b6860c77f5376d9e4" +dependencies = [ + "bellman_ce", +] + +[[package]] +name = "clang-sys" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "cloud-storage" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7602ac4363f68ac757d6b87dd5d850549a14d37489902ae639c06ecec06ad275" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + 
"chrono", + "dotenv", + "futures-util", + "hex", + "jsonwebtoken", + "lazy_static", + "openssl", + "percent-encoding", + "reqwest", + "serde", + "serde_json", + "tokio 1.25.0", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + +[[package]] +name = "codegen" +version = "0.1.0" +source = "git+https://github.com/matter-labs/solidity_plonk_verifier.git?branch=dev#4fb6397f778a580c9207ec23661228f5da7e66b4" +dependencies = [ + "ethereum-types", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "handlebars", + "hex", + "paste", + "rescue_poseidon", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d" +dependencies = [ + "indexmap", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes 1.4.0", + "memchr", +] + +[[package]] +name = "concurrent-queue" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" +dependencies = [ + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "const-oid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel 0.4.4", + "crossbeam-deque 0.7.4", + "crossbeam-epoch 0.8.2", + "crossbeam-queue 0.2.3", + "crossbeam-utils 0.7.2", +] + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-epoch 0.9.13", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" +dependencies = [ + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg 
1.1.0", + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "lazy_static", + "maybe-uninit", + "memoffset 0.5.6", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", + "memoffset 0.7.1", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 
0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "cs_derive" +version = "0.1.0" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "ctor" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +dependencies = [ + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" +dependencies = [ + "cipher", +] + +[[package]] +name = "ctrlc" +version = "3.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" +dependencies = [ + "nix", + "windows-sys 0.45.0", +] + +[[package]] +name = "cxx" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "scratch", + "syn 1.0.107", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "strsim 0.10.0", + "syn 1.0.107", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustc_version", + "syn 1.0.107", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer 0.10.3", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = 
"dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "dotenv" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "eip712-signature" +version = "0.1.0" +source = "git+https://github.com/vladbochok/eip712-signature#30b11455e7d613313e8c12d2aad961fd4bf902fe" +dependencies = [ + "ethereum-types", + "parity-crypto", + "thiserror", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint", + "der", + "digest 0.10.6", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "ethabi" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c98847055d934070b90e806e12d3936b787d0a115068981c1d8dfd5dfef5a5" +dependencies = [ + "ethereum-types", + "hex", + "serde", + "serde_json", + "sha3 0.9.1", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-rlp", + "impl-serde", + "tiny-keccak 2.0.2", +] + +[[package]] +name = "ethereum-types" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05136f7057fe789f06e6d41d07b34e6f70d8c86e5693b60f97aaa6553553bdaf" 
+dependencies = [ + "ethbloom", + "fixed-hash", + "impl-rlp", + "impl-serde", + "primitive-types", + "uint", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fastrand" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +dependencies = [ + "instant", +] + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" +dependencies = [ + "byteorder", + "ff_derive_ce", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "ff_derive_ce" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" +dependencies = [ + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+ssh://git@github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+https://github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 
1.5.0", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + +[[package]] +name = "futures" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608" + +[[package]] +name = "futures-executor" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-intrusive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot 0.11.2", 
+] + +[[package]] +name = "futures-io" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" + +[[package]] +name = "futures-lite" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite 0.2.9", + "waker-fn", +] + +[[package]] +name = "futures-locks" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c4e684ddb2d8a4db5ca8a02b35156da129674ba4412b6f528698d58c594954" +dependencies = [ + "futures 0.3.26", + "tokio 0.2.25", +] + +[[package]] +name = "futures-macro" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "futures-sink" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364" + +[[package]] +name = "futures-task" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +dependencies = [ + "gloo-timers", + "send_wrapper", +] + +[[package]] +name = "futures-util" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1" +dependencies = [ + "futures 0.1.31", + 
"futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite 0.2.9", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "221996f774192f0f718773def8201c4ae31f02616a54ccfc2d358bb0e5cefdec" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "globset" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +dependencies = [ + "aho-corasick", + "bstr", + "fnv", + "log", + "regex", +] + +[[package]] +name = "gloo-net" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "js-sys", + "pin-project", + 
"serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8e8fc851e9c7b9852508bc6e3f690f452f474417e8545ec9857b7f7377036b5" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "gpu-ffi" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bindgen", + "crossbeam 0.8.2", + "derivative", + "futures 0.3.26", + "futures-locks", + "num_cpus", +] + +[[package]] +name = "gpu-prover" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bit-vec", + "cfg-if 1.0.0", + "crossbeam 0.8.2", + "franklin-crypto 0.0.5 (git+ssh://git@github.com/matter-labs/franklin-crypto?branch=dev)", + "gpu-ffi", + "itertools", + "num_cpus", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +dependencies = [ + "bytes 1.4.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio 1.25.0", 
+ "tokio-util 0.7.6", + "tracing", +] + +[[package]] +name = "handlebars" +version = "4.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashlink" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +dependencies = [ + "hashbrown 0.11.2", +] + +[[package]] +name = "headers" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +dependencies = [ + "base64 0.13.1", + "bitflags", + "bytes 1.4.0", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +dependencies = [ + "crypto-mac 0.10.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +dependencies = [ + "crypto-mac 0.11.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +dependencies = [ + "bytes 1.4.0", + "fnv", + "itoa 1.0.5", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes 1.4.0", + "http", + "pin-project-lite 0.2.9", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c" +dependencies = [ + "bytes 1.4.0", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 1.0.5", + "pin-project-lite 0.2.9", + "socket2", + "tokio 1.25.0", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "log", + "rustls", + "rustls-native-certs", + "tokio 1.25.0", + "tokio-rustls", + "webpki-roots", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite 0.2.9", + "tokio 1.25.0", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.4.0", + "hyper", + "native-tls", + "tokio 1.25.0", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg 1.1.0", + "hashbrown 0.12.3", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +dependencies = [ + "libc", + "windows-sys 0.45.0", +] + +[[package]] +name = "ipnet" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" + +[[package]] +name = "ipnetwork" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"02c3eaab3ac0ede60ffa41add21970a7df7d91772c03383aac6c2c3d53cc716b" + +[[package]] +name = "is-terminal" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes", + "rustix", + "windows-sys 0.45.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" + +[[package]] +name = "jobserver" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.26", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpsee" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" 
+dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965de52763f2004bc91ac5bcec504192440f0b568a5d621c59d9dbd6f886c3fb" +dependencies = [ + "anyhow", + "futures-channel", + "futures-timer", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio 1.25.0", + "tokio-rustls", + "tokio-util 0.7.6", + "tracing", + "webpki-roots", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e70b4439a751a5de7dd5ed55eacff78ebf4ffe0fc009cb1ebb11417f5b536b" +dependencies = [ + "anyhow", + "arrayvec 0.7.2", + "async-lock", + "async-trait", + "beef", + "futures-channel", + "futures-timer", + "futures-util", + "globset", + "hyper", + "jsonrpsee-types", + "parking_lot 0.12.1", + "rand 0.8.5", + "rustc-hash", + "serde", + "serde_json", + "soketto", + "thiserror", + "tokio 1.25.0", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc345b0a43c6bc49b947ebeb936e886a419ee3d894421790c969cc56040542ad" +dependencies = [ + "async-trait", + "hyper", + "hyper-rustls", + "jsonrpsee-core", + "jsonrpsee-types", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio 1.25.0", + "tracing", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baa6da1e4199c10d7b1d0a6e5e8bd8e55f351163b6f4b3cbb044672a69bd4c1c" +dependencies = [ + "heck 0.4.1", + 
"proc-macro-crate", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb69dad85df79527c019659a992498d03f8495390496da2f07e6c24c2b356fc" +dependencies = [ + "futures-channel", + "futures-util", + "http", + "hyper", + "jsonrpsee-core", + "jsonrpsee-types", + "serde", + "serde_json", + "soketto", + "tokio 1.25.0", + "tokio-stream", + "tokio-util 0.7.6", + "tower", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bd522fe1ce3702fd94812965d7bb7a3364b1c9aba743944c5a00529aae80f8c" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" +dependencies = [ + "http", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonwebtoken" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32" +dependencies = [ + "base64 0.12.3", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if 1.0.0", + 
"ecdsa", + "elliptic-curve", + "sha2 0.10.6", +] + +[[package]] +name = "keccak" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.139" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if 1.0.0", + "winapi", +] + +[[package]] +name = "librocksdb-sys" +version = "0.6.1+6.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bc587013734dadb7cf23468e531aa120788b87243648be42e2d3a072186291" +dependencies = [ + "bindgen", + "bzip2-sys", + "cc", + "glob", + "libc", + "libz-sys", +] + +[[package]] +name = "libz-sys" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "local-ip-address" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa9d02443a1741e9f51dafdfcbffb3863b2a89c457d762b40337d6c5153ef81" +dependencies = [ + "libc", + "neli", + "thiserror", + "windows-sys 0.42.0", +] + +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg 1.1.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", + "value-bag", +] + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "md-5" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5a279bb9607f9f53c22d496eade00d138d1bdcccd07d74650387cf94942a15" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "metrics" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b9b8653cec6897f73b519a43fba5ee3d50f62fe9af80b428accdcc093b4a849" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8603921e1f54ef386189335f288441af761e0fc61bcb552168d9cedfe63ebc70" +dependencies = [ + "hyper", + "indexmap", + "ipnet", + "metrics", + "metrics-util", + "parking_lot 0.12.1", + "portable-atomic", + "quanta", + "thiserror", + "tokio 1.25.0", + "tracing", +] + +[[package]] +name = "metrics-macros" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "metrics-util" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d24dc2dbae22bff6f1f9326ffce828c9f07ef9cc1e8002e5279f845432a30a" +dependencies = [ + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", + "hashbrown 0.12.3", + "metrics", + "num_cpus", + "parking_lot 0.12.1", + "portable-atomic", + "quanta", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.42.0", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + 
"log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "neli" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9053554eb5dcb7e10d9cdab1206965bde870eed5d0d341532ca035e3ba221508" +dependencies = [ + "byteorder", + "libc", +] + +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "libc", + "static_assertions", +] + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b7a8e9be5e039e2ff869df49155f1c06bd01ade2117ec783e56ab0932b67a8f" +dependencies = [ + "num-bigint 0.3.3", + "num-complex 0.3.1", + "num-integer", + "num-iter", + "num-rational 0.3.2", + "num-traits", +] + +[[package]] +name = "num" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +dependencies = [ + "num-bigint 0.4.3", + "num-complex 0.4.3", + "num-integer", + "num-iter", + "num-rational 0.4.1", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafd0b45c5537c3ba526f79d3e75120036502bebacbb3f3220914067ce39dbf2" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg 1.1.0", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "openssl" +version = "0.10.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7" +dependencies = [ + "autocfg 1.1.0", + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6105e89802af13fdf48c49d7646d3b533a70e536d818aae7e78ba0433d01acb8" +dependencies = [ + "async-trait", + "crossbeam-channel 0.5.6", + "futures-channel", + "futures-executor", + "futures-util", + "js-sys", + "lazy_static", + "percent-encoding", + 
"pin-project", + "rand 0.8.5", + "thiserror", +] + +[[package]] +name = "opentelemetry-http" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449048140ee61e28f57abe6e9975eedc1f3a29855c7407bd6c12b18578863379" +dependencies = [ + "async-trait", + "bytes 1.4.0", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1a6ca9de4c8b00aa7f1a153bd76cb263287155cec642680d79d98706f3d28a" +dependencies = [ + "async-trait", + "futures 0.3.26", + "futures-util", + "http", + "opentelemetry", + "opentelemetry-http", + "prost", + "prost-build", + "reqwest", + "thiserror", + "tokio 1.25.0", + "tonic", + "tonic-build", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "985cc35d832d412224b2cffe2f9194b1b89b6aa5d0bef76d080dce09d90e62bd" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "os_info" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c424bc68d15e0778838ac013b5b3449544d8133633d8016319e7e05a820b8c0" +dependencies = [ + "log", + "serde", + "winapi", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db007b21259660d025918e653508f03050bf23fb96a88601f9936329faadc597" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "parity-crypto" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4b92ea9ddac0d6e1db7c49991e7d397d34a9fd814b4c93cda53788e8eef94e35" +dependencies = [ + "aes", + "aes-ctr", + "block-modes", + "digest 0.9.0", + "ethereum-types", + "hmac 0.10.1", + "lazy_static", + "pbkdf2 0.7.5", + "ripemd160", + "rustc-hex", + "scrypt", + "secp256k1 0.20.3", + "sha2 0.9.9", + "subtle", + "tiny-keccak 2.0.2", + "zeroize", +] + +[[package]] +name = "parity-scale-codec" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +dependencies = [ + "arrayvec 0.7.2", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +dependencies = [ + "proc-macro-crate", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "parking" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + 
"instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "password-hash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54986aa4bfc9b98c6a5f40184223658d187159d7b3c6af33f2b2aa25ae1db0fa" +dependencies = [ + "base64ct", + "rand_core 0.6.4", +] + +[[package]] +name = "paste" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" + +[[package]] +name = "pbkdf2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3b8c0d71734018084da0c0354193a5edfb81b20d2d57a92c5b154aefc554a4a" +dependencies = [ + "crypto-mac 0.10.1", +] + +[[package]] +name = "pbkdf2" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf916dd32dd26297907890d99dc2740e33f6bd9073965af4ccff2967962f5508" +dependencies = [ + "base64ct", + "crypto-mac 0.10.1", + "hmac 0.10.1", + "password-hash", + "sha2 0.9.9", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "pem" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +dependencies = [ + "base64 0.13.1", + "once_cell", + "regex", +] + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ac3922aac69a40733080f53c1ce7f91dcf57e1a5f6c52f421fadec7fbdc4b69" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06646e185566b5961b4058dd107e0a7f56e77c3f484549fb119867773c0f202" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pest_meta" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6f60b2ba541577e2a0c307c8f39d1439108120eb7903adeb6497fa880c59616" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.6", +] + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "polling" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "libc", + "log", + "wepoll-ffi", + "windows-sys 0.42.0", +] + +[[package]] +name = "portable-atomic" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", +] + 
+[[package]] +name = "proc-macro-crate" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +dependencies = [ + "once_cell", + "toml_edit 0.18.1", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proc-macro2" +version = "1.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prometheus_exporter" +version = "1.0.0" +dependencies = [ + "metrics", + "metrics-exporter-prometheus", + "tokio 1.25.0", + "vlog", + "zksync_config", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes 1.4.0", + 
"prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes 1.4.0", + "heck 0.3.3", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prost", + "prost-types", + "regex", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes 1.4.0", + "prost", +] + +[[package]] +name = "prover-service" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "api", + "bincode", + "crossbeam-utils 0.8.14", + "log", + "num_cpus", + "rand 0.4.6", + "serde", + "serde_json", + "zkevm_test_harness", +] + +[[package]] +name = "quanta" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e31331286705f455e56cca62e0e717158474ff02b7936c1fa596d983f4ae27" +dependencies = [ + "crossbeam-utils 0.8.14", + "libc", + "mach", + "once_cell", + "raw-cpuid", + "wasi 0.10.2+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + +[[package]] +name = "queues" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1475abae4f8ad4998590fe3acfe20104f0a5d48fc420c817cd2c09c3f56151f0" + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +dependencies = [ + "proc-macro2 1.0.51", +] + +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg", + "rand_xorshift", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = 
[ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.8", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_isaac" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "raw-cpuid" +version = "10.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c307f7aacdbab3f0adee67d52739a1d71112cc068d6fab169ddeb18e48877fad" +dependencies = [ + "bitflags", +] + +[[package]] +name = "rayon" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +dependencies = [ + "crossbeam-channel 
0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-utils 0.8.14", + "num_cpus", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom 0.2.8", + "redox_syscall", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "reqwest" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9" +dependencies = [ 
+ "base64 0.21.0", + "bytes 1.4.0", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite 0.2.9", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio 1.25.0", + "tokio-native-tls", + "tokio-rustls", + "tokio-util 0.7.6", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rescue_poseidon" +version = "0.4.1" +source = "git+https://github.com/matter-labs/rescue-poseidon.git#fbb3882b8f1e63dff769a1f1a59211d0e0838351" +dependencies = [ + "addchain", + "arrayvec 0.7.2", + "blake2 0.10.6", + "byteorder", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "num-bigint 0.3.3", + "num-integer", + "num-iter", + "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint", + "hmac 0.12.1", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "ripemd160" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes 1.4.0", + "rustc-hex", +] + +[[package]] +name = "rocksdb" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "620f4129485ff1a7128d184bc687470c21c7951b64779ebc9cfdad3dcd920290" +dependencies = [ + "libc", + "librocksdb-sys", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" 
+dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustversion" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" + +[[package]] +name = "ryu" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" + +[[package]] +name = "salsa20" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "399f290ffc409596022fce5ea5d4138184be4784f2b28c62c59f0d8389059a15" +dependencies = [ + "cipher", +] + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "scratch" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" + +[[package]] +name = "scrypt" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "8da492dab03f925d977776a0b7233d7b934d6dc2b94faead48928e2e9bacedb9" +dependencies = [ + "base64 0.13.1", + "hmac 0.10.1", + "pbkdf2 0.6.0", + "rand 0.7.3", + "rand_core 0.5.1", + "salsa20", + "sha2 0.9.9", + "subtle", +] + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" +dependencies = [ + "rand 0.6.5", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" + +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + +[[package]] +name = "sentry" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6097dc270a9c4555c5d6222ed243eaa97ff38e29299ed7c5cb36099033c604e" +dependencies = [ + "httpdate", + "native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-panic", + "tokio 1.25.0", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d92d1e4d591534ae4f872d6142f3b500f4ffc179a6aed8a3e86c7cc96d10a6a" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afa877b1898ff67dd9878cf4bec4e53cef7d3be9f14b1fc9e4fcdf36f8e4259" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc43eb7e4e3a444151a0fe8a0e9ce60eabd905dae33d66e257fa26f1b509c1bd" +dependencies = [ + "once_cell", + "rand 0.8.5", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-panic" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccab4fab11e3e63c45f4524bee2e75cde39cdf164cb0b0cbe6ccd1948ceddf66" 
+dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-types" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63708ec450b6bdcb657af760c447416d69c38ce421f34e5e2e9ce8118410bc7" +dependencies = [ + "debugid", + "getrandom 0.2.8", + "hex", + "serde", + "serde_json", + "thiserror", + "time 0.3.17", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "serde_json" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +dependencies = [ + "indexmap", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa 1.0.5", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2 1.0.51", 
+ "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "setup_key_generator_and_server" +version = "1.0.0" +dependencies = [ + "api", + "circuit_testing", + "itertools", + "prover-service", + "structopt", + "vlog", + "zkevm_test_harness", + "zksync_config", + "zksync_types", +] + +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug", +] + +[[package]] +name = "sha3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +dependencies = [ + "digest 0.10.6", + "keccak", +] + +[[package]] 
+name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + +[[package]] +name = "simple_asn1" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" +dependencies = [ + "chrono", + "num-bigint 0.2.6", + "num-traits", +] + +[[package]] +name = "sketches-ddsketch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ceb945e54128e09c43d8e4f1277851bd5044c6fc540bbaa2ad888f60b3da9ae7" + +[[package]] +name = "slab" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "socket2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes 1.4.0", + "futures 0.3.26", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha-1", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "splitmut" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" + +[[package]] +name = "sqlformat" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7911b0031a0247af40095838002999c7a52fba29d9739e93326e71a5a1bc9d43" +dependencies = [ + "sqlx-core", + "sqlx-macros", +] + +[[package]] +name = "sqlx-core" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"aec89bfaca8f7737439bad16d52b07f1ccd0730520d3bf6ae9d069fe4b641fb1" +dependencies = [ + "ahash", + "atoi", + "base64 0.13.1", + "bigdecimal", + "bitflags", + "byteorder", + "bytes 1.4.0", + "chrono", + "crc", + "crossbeam-channel 0.5.6", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", + "dirs", + "either", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-util", + "hashlink", + "hex", + "hmac 0.11.0", + "indexmap", + "ipnetwork", + "itoa 0.4.8", + "libc", + "log", + "md-5", + "memchr", + "num-bigint 0.3.3", + "once_cell", + "parking_lot 0.11.2", + "percent-encoding", + "rand 0.8.5", + "serde", + "serde_json", + "sha-1", + "sha2 0.9.9", + "smallvec", + "sqlformat", + "sqlx-rt", + "stringprep", + "thiserror", + "url", + "whoami", +] + +[[package]] +name = "sqlx-macros" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "584866c833511b1a152e87a7ee20dee2739746f60c858b3c5209150bc4b466f5" +dependencies = [ + "dotenv", + "either", + "heck 0.3.3", + "hex", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "serde_json", + "sha2 0.9.9", + "sqlx-core", + "sqlx-rt", + "syn 1.0.107", + "url", +] + +[[package]] +name = "sqlx-rt" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4db708cd3e459078f85f39f96a00960bd841f66ee2a669e90bf36907f5a79aae" +dependencies = [ + "async-native-tls", + "async-std", + "native-tls", +] + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.8.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustversion", + "syn 1.0.107", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid", +] + +[[package]] +name = "syn" +version = 
"1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "unicode-ident", +] + +[[package]] +name = "sync_vm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "arrayvec 0.7.2", + "cs_derive", + "derivative", + "eip712-signature", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "hex", + "itertools", + "num-bigint 0.4.3", + "num-derive 0.3.3", + "num-integer", + "num-traits", + "once_cell", + "rand 0.4.6", + "rescue_poseidon", + "serde", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "zk_evm", + "zkevm_opcode_defs", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "libc", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-log" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa 1.0.5", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" +dependencies = [ + "bytes 0.5.6", + "pin-project-lite 0.1.12", + "slab", +] + +[[package]] +name = "tokio" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +dependencies = [ + "autocfg 1.1.0", + "bytes 1.4.0", + "libc", + "memchr", + "mio", + "num_cpus", + "parking_lot 0.12.1", + "pin-project-lite 0.2.9", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.42.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-macros" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +dependencies = [ + "proc-macro2 1.0.51", 
+ "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio 1.25.0", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +dependencies = [ + "futures-core", + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6a3b08b64e6dfad376fa2432c7b1f01522e37a623c3050bc95db2d3ff21583" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite 0.2.9", + "tokio 1.25.0", + "tracing", +] + +[[package]] +name = "toml_datetime" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" + +[[package]] +name = "toml_edit" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5376256e44f2443f8896ac012507c19a012df0fe8758b55246ae51a2279db51f" +dependencies = [ + "combine", + "indexmap", + "itertools", +] + 
+[[package]] +name = "toml_edit" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +dependencies = [ + "indexmap", + "nom8", + "toml_datetime", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "prost-derive", + "tokio 1.25.0", + "tokio-stream", + "tokio-util 0.6.10", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2 1.0.51", + "prost-build", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite 0.2.9", + "rand 0.8.5", + "slab", + "tokio 1.25.0", + "tokio-util 0.7.6", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] 
+name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite 0.2.9", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbbe89715c1dbbb790059e2565353978564924ee85017b5fff365c872ff6721f" +dependencies = [ + "once_cell", + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = 
"tracing-subscriber" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time 0.3.17", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" + +[[package]] +name = "unicode-ident" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +dependencies = [ + "base64 0.13.1", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna 0.3.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "uuid" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79" +dependencies = [ + "getrandom 
0.2.8", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +dependencies = [ + "ctor", + "version_check", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vlog" +version = "1.0.0" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "sentry", + "serde_json", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "vm" +version = "0.1.0" +dependencies = [ + "hex", + "itertools", + "metrics", + "once_cell", + "thiserror", + "tracing", + "vlog", + "zk_evm", + "zkevm-assembly", + "zksync_config", + "zksync_contracts", + "zksync_crypto", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote 1.0.23", + "wasm-bindgen-macro-support", +] + +[[package]] +name = 
"wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web3" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f258e254752d210b84fe117b31f1e3cc9cbf04c0d747eb7f8cf7cf5e370f6d" +dependencies = [ + "arrayvec 0.7.2", + "base64 0.13.1", + "bytes 1.4.0", + "derive_more", + "ethabi", + "ethereum-types", + "futures 0.3.26", + "futures-timer", + "headers", + "hex", + "idna 0.2.3", + "jsonrpc-core", + "log", + "once_cell", + "parking_lot 0.12.1", + "pin-project", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_json", + "tiny-keccak 2.0.2", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "wepoll-ffi" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" +dependencies = [ + "cc", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "whoami" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45dbc71f0cdca27dc261a9bd37ddec174e4a0af2b900b890f378460f745426e3" +dependencies = [ + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" + +[[package]] +name = 
"windows_x86_64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "zeroize" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" + +[[package]] +name = "zk_evm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zk_evm.git?branch=v1.3.1#76a3877f5a0b7449bcca73d35bae3ae226996fdd" +dependencies = [ + "blake2 0.10.6", + "k256", + "lazy_static", + "num 0.4.0", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "static_assertions", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm-assembly" +version = "1.3.0" +source = "git+https://github.com/matter-labs/zkEVM-assembly.git?branch=v1.3.1#e2a2145a90ceeb54407df1be5254291a9f693422" +dependencies = [ + "env_logger 0.9.3", + "hex", + "lazy_static", + "log", + "nom", + "num-bigint 0.4.3", + "num-traits", + "regex", + "smallvec", + "structopt", + "thiserror", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm_opcode_defs" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_opcode_defs.git?branch=v1.3.1#dc5b5c463d867855514f03c179992acbde74face" +dependencies = [ + "bitflags", + "ethereum-types", + 
"lazy_static", + "sha2 0.10.6", +] + +[[package]] +name = "zkevm_test_harness" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_test_harness.git?branch=v1.3.1#2100f00f9cb79e851bf11ec21391672244e382fc" +dependencies = [ + "bincode", + "blake2 0.10.6", + "circuit_testing", + "codegen 0.2.0", + "crossbeam 0.8.2", + "derivative", + "env_logger 0.10.0", + "hex", + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "rayon", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "structopt", + "sync_vm", + "test-log", + "tracing", + "zk_evm", + "zkevm-assembly", +] + +[[package]] +name = "zksync_basic_types" +version = "1.0.0" +dependencies = [ + "serde", + "web3", +] + +[[package]] +name = "zksync_circuit_breaker" +version = "1.0.0" +dependencies = [ + "async-trait", + "convert_case 0.6.0", + "futures 0.3.26", + "hex", + "serde", + "serde_json", + "thiserror", + "tokio 1.25.0", + "zksync_config", + "zksync_contracts", + "zksync_dal", + "zksync_eth_client", + "zksync_types", + "zksync_utils", + "zksync_verification_key_generator_and_server", +] + +[[package]] +name = "zksync_config" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "envy", + "num 0.3.1", + "once_cell", + "serde", + "serde_json", + "url", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_contracts" +version = "1.0.0" +dependencies = [ + "ethabi", + "hex", + "once_cell", + "serde_json", + "zksync_utils", +] + +[[package]] +name = "zksync_crypto" +version = "1.0.0" +dependencies = [ + "base64 0.13.1", + "blake2 0.10.6", + "hex", + "once_cell", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "thiserror", + "zksync_basic_types", +] + +[[package]] +name = "zksync_dal" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-std", + "bigdecimal", + "bincode", + "hex", + "itertools", + "metrics", + "num 0.3.1", + "once_cell", + "serde_json", + "sqlx", + "thiserror", + "vlog", + "vm", + "zksync_config", + "zksync_contracts", + 
"zksync_object_store", + "zksync_state", + "zksync_storage", + "zksync_types", + "zksync_utils", + "zksync_web3_decl", +] + +[[package]] +name = "zksync_eth_client" +version = "1.0.0" +dependencies = [ + "anyhow", + "async-trait", + "hex", + "jsonrpc-core", + "metrics", + "parity-crypto", + "serde", + "thiserror", + "tokio 1.25.0", + "vlog", + "zksync_config", + "zksync_contracts", + "zksync_eth_signer", + "zksync_types", +] + +[[package]] +name = "zksync_eth_signer" +version = "1.0.0" +dependencies = [ + "async-trait", + "hex", + "jsonrpc-core", + "parity-crypto", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_derive", + "serde_json", + "thiserror", + "zksync_types", +] + +[[package]] +name = "zksync_mini_merkle_tree" +version = "1.0.0" +dependencies = [ + "once_cell", + "rayon", + "zksync_basic_types", + "zksync_crypto", +] + +[[package]] +name = "zksync_object_store" +version = "1.0.0" +dependencies = [ + "cloud-storage", + "metrics", + "tokio 1.25.0", + "vlog", + "zksync_config", + "zksync_types", +] + +[[package]] +name = "zksync_prover" +version = "1.0.0" +dependencies = [ + "api", + "bincode", + "chrono", + "ctrlc", + "ethabi", + "futures 0.3.26", + "hex", + "local-ip-address", + "metrics", + "prometheus_exporter", + "prover-service", + "queues", + "reqwest", + "serde", + "serde_json", + "setup_key_generator_and_server", + "thiserror", + "tokio 1.25.0", + "vlog", + "zkevm_test_harness", + "zksync_circuit_breaker", + "zksync_config", + "zksync_dal", + "zksync_object_store", + "zksync_prover_utils", + "zksync_types", + "zksync_utils", + "zksync_verification_key_generator_and_server", +] + +[[package]] +name = "zksync_prover_utils" +version = "1.0.0" +dependencies = [ + "metrics", + "reqwest", + "vlog", +] + +[[package]] +name = "zksync_state" +version = "1.0.0" +dependencies = [ + "vlog", + "zksync_storage", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_storage" +version = "1.0.0" +dependencies = [ + "bincode", + 
"byteorder", + "num_cpus", + "once_cell", + "rocksdb", + "serde", + "vlog", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_types" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "blake2 0.10.6", + "chrono", + "codegen 0.1.0", + "ethbloom", + "hex", + "metrics", + "num 0.3.1", + "once_cell", + "parity-crypto", + "rayon", + "rlp", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror", + "tiny-keccak 1.5.0", + "zk_evm", + "zkevm-assembly", + "zkevm_test_harness", + "zksync_basic_types", + "zksync_config", + "zksync_contracts", + "zksync_mini_merkle_tree", + "zksync_utils", +] + +[[package]] +name = "zksync_utils" +version = "1.0.0" +dependencies = [ + "anyhow", + "bigdecimal", + "envy", + "futures 0.3.26", + "hex", + "num 0.3.1", + "serde", + "thiserror", + "tokio 1.25.0", + "zk_evm", + "zksync_basic_types", +] + +[[package]] +name = "zksync_verification_key_generator_and_server" +version = "1.0.0" +dependencies = [ + "bincode", + "circuit_testing", + "ff_ce", + "hex", + "itertools", + "serde_json", + "structopt", + "toml_edit 0.14.4", + "vlog", + "zksync_types", +] + +[[package]] +name = "zksync_web3_decl" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "itertools", + "jsonrpsee", + "rlp", + "serde", + "serde_json", + "thiserror", + "zksync_types", +] diff --git a/core/bin/prover/Cargo.toml b/core/bin/prover/Cargo.toml new file mode 100644 index 000000000000..cd278bde933b --- /dev/null +++ b/core/bin/prover/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "zksync_prover" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # We don't want to publish our binaries. 
+ +[dependencies] +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_utils = {path = "../../lib/utils", version = "1.0" } +zksync_prover_utils = {path = "../../lib/prover_utils", version = "1.0" } +zksync_circuit_breaker = {path = "../../lib/circuit_breaker", version = "1.0" } +prometheus_exporter = { path = "../../lib/prometheus_exporter", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +zksync_verification_key_generator_and_server = { path = "../verification_key_generator_and_server", version = "1.0" } +zksync_object_store = { path = "../../lib/object_store", version = "1.0" } +setup_key_generator_and_server = { path = "../setup_key_generator_and_server", version = "1.0" } + + +api = { git = "https://github.com/matter-labs/heavy-ops-service.git", branch = "cleanup", features=["gpu"], default-features=false} +prover-service = { git = "https://github.com/matter-labs/heavy-ops-service.git", branch = "cleanup", features=["gpu"], default-features=false} + +zkevm_test_harness = { git = "https://github.com/matter-labs/zkevm_test_harness.git", branch = "v1.3.1"} + +tokio = { version = "1", features = ["time"] } +futures = { version = "0.3", features = ["compat"] } +ctrlc = { version = "3.1", features = ["termination"] } +thiserror = "1.0" +chrono = "0.4" +serde_json = "1.0" +ethabi = "16.0.0" +metrics = "0.20" +hex = "0.4" +serde = { version = "1.0", features = ["derive"] } +bincode = "1.3.2" +reqwest = { version = "0.11", features = ["blocking"] } +queues = "1.1.0" +local-ip-address = "0.5.0" diff --git a/core/bin/prover/README.md b/core/bin/prover/README.md new file mode 100644 index 000000000000..2d70cf056d4b --- /dev/null +++ b/core/bin/prover/README.md @@ -0,0 +1,8 @@ +# Readme + +For compiling locally (no cuda) set `features=["legacy"], default-features=false` for: + +- `./Cargo.toml`: 
`heavy-ops-service` dependency. +- `../setup_key_generator_and_server/Cargo.toml`: `api` and `prover-service` dependencies. + +**! Don't push those changes !** diff --git a/core/bin/prover/rust-toolchain.toml b/core/bin/prover/rust-toolchain.toml new file mode 100644 index 000000000000..271800cb2f37 --- /dev/null +++ b/core/bin/prover/rust-toolchain.toml @@ -0,0 +1,2 @@ +[toolchain] +channel = "nightly" \ No newline at end of file diff --git a/core/bin/prover/src/artifact_provider.rs b/core/bin/prover/src/artifact_provider.rs new file mode 100644 index 000000000000..a12819b25b7f --- /dev/null +++ b/core/bin/prover/src/artifact_provider.rs @@ -0,0 +1,21 @@ +use prover_service::ArtifactProvider; +use std::io::Read; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncVerificationKey; +use zkevm_test_harness::pairing::bn256::Bn256; +use zksync_setup_key_server::get_setup_for_circuit_type; +use zksync_verification_key_server::get_vk_for_circuit_type; + +#[derive(Debug)] +pub struct ProverArtifactProvider; +impl ArtifactProvider for ProverArtifactProvider { + type ArtifactError = String; + + fn get_setup(&self, circuit_id: u8) -> Result, Self::ArtifactError> { + Ok(get_setup_for_circuit_type(circuit_id)) + } + + fn get_vk(&self, circuit_id: u8) -> Result, Self::ArtifactError> { + let vk = get_vk_for_circuit_type(circuit_id); + Ok(ZkSyncVerificationKey::from_verification_key_and_numeric_type(circuit_id, vk)) + } +} diff --git a/core/bin/prover/src/main.rs b/core/bin/prover/src/main.rs new file mode 100644 index 000000000000..aede401a704d --- /dev/null +++ b/core/bin/prover/src/main.rs @@ -0,0 +1,196 @@ +use std::cell::RefCell; +use std::env; +use std::sync::{Arc, Mutex}; + +use api::gpu_prover; +use futures::{channel::mpsc, executor::block_on, future, SinkExt, StreamExt}; +use local_ip_address::local_ip; +use prover_service::run_prover::run_prover_with_remote_synthesizer; +use queues::Buffer; +use tokio::task::JoinHandle; + +use 
zksync_circuit_breaker::{vks::VksChecker, CircuitBreakerChecker}; +use zksync_config::configs::prover_group::ProverGroupConfig; +use zksync_config::{ + configs::api::Prometheus as PrometheusConfig, ApiConfig, ProverConfig, ProverConfigs, + ZkSyncConfig, +}; +use zksync_dal::gpu_prover_queue_dal::SocketAddress; +use zksync_dal::ConnectionPool; + +use crate::artifact_provider::ProverArtifactProvider; +use crate::prover::ProverReporter; +use crate::prover_params::ProverParams; +use crate::socket_listener::incoming_socket_listener; +use crate::synthesized_circuit_provider::SynthesizedCircuitProvider; + +mod artifact_provider; +mod prover; +mod prover_params; +mod socket_listener; +mod synthesized_circuit_provider; + +pub async fn wait_for_tasks(task_futures: Vec>) { + match future::select_all(task_futures).await.0 { + Ok(_) => { + vlog::info!("One of the actors finished its run, while it wasn't expected to do it"); + } + Err(error) => { + vlog::info!( + "One of the tokio actors unexpectedly finished with error: {:?}", + error + ); + } + } +} + +fn get_ram_per_gpu() -> u64 { + let device_info = gpu_prover::cuda_bindings::device_info(0).unwrap(); + let ram_in_gb: u64 = device_info.total / (1024 * 1024 * 1024); + vlog::info!("Detected RAM per GPU: {:?} GB", ram_in_gb); + ram_in_gb +} + +fn get_prover_config_for_machine_type() -> ProverConfig { + let prover_configs = ProverConfigs::from_env(); + let actual_num_gpus = gpu_prover::cuda_bindings::devices().unwrap() as usize; + vlog::info!("detected number of gpus: {}", actual_num_gpus); + let ram_in_gb = get_ram_per_gpu(); + + match actual_num_gpus { + 1 => { + vlog::info!("Detected machine type with 1 GPU and 80GB RAM"); + prover_configs.one_gpu_eighty_gb_mem + } + 2 => { + if ram_in_gb > 39 { + vlog::info!("Detected machine type with 2 GPU and 80GB RAM"); + prover_configs.two_gpu_eighty_gb_mem + } else { + vlog::info!("Detected machine type with 2 GPU and 40GB RAM"); + prover_configs.two_gpu_forty_gb_mem + } + } + 4 => { + 
vlog::info!("Detected machine type with 4 GPU and 80GB RAM"); + prover_configs.four_gpu_eighty_gb_mem + } + _ => panic!("actual_num_gpus: {} not supported yet", actual_num_gpus), + } +} + +#[tokio::main] +async fn main() { + let sentry_guard = vlog::init(); + let config = ZkSyncConfig::from_env(); + let prover_config = get_prover_config_for_machine_type(); + let prometheus_config = PrometheusConfig { + listener_port: prover_config.prometheus_port, + ..ApiConfig::from_env().prometheus + }; + + match sentry_guard { + Some(_) => vlog::info!( + "Starting Sentry url: {}", + std::env::var("MISC_SENTRY_URL").unwrap(), + ), + None => vlog::info!("No sentry url configured"), + } + + let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); + + { + let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); + ctrlc::set_handler(move || { + let mut sender = stop_signal_sender.borrow_mut(); + block_on(sender.send(true)).expect("Ctrl+C signal send"); + }) + .expect("Error setting Ctrl+C handler"); + } + + zksync_prover_utils::ensure_initial_setup_keys_present( + &prover_config.initial_setup_key_path, + &prover_config.key_download_url, + ); + env::set_var("CRS_FILE", prover_config.initial_setup_key_path.clone()); + + let circuit_breaker_checker = CircuitBreakerChecker::new( + vec![Box::new(VksChecker::new(&config))], + &config.chain.circuit_breaker, + ); + circuit_breaker_checker + .check() + .await + .expect("Circuit breaker triggered"); + let (cb_sender, cb_receiver) = futures::channel::oneshot::channel(); + // We don't have a graceful shutdown process for the prover, so `_stop_sender` is unused. + // Though we still need to create a channel because circuit breaker expects `stop_receiver`. 
+ let (_stop_sender, stop_receiver) = tokio::sync::watch::channel(false); + + let circuit_ids = ProverGroupConfig::from_env() + .get_circuit_ids_for_group_id(prover_config.specialized_prover_group_id); + + vlog::info!("Starting proof generation for circuits: {:?}", circuit_ids); + let mut tasks: Vec> = vec![]; + + tasks.push(prometheus_exporter::run_prometheus_exporter( + prometheus_config, + true, + )); + tasks.push(tokio::spawn( + circuit_breaker_checker.run(cb_sender, stop_receiver), + )); + + let assembly_queue = Buffer::new(prover_config.assembly_queue_capacity); + let shared_assembly_queue = Arc::new(Mutex::new(assembly_queue)); + let producer = shared_assembly_queue.clone(); + let consumer = shared_assembly_queue.clone(); + let local_ip = local_ip().expect("Failed obtaining local IP address"); + let address = SocketAddress { + host: local_ip, + port: prover_config.assembly_receiver_port, + }; + let synthesized_circuit_provider = + SynthesizedCircuitProvider::new(consumer, ConnectionPool::new(Some(1), true), address); + vlog::info!("local IP address is: {:?}", local_ip); + + tasks.push(tokio::task::spawn(incoming_socket_listener( + local_ip, + prover_config.assembly_receiver_port, + prover_config.assembly_receiver_poll_time_in_millis, + producer, + ConnectionPool::new(Some(1), true), + prover_config.specialized_prover_group_id, + ))); + + let artifact_provider = ProverArtifactProvider {}; + let prover_job_reporter = ProverReporter { + pool: ConnectionPool::new(Some(1), true), + config: prover_config.clone(), + processed_by: std::env::var("POD_NAME").unwrap_or("Unknown".to_string()), + }; + + let params: ProverParams = prover_config.clone().into(); + + tasks.push(tokio::task::spawn_blocking(move || { + run_prover_with_remote_synthesizer::<_, _, _, _>( + synthesized_circuit_provider, + artifact_provider, + prover_job_reporter, + circuit_ids, + params, + ) + })); + + tokio::select! 
{ + _ = async { wait_for_tasks(tasks).await } => {}, + _ = async { stop_signal_receiver.next().await } => { + vlog::info!("Stop signal received, shutting down"); + }, + error = async { cb_receiver.await } => { + if let Ok(error_msg) = error { + vlog::warn!("Circuit breaker received, shutting down. Reason: {}", error_msg); + } + }, + }; +} diff --git a/core/bin/prover/src/prover.rs b/core/bin/prover/src/prover.rs new file mode 100644 index 000000000000..1503ca881eb3 --- /dev/null +++ b/core/bin/prover/src/prover.rs @@ -0,0 +1,267 @@ +use std::time::Duration; + +use prover_service::JobResult::{Failure, ProofGenerated}; +use prover_service::{JobReporter, JobResult}; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::{ZkSyncProof}; +use zkevm_test_harness::pairing::bn256::Bn256; + + +use zksync_config::ProverConfig; +use zksync_dal::ConnectionPool; + +use zksync_object_store::object_store::{create_object_store_from_env, PROVER_JOBS_BUCKET_PATH}; + + +#[derive(Debug)] +pub struct ProverReporter { + pub(crate) pool: ConnectionPool, + pub(crate) config: ProverConfig, + pub(crate) processed_by: String, +} + +pub fn assembly_debug_blob_url(job_id: usize, circuit_id: u8) -> String { + format!("assembly_debugging_{}_{}.bin", job_id, circuit_id) +} + +impl ProverReporter { + fn handle_successful_proof_generation( + &self, + job_id: usize, + proof: ZkSyncProof, + duration: Duration, + index: usize, + ) { + let circuit_type = self.get_circuit_type(job_id); + let serialized = bincode::serialize(&proof).expect("Failed to serialize proof"); + vlog::info!( + "Successfully generated proof with id {:?} and type: {} for index: {}. 
Size: {:?}KB took: {}", + job_id, + circuit_type.clone(), + index, + serialized.len() >> 10, + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.proof_generation_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + let job_id = job_id as u32; + let mut connection = self.pool.access_storage_blocking(); + let mut transaction = connection.start_transaction_blocking(); + + // Lock `prover_jobs` table. + // It is needed to have only one transaction at the moment + // that calls `successful_proofs_count` method to avoid race condition. + transaction.prover_dal().lock_prover_jobs_table_exclusive(); + transaction + .prover_dal() + .save_proof(job_id, duration, serialized, &self.processed_by); + let prover_job_metadata = transaction + .prover_dal() + .get_prover_job_by_id(job_id) + .unwrap_or_else(|| panic!("No job with id: {} exist", job_id)); + + if let Some(next_round) = prover_job_metadata.aggregation_round.next() { + // for Basic, Leaf and Node rounds we need to mark the next job as `queued` + // if all the dependent proofs are computed + + let successful_proofs_count = transaction.prover_dal().successful_proofs_count( + prover_job_metadata.block_number, + prover_job_metadata.aggregation_round, + ); + + let required_proofs_count = transaction + .witness_generator_dal() + .required_proofs_count(prover_job_metadata.block_number, next_round); + + vlog::info!( + "Generated {}/{} {:?} circuits of block {:?}", + successful_proofs_count, + required_proofs_count, + prover_job_metadata.aggregation_round, + prover_job_metadata.block_number.0 + ); + + if successful_proofs_count == required_proofs_count { + transaction + .witness_generator_dal() + .mark_witness_job_as_queued(prover_job_metadata.block_number, next_round); + } + } else { + let block = transaction + .blocks_dal() + .get_block_header(prover_job_metadata.block_number) + .unwrap(); + metrics::counter!( + "server.processed_txs", + block.tx_count() as u64, + "stage" => 
"prove_generated" + ); + } + transaction.commit_blocking(); + metrics::gauge!( + "server.block_number", + prover_job_metadata.block_number.0 as f64, + "stage" => format!("prove_{:?}",prover_job_metadata.aggregation_round), + ); + } + + fn get_circuit_type(&self, job_id: usize) -> String { + let prover_job_metadata = self + .pool + .access_storage_blocking() + .prover_dal() + .get_prover_job_by_id(job_id as u32) + .unwrap_or_else(|| panic!("No job with id: {} exist", job_id)); + prover_job_metadata.circuit_type + } +} + +impl JobReporter for ProverReporter { + fn send_report(&mut self, report: JobResult) { + match report { + Failure(job_id, error) => { + vlog::info!( + "Failed to generate proof for id {:?}. error reason; {}", + job_id, + error + ); + self.pool + .access_storage_blocking() + .prover_dal() + .save_proof_error(job_id as u32, error, self.config.max_attempts); + } + ProofGenerated(job_id, duration, proof, index) => { + self.handle_successful_proof_generation(job_id, proof, duration, index); + } + + JobResult::Synthesized(job_id, duration) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully synthesized circuit with id {:?} and type: {}. took: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.circuit_synthesis_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + } + JobResult::AssemblyFinalized(job_id, duration) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully finalized assembly with id {:?} and type: {}. 
took: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.assembly_finalize_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + } + + JobResult::SetupLoaded(job_id, duration, cache_miss) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully setup loaded with id {:?} and type: {}. took: {:?} and had cache_miss: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + cache_miss + ); + metrics::histogram!("server.prover.setup_load_time", duration.as_secs() as f64, + "circuit_type" => circuit_type.clone(),); + metrics::counter!( + "server.prover.setup_loading_cache_miss", + 1, + "circuit_type" => circuit_type + ); + } + JobResult::AssemblyEncoded(job_id, duration) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully encoded assembly with id {:?} and type: {}. took: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.assembly_encoding_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + } + JobResult::AssemblyDecoded(job_id, duration) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully decoded assembly with id {:?} and type: {}. took: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.assembly_decoding_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + } + JobResult::FailureWithDebugging(job_id, circuit_id, assembly, error) => { + let mut object_store = create_object_store_from_env(); + vlog::info!( + "Failed assembly decoding for job-id {} and circuit-type: {}. 
error: {}", + job_id, + circuit_id, + error, + ); + let blob_url = assembly_debug_blob_url(job_id, circuit_id); + object_store + .put(PROVER_JOBS_BUCKET_PATH, blob_url, assembly) + .expect("Failed saving debug assembly to GCS"); + } + JobResult::AssemblyTransferred(job_id, duration) => { + let circuit_type = self.get_circuit_type(job_id); + vlog::info!( + "Successfully transferred assembly with id {:?} and type: {}. took: {}", + job_id, + circuit_type.clone(), + duration.as_secs() as f64, + ); + metrics::histogram!( + "server.prover.assembly_transferring_time", + duration.as_secs() as f64, + "circuit_type" => circuit_type, + ); + } + JobResult::ProverWaitedIdle(prover_id, duration) => { + vlog::info!( + "Prover wait idle time: {} for prover-id: {:?}", + duration.as_secs() as f64, + prover_id + ); + metrics::histogram!( + "server.prover.prover_wait_idle_time", + duration.as_secs() as f64, + ); + } + JobResult::SetupLoaderWaitedIdle(duration) => { + vlog::info!("Setup load wait idle time: {}", duration.as_secs() as f64,); + metrics::histogram!( + "server.prover.setup_load_wait_wait_idle_time", + duration.as_secs() as f64, + ); + } + JobResult::SchedulerWaitedIdle(duration) => { + vlog::info!("Scheduler wait idle time: {}", duration.as_secs() as f64,); + metrics::histogram!( + "server.prover.scheduler_wait_idle_time", + duration.as_secs() as f64, + ); + } + } + } +} diff --git a/core/bin/prover/src/prover_params.rs b/core/bin/prover/src/prover_params.rs new file mode 100644 index 000000000000..112b71c4428d --- /dev/null +++ b/core/bin/prover/src/prover_params.rs @@ -0,0 +1,36 @@ +use std::time::Duration; + +use prover_service::Params; + +use zksync_config::ProverConfig; + +#[derive(Debug)] +pub struct ProverParams { + number_of_threads: u8, + polling_duration: Duration, + number_of_setup_slots: u8, +} + +impl From for ProverParams { + fn from(config: ProverConfig) -> Self { + ProverParams { + number_of_threads: config.number_of_threads as u8, + polling_duration: 
Duration::from_millis(config.polling_duration_in_millis), + number_of_setup_slots: config.number_of_setup_slots, + } + } +} + +impl Params for ProverParams { + fn number_of_parallel_synthesis(&self) -> u8 { + self.number_of_threads as u8 + } + + fn number_of_setup_slots(&self) -> u8 { + self.number_of_setup_slots + } + + fn polling_duration(&self) -> Duration { + self.polling_duration + } +} diff --git a/core/bin/prover/src/socket_listener.rs b/core/bin/prover/src/socket_listener.rs new file mode 100644 index 000000000000..815b1faa4c46 --- /dev/null +++ b/core/bin/prover/src/socket_listener.rs @@ -0,0 +1,82 @@ +use crate::synthesized_circuit_provider::SharedAssemblyQueue; +use queues::IsQueue; +use std::io::copy; +use std::net::{IpAddr, SocketAddr, TcpListener, TcpStream}; +use std::time::{Duration, Instant}; +use tokio::time::sleep; +use zksync_dal::gpu_prover_queue_dal::{GpuProverInstanceStatus, SocketAddress}; +use zksync_dal::ConnectionPool; + +pub async fn incoming_socket_listener( + host: IpAddr, + port: u16, + poll_time_in_millis: u64, + queue: SharedAssemblyQueue, + pool: ConnectionPool, + specialized_prover_group_id: u8, +) { + let listening_address = SocketAddr::new(host, port); + vlog::info!( + "Starting assembly receiver at host: {}, port: {}", + host, + port + ); + let listener = TcpListener::bind(listening_address) + .unwrap_or_else(|_| panic!("Failed binding address: {:?}", listening_address)); + let address = SocketAddress { host, port }; + + pool.clone() + .access_storage_blocking() + .gpu_prover_queue_dal() + .insert_prover_instance( + address.clone(), + queue.lock().unwrap().capacity(), + specialized_prover_group_id, + ); + + loop { + match listener.incoming().next() { + Some(stream) => { + let stream = stream.expect("Stream closed early"); + handle_incoming_file(stream, queue.clone(), pool.clone(), address.clone()); + } + None => sleep(Duration::from_millis(poll_time_in_millis)).await, + } + } +} + +fn handle_incoming_file( + mut stream: 
TcpStream, + queue: SharedAssemblyQueue, + pool: ConnectionPool, + address: SocketAddress, +) { + let mut assembly: Vec = vec![]; + let started_at = Instant::now(); + copy(&mut stream, &mut assembly).expect("Failed reading from stream"); + let file_size_in_gb = assembly.len() / (1024 * 1024 * 1024); + vlog::info!( + "Read file of size: {}GB from stream took: {} seconds", + file_size_in_gb, + started_at.elapsed().as_secs() + ); + let mut assembly_queue = queue.lock().unwrap(); + + assembly_queue + .add(assembly) + .expect("Failed saving assembly to queue"); + let status = if assembly_queue.capacity() == assembly_queue.size() { + GpuProverInstanceStatus::Full + } else { + GpuProverInstanceStatus::Available + }; + + pool.clone() + .access_storage_blocking() + .gpu_prover_queue_dal() + .update_prover_instance_status( + address, + status, + assembly_queue.capacity() - assembly_queue.size(), + ); +} diff --git a/core/bin/prover/src/synthesized_circuit_provider.rs b/core/bin/prover/src/synthesized_circuit_provider.rs new file mode 100644 index 000000000000..57858f8887ec --- /dev/null +++ b/core/bin/prover/src/synthesized_circuit_provider.rs @@ -0,0 +1,49 @@ +use std::io::Cursor; +use std::io::Read; +use std::sync::{Arc, Mutex}; + +use prover_service::RemoteSynthesizer; +use queues::{Buffer, IsQueue}; +use zksync_dal::gpu_prover_queue_dal::SocketAddress; +use zksync_dal::ConnectionPool; + +pub type SharedAssemblyQueue = Arc>>>; + +pub struct SynthesizedCircuitProvider { + queue: SharedAssemblyQueue, + pool: ConnectionPool, + address: SocketAddress, +} + +impl SynthesizedCircuitProvider { + pub fn new(queue: SharedAssemblyQueue, pool: ConnectionPool, address: SocketAddress) -> Self { + Self { + queue, + pool, + address, + } + } +} + +impl RemoteSynthesizer for SynthesizedCircuitProvider { + fn try_next(&mut self) -> Option> { + let mut assembly_queue = self.queue.lock().unwrap(); + let is_full = assembly_queue.capacity() == assembly_queue.size(); + return match 
assembly_queue.remove() { + Ok(blob) => { + if is_full { + self.pool + .clone() + .access_storage_blocking() + .gpu_prover_queue_dal() + .update_prover_instance_from_full_to_available( + self.address.clone(), + assembly_queue.capacity() - assembly_queue.size(), + ); + } + Some(Box::new(Cursor::new(blob))) + } + Err(_) => None, + }; + } +} diff --git a/core/bin/set_correct_tx_format_for_priority_ops/Cargo.toml b/core/bin/set_correct_tx_format_for_priority_ops/Cargo.toml new file mode 100644 index 000000000000..8bc799cad687 --- /dev/null +++ b/core/bin/set_correct_tx_format_for_priority_ops/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "set_correct_tx_format_for_priority_ops" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +tokio = { version = "1" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } diff --git a/core/bin/set_correct_tx_format_for_priority_ops/src/main.rs b/core/bin/set_correct_tx_format_for_priority_ops/src/main.rs new file mode 100644 index 000000000000..0e208c6b241b --- /dev/null +++ b/core/bin/set_correct_tx_format_for_priority_ops/src/main.rs @@ -0,0 +1,18 @@ +use std::thread::sleep; +use std::time::Duration; +use zksync_dal::ConnectionPool; + +#[tokio::main] +async fn main() { + let pool = ConnectionPool::new(Some(1), true); + let mut storage = pool.access_storage().await; + + while storage + .transactions_dal() + .set_correct_tx_type_for_priority_operations(40) + { + println!("Some txs were updated"); + sleep(Duration::from_secs(1)); + } + println!("finish"); +} diff --git a/core/bin/setup_key_generator_and_server/Cargo.lock b/core/bin/setup_key_generator_and_server/Cargo.lock new file mode 100644 index 000000000000..db858f272c9c --- /dev/null +++ b/core/bin/setup_key_generator_and_server/Cargo.lock @@ -0,0 +1,5002 @@ +# This file is automatically @generated by Cargo. 
+# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addchain" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aes" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" +dependencies = [ + "aes-soft", + "aesni", + "cipher", +] + +[[package]] +name = "aes-ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7729c3cde54d67063be556aeac75a81330d802f0259500ca40cb52967f975763" +dependencies = [ + "aes-soft", + "aesni", + "cipher", + "ctr", +] + +[[package]] +name = "aes-soft" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "aesni" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" +dependencies = [ + "cipher", + "opaque-debug", +] + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 
0.2.8", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800" + +[[package]] +name = "api" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bellman_ce", + "cfg-if 1.0.0", + "gpu-prover", + "num_cpus", + "serde", +] + +[[package]] +name = "arr_macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a105bfda48707cf19220129e78fca01e9639433ffaef4163546ed8fb04120a5" +dependencies = [ + "arr_macro_impl", + "proc-macro-hack", +] + +[[package]] +name = "arr_macro_impl" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0609c78bd572f4edc74310dfb63a01f5609d53fa8b4dd7c4d98aef3b3e8d72d1" +dependencies = [ + "proc-macro-hack", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "arrayref" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" + +[[package]] +name = "arrayvec" 
+version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd9fd44efafa8690358b7408d253adf110036b88f55672a933f01d616ad9b1b9" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" + +[[package]] +name = "async-stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "async-trait" +version = "0.1.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "base64ct" +version = "1.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" + +[[package]] +name = "bellman_ce" +version = "0.3.2" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayvec 0.7.2", + "bit-vec", + "blake2s_const", + "blake2s_simd", + "byteorder", + "cfg-if 1.0.0", + "crossbeam 0.7.3", + "futures", + "hex", + "lazy_static", + "num_cpus", + "pairing_ce", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "bigdecimal" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc403c26e6b03005522e6e8053384c4e881dfe5b2bf041c0c2c49be33d64a539" +dependencies = [ + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = 
"bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "clap", + "env_logger 0.9.3", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2 1.0.51", + "quote 1.0.23", + "regex", + "rustc-hash", + "shlex", + "which", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +dependencies = [ + "crypto-mac 0.8.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "blake2-rfc_bellman_edition" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" +dependencies = [ + "arrayvec 0.4.12", + "byteorder", + "constant_time_eq", +] + +[[package]] +name = "blake2s_const" +version = "0.6.0" +source = "git+https://github.com/matter-labs/bellman?branch=dev#3aa6226d04d60c539ff4ee480479ac5b92871a20" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "blake2s_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "constant_time_eq", +] + +[[package]] +name = "block-buffer" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "block-buffer" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-modes" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" +dependencies = [ + "block-padding", + "cipher", +] + +[[package]] +name = "block-padding" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "rustc-serialize", + "serde", + "time 0.1.45", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "cipher" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +dependencies = [ + "generic-array", +] + +[[package]] +name = "circuit_testing" +version = "0.1.0" +source = "git+https://github.com/matter-labs/circuit_testing.git?branch=main#7160c45c844944748663c91b6860c77f5376d9e4" +dependencies = [ + "bellman_ce", +] + +[[package]] +name = "clang-sys" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa2e27ae6ab525c3d369ded447057bca5438d86dc3a68f6faafb8269ba82ebf3" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + +[[package]] +name = "codegen" +version = "0.1.0" +source = "git+https://github.com/matter-labs/solidity_plonk_verifier.git?branch=dev#4fb6397f778a580c9207ec23661228f5da7e66b4" +dependencies = [ + "ethereum-types", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "handlebars", + "hex", + "paste", + "rescue_poseidon", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d" +dependencies = [ + "indexmap", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies 
= [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "const-oid" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b" + +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "cpufeatures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-channel 0.4.4", + "crossbeam-deque 0.7.4", + "crossbeam-epoch 0.8.2", + "crossbeam-queue 0.2.3", + "crossbeam-utils 0.7.2", +] + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel 0.5.6", + 
"crossbeam-deque 0.8.2", + "crossbeam-epoch 0.9.13", + "crossbeam-queue 0.3.8", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" +dependencies = [ + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-deque" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" +dependencies = [ + "crossbeam-epoch 0.8.2", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch 0.9.13", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "lazy_static", + "maybe-uninit", + "memoffset 0.5.6", + "scopeguard", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", + "memoffset 0.7.1", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = 
"0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" +dependencies = [ + "cfg-if 0.1.10", + "crossbeam-utils 0.7.2", + "maybe-uninit", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils 0.8.14", +] + +[[package]] +name = "crossbeam-utils" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" +dependencies = [ + "autocfg 1.1.0", + "cfg-if 0.1.10", + "lazy_static", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + 
"generic-array", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a" +dependencies = [ + "generic-array", + "subtle", +] + +[[package]] +name = "cs_derive" +version = "0.1.0" +source = "git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "ctr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" +dependencies = [ + "cipher", +] + +[[package]] +name = "cxx" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90d59d9acd2a682b4e40605a242f6670eaa58c5957471cbf85e8aa6a0b97a5e8" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfa40bda659dd5c864e65f4c9a2b0aff19bea56b017b9b77c73d3766a453a38" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "scratch", + "syn 1.0.107", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "457ce6757c5c70dc6ecdbda6925b958aae7f959bda7d8fb9bde889e34a09dc03" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebf883b7aacd7b2aeb2a7b338648ee19f57c140d4ee8e52c68979c6b2f7f2263" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "strsim 0.10.0", + "syn 1.0.107", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustc_version", + "syn 1.0.107", +] + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer 0.10.3", + "crypto-common", + "subtle", +] + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "eip712-signature" +version = "0.1.0" +source = "git+https://github.com/vladbochok/eip712-signature#30b11455e7d613313e8c12d2aad961fd4bf902fe" +dependencies = [ + "ethereum-types", + "parity-crypto", + "thiserror", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint", + "der", + "digest 0.10.6", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", 
+ "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "envy" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f47e0157f2cb54f5ae1bd371b30a2ae4311e1c028f575cd4e81de7353215965" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "ethabi" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c98847055d934070b90e806e12d3936b787d0a115068981c1d8dfd5dfef5a5" +dependencies = [ + "ethereum-types", + "hex", + "serde", + "serde_json", + "sha3 0.9.1", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-rlp", + "impl-serde", + "tiny-keccak 2.0.2", +] + +[[package]] +name = "ethereum-types" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05136f7057fe789f06e6d41d07b34e6f70d8c86e5693b60f97aaa6553553bdaf" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-rlp", + "impl-serde", + "primitive-types", + "uint", +] + +[[package]] +name = 
"fastrand" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +dependencies = [ + "instant", +] + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" +dependencies = [ + "byteorder", + "ff_derive_ce", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "ff_derive_ce" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" +dependencies = [ + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "proc-macro2 1.0.51", + "quote 1.0.23", + "serde", + "syn 1.0.107", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + 
"foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+ssh://git@github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "franklin-crypto" +version = "0.0.5" +source = "git+https://github.com/matter-labs/franklin-crypto?branch=dev#3baf4c4eb3b41fcaca5cfd36d0dc46b097ba7322" +dependencies = [ + "arr_macro", + "bellman_ce", + "bit-vec", + "blake2 0.9.2", + "blake2-rfc_bellman_edition", + "blake2s_simd", + "byteorder", + "digest 0.9.0", + "hex", + "indexmap", + "itertools", + "lazy_static", + "num-bigint 0.4.3", + "num-derive 0.2.5", + "num-integer", + "num-traits", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "sha3 0.9.1", + "smallvec", + "splitmut", + "tiny-keccak 1.5.0", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "futures" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e2792b0ff0340399d58445b88fd9770e3489eff258a4cbc1523418f12abf84" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e5317663a9089767a1ec00a487df42e0ca174b61b4483213ac24448e4664df5" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec90ff4d0fe1f57d600049061dc6bb68ed03c7d2fbd697274c41805dcb3f8608" + +[[package]] +name = "futures-executor" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8de0a35a6ab97ec8869e32a2473f4b1324459e14c29275d14b10cb1fd19b50e" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-io" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfb8371b6fb2aeb2d280374607aeabfc99d95c72edfe51692e42d3d7f0d08531" + +[[package]] +name = "futures-locks" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c4e684ddb2d8a4db5ca8a02b35156da129674ba4412b6f528698d58c594954" +dependencies = [ + "futures", + "tokio 0.2.25", +] + +[[package]] +name = "futures-macro" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "futures-sink" +version = "0.3.26" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f310820bb3e8cfd46c80db4d7fb8353e15dfff853a127158425f31e0be6c8364" + +[[package]] +name = "futures-task" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf79a1bf610b10f42aea489289c5a2c478a786509693b80cd39c44ccd936366" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c1d6de3acfef38d2be4b1f543f553131788603495be83da675e180c8d6b7bd1" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite 0.2.9", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "221996f774192f0f718773def8201c4ae31f02616a54ccfc2d358bb0e5cefdec" + +[[package]] +name = "glob" 
+version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "gpu-ffi" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bindgen", + "crossbeam 0.8.2", + "derivative", + "futures", + "futures-locks", + "num_cpus", +] + +[[package]] +name = "gpu-prover" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "bit-vec", + "cfg-if 1.0.0", + "crossbeam 0.8.2", + "franklin-crypto 0.0.5 (git+ssh://git@github.com/matter-labs/franklin-crypto?branch=dev)", + "gpu-ffi", + "itertools", + "num_cpus", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +dependencies = [ + "bytes 1.4.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio 1.25.0", + "tokio-util 0.7.6", + "tracing", +] + +[[package]] +name = "handlebars" +version = "4.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "headers" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +dependencies = [ + "base64 0.13.1", + "bitflags", + "bytes 1.4.0", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.10.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15" +dependencies = [ + "crypto-mac 0.10.1", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.6", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +dependencies = [ + "bytes 1.4.0", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes 1.4.0", + "http", + "pin-project-lite 0.2.9", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5e011372fa0b68db8350aa7a248930ecc7839bf46d8485577d69f117a75f164c" +dependencies = [ + "bytes 1.4.0", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite 0.2.9", + "socket2", + "tokio 1.25.0", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "rustls", + "tokio 1.25.0", + "tokio-rustls", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite 0.2.9", + "tokio 1.25.0", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.4.0", + "hyper", + "native-tls", + "tokio 1.25.0", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64c122667b287044802d6ce17ee2ddf13207ed924c712de9a66a5814d5b64765" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "impl-codec" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + +[[package]] +name = "impl-serde" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "indexmap" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +dependencies = [ + "autocfg 1.1.0", + "hashbrown", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abeb7a0dd0f8181267ff8adc397075586500b81b28a73e8a0208b00fc170fb3" +dependencies = [ + "libc", + "windows-sys 0.45.0", +] + +[[package]] +name = "ipnet" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" + +[[package]] +name = "is-terminal" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0a45d56fe973d6db23972bf5bc46f988a4a2385deac9cc29572f09daef" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes", + "rustix", + "windows-sys 0.45.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "k256" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" +dependencies = [ + "cfg-if 1.0.0", + "ecdsa", + "elliptic-curve", + "sha2 0.10.6", +] + +[[package]] +name = "keccak" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afef3b6eff9ce9d8ff9b3601125eec7f0c8cbac7abd14f355d053fa56c98768" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.139" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if 1.0.0", + "winapi", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg 1.1.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memoffset" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "metrics" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b9b8653cec6897f73b519a43fba5ee3d50f62fe9af80b428accdcc093b4a849" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-macros" 
+version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "mime" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.42.0", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nom" +version = 
"7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nom8" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae01545c9c7fc4486ab7debaf2aad7003ac19431791868fb2e8066df97fad2f8" +dependencies = [ + "memchr", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b7a8e9be5e039e2ff869df49155f1c06bd01ade2117ec783e56ab0932b67a8f" +dependencies = [ + "num-bigint 0.3.3", + "num-complex 0.3.1", + "num-integer", + "num-iter", + "num-rational 0.3.2", + "num-traits", +] + +[[package]] +name = "num" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +dependencies = [ + "num-bigint 0.4.3", + "num-complex 0.4.3", + "num-integer", + "num-iter", + "num-rational 0.4.1", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafd0b45c5537c3ba526f79d3e75120036502bebacbb3f3220914067ce39dbf2" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg 1.1.0", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.3.3", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg 1.1.0", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" + +[[package]] +name = "opaque-debug" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" + +[[package]] +name = "openssl" +version = "0.10.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7" +dependencies = [ + "autocfg 1.1.0", + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6105e89802af13fdf48c49d7646d3b533a70e536d818aae7e78ba0433d01acb8" +dependencies = [ + "async-trait", + "crossbeam-channel 0.5.6", + "futures-channel", + "futures-executor", + "futures-util", + "js-sys", + "lazy_static", + "percent-encoding", + "pin-project", + "rand 0.8.5", + "thiserror", +] + +[[package]] +name = "opentelemetry-http" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449048140ee61e28f57abe6e9975eedc1f3a29855c7407bd6c12b18578863379" +dependencies = [ + "async-trait", + "bytes 1.4.0", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1a6ca9de4c8b00aa7f1a153bd76cb263287155cec642680d79d98706f3d28a" +dependencies = [ + "async-trait", + "futures", + "futures-util", + "http", + "opentelemetry", + "opentelemetry-http", + "prost", + "prost-build", + "reqwest", + "thiserror", + "tokio 1.25.0", + "tonic", + "tonic-build", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "985cc35d832d412224b2cffe2f9194b1b89b6aa5d0bef76d080dce09d90e62bd" +dependencies = [ + "opentelemetry", +] + +[[package]] +name = "os_info" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5c424bc68d15e0778838ac013b5b3449544d8133633d8016319e7e05a820b8c0" +dependencies = [ + "log", + "serde", + "winapi", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "pairing_ce" +version = "0.28.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db007b21259660d025918e653508f03050bf23fb96a88601f9936329faadc597" +dependencies = [ + "byteorder", + "cfg-if 1.0.0", + "ff_ce", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "parity-crypto" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b92ea9ddac0d6e1db7c49991e7d397d34a9fd814b4c93cda53788e8eef94e35" +dependencies = [ + "aes", + "aes-ctr", + "block-modes", + "digest 0.9.0", + "ethereum-types", + "hmac 0.10.1", + "lazy_static", + "pbkdf2 0.7.5", + "ripemd160", + "rustc-hex", + "scrypt", + "secp256k1 0.20.3", + "sha2 0.9.9", + "subtle", + "tiny-keccak 2.0.2", + "zeroize", +] + +[[package]] +name = "parity-scale-codec" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" +dependencies = [ + "arrayvec 0.7.2", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" +dependencies = [ + "proc-macro-crate", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + 
"parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "password-hash" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54986aa4bfc9b98c6a5f40184223658d187159d7b3c6af33f2b2aa25ae1db0fa" +dependencies = [ + "base64ct", + "rand_core 0.6.4", +] + +[[package]] +name = "paste" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" + +[[package]] +name = "pbkdf2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3b8c0d71734018084da0c0354193a5edfb81b20d2d57a92c5b154aefc554a4a" +dependencies = [ + "crypto-mac 0.10.1", +] + +[[package]] +name = "pbkdf2" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf916dd32dd26297907890d99dc2740e33f6bd9073965af4ccff2967962f5508" +dependencies = [ + "base64ct", + "crypto-mac 0.10.1", + "hmac 0.10.1", + "password-hash", + "sha2 0.9.9", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "028accff104c4e513bad663bbcd2ad7cfd5304144404c31ed0a77ac103d00660" +dependencies = [ + "thiserror", + "ucd-trie", +] + 
+[[package]] +name = "pest_derive" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ac3922aac69a40733080f53c1ce7f91dcf57e1a5f6c52f421fadec7fbdc4b69" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06646e185566b5961b4058dd107e0a7f56e77c3f484549fb119867773c0f202" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pest_meta" +version = "2.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6f60b2ba541577e2a0c307c8f39d1439108120eb7903adeb6497fa880c59616" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.6", +] + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "pin-project-lite" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "portable-atomic" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "primitive-types" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66618389e4ec1c7afe67d51a9bf34ff9236480f8d51e7489b7d5ab0303c13f34" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "version_check", +] + 
+[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "proc-macro2" +version = "1.0.51" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes 1.4.0", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes 1.4.0", + "heck 0.3.3", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prost", + "prost-types", + "regex", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "prost-types" +version = "0.9.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes 1.4.0", + "prost", +] + +[[package]] +name = "prover-service" +version = "0.1.0" +source = "git+https://github.com/matter-labs/heavy-ops-service.git?branch=cleanup#c40d66b6d57f7f4d39259ffb993f5a1bba64bf6c" +dependencies = [ + "api", + "bincode", + "crossbeam-utils 0.8.14", + "log", + "num_cpus", + "rand 0.4.6", + "serde", + "serde_json", + "zkevm_test_harness", +] + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +dependencies = [ + "proc-macro2 1.0.51", +] + +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg", + "rand_xorshift", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = 
"0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.8", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + 
+[[package]] +name = "rayon" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "356a0625f1954f730c0201cdab48611198dc6ce21f4acff55089b5a78e6e835b" +dependencies = [ + "crossbeam-channel 0.5.6", + "crossbeam-deque 0.8.2", + "crossbeam-utils 0.8.14", + "num_cpus", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + 
+[[package]] +name = "reqwest" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21eed90ec8570952d53b772ecf8f206aa1ec9a3d76b2521c56c42973f2d91ee9" +dependencies = [ + "base64 0.21.0", + "bytes 1.4.0", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite 0.2.9", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio 1.25.0", + "tokio-native-tls", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rescue_poseidon" +version = "0.4.1" +source = "git+https://github.com/matter-labs/rescue-poseidon.git#fbb3882b8f1e63dff769a1f1a59211d0e0838351" +dependencies = [ + "addchain", + "arrayvec 0.7.2", + "blake2 0.10.6", + "byteorder", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "num-bigint 0.3.3", + "num-integer", + "num-iter", + "num-traits", + "rand 0.4.6", + "serde", + "sha3 0.9.1", + "smallvec", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint", + "hmac 0.12.1", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "ripemd160" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2eca4ecc81b7f313189bf73ce724400a07da2a6dac19588b03c8bd76a2dcc251" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes 1.4.0", + "rustc-hex", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc-serialize" +version = "0.3.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.36.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43abb88211988493c1abb44a70efa56ff0ce98f233b7b276146f1f3f7ba9644" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + 
"webpki", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustversion" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" + +[[package]] +name = "ryu" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" + +[[package]] +name = "salsa20" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "399f290ffc409596022fce5ea5d4138184be4784f2b28c62c59f0d8389059a15" +dependencies = [ + "cipher", +] + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "scratch" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddccb15bcce173023b3fedd9436f882a0739b8dfb45e4f6b6002bee5929f61b2" + +[[package]] +name = "scrypt" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da492dab03f925d977776a0b7233d7b934d6dc2b94faead48928e2e9bacedb9" +dependencies = [ + "base64 0.13.1", + "hmac 0.10.1", + "pbkdf2 0.6.0", + "rand 0.7.3", + "rand_core 0.5.1", + "salsa20", + "sha2 0.9.9", + "subtle", +] + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d03ceae636d0fed5bae6a7f4f664354c5f4fcedf6eef053fef17e49f837d0a" +dependencies = [ + "rand 0.6.5", + "secp256k1-sys", +] + +[[package]] +name = "secp256k1" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c42e6f1735c5f00f51e43e28d6634141f2bcad10931b2609ddd74a86d751260" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957da2573cde917463ece3570eab4a0b3f19de6f1646cde62e6fd3868f566036" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" + +[[package]] +name = "sentry" +version = "0.29.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6097dc270a9c4555c5d6222ed243eaa97ff38e29299ed7c5cb36099033c604e" +dependencies = [ + "httpdate", + "native-tls", + "reqwest", + "sentry-backtrace", + "sentry-contexts", + "sentry-core", + "sentry-panic", + "tokio 1.25.0", + "ureq", +] + +[[package]] +name = "sentry-backtrace" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d92d1e4d591534ae4f872d6142f3b500f4ffc179a6aed8a3e86c7cc96d10a6a" +dependencies = [ + "backtrace", + "once_cell", + "regex", + "sentry-core", +] + +[[package]] +name = "sentry-contexts" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3afa877b1898ff67dd9878cf4bec4e53cef7d3be9f14b1fc9e4fcdf36f8e4259" +dependencies = [ + "hostname", + "libc", + "os_info", + "rustc_version", + "sentry-core", + "uname", +] + +[[package]] +name = "sentry-core" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc43eb7e4e3a444151a0fe8a0e9ce60eabd905dae33d66e257fa26f1b509c1bd" +dependencies = [ + "once_cell", + "rand 0.8.5", + "sentry-types", + "serde", + "serde_json", +] + +[[package]] +name = "sentry-panic" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccab4fab11e3e63c45f4524bee2e75cde39cdf164cb0b0cbe6ccd1948ceddf66" +dependencies = [ + "sentry-backtrace", + "sentry-core", +] + +[[package]] +name = "sentry-types" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63708ec450b6bdcb657af760c447416d69c38ce421f34e5e2e9ce8118410bc7" +dependencies = [ + "debugid", + "getrandom 0.2.8", + "hex", + "serde", + "serde_json", + "thiserror", + "time 0.3.17", + "url", + "uuid", +] + +[[package]] +name = "serde" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "serde_json" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "setup_key_generator_and_server" +version = "1.0.0" +dependencies = [ + "api", + "circuit_testing", + "itertools", + "prover-service", + "structopt", + "vlog", + "zkevm_test_harness", + "zksync_config", + "zksync_types", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + 
+[[package]] +name = "sha2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest 0.10.6", +] + +[[package]] +name = "sha3" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" +dependencies = [ + "block-buffer 0.9.0", + "digest 0.9.0", + "keccak", + "opaque-debug", +] + +[[package]] +name = "sha3" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdf0c33fae925bdc080598b84bc15c55e7b9a4a43b3c704da051f977469691c9" +dependencies = [ + "digest 0.10.6", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" 
+dependencies = [ + "digest 0.10.6", + "rand_core 0.6.4", +] + +[[package]] +name = "slab" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "socket2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "splitmut" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85070f382340e8b23a75808e83573ddf65f9ad9143df9573ca37c1ed2ee956a" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "structopt" 
+version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.51", + "quote 1.0.23", + "rustversion", + "syn 1.0.107", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid", +] + +[[package]] +name = "syn" +version = "1.0.107" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "unicode-ident", +] + +[[package]] +name = "sync_vm" +version = "1.3.1" +source = 
"git+https://github.com/matter-labs/sync_vm.git?branch=v1.3.1#a69bcef3eafcc39887ca8c09ec835c1a426d0813" +dependencies = [ + "arrayvec 0.7.2", + "cs_derive", + "derivative", + "eip712-signature", + "franklin-crypto 0.0.5 (git+https://github.com/matter-labs/franklin-crypto?branch=dev)", + "hex", + "itertools", + "num-bigint 0.4.3", + "num-derive 0.3.3", + "num-integer", + "num-traits", + "once_cell", + "rand 0.4.6", + "rescue_poseidon", + "serde", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "zk_evm", + "zkevm_opcode_defs", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "libc", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "test-log" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f0c854faeb68a048f0f2dc410c5ddae3bf83854ef0e4977d58306a5edef50e" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" 
+dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d8a021c69bb74a44ccedb824a046447e2c84a01df9e5c20779750acb38e11b2" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = 
[ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" +dependencies = [ + "bytes 0.5.6", + "pin-project-lite 0.1.12", + "slab", +] + +[[package]] +name = "tokio" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e00990ebabbe4c14c08aca901caed183ecd5c09562a12c824bb53d3c3fd3af" +dependencies = [ + "autocfg 1.1.0", + "bytes 1.4.0", + "libc", + "memchr", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite 0.2.9", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.42.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-macros" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio 1.25.0", +] + +[[package]] +name = 
"tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio 1.25.0", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +dependencies = [ + "futures-core", + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.2.9", + "tokio 1.25.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6a3b08b64e6dfad376fa2432c7b1f01522e37a623c3050bc95db2d3ff21583" +dependencies = [ + "bytes 1.4.0", + "futures-core", + "futures-sink", + "pin-project-lite 0.2.9", + "tokio 1.25.0", + "tracing", +] + +[[package]] +name = "toml_datetime" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4553f467ac8e3d374bc9a177a26801e5d0f9b211aa1673fb137a403afd1c9cf5" + +[[package]] +name = "toml_edit" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c59d8dd7d0dcbc6428bf7aa2f0e823e26e43b3c9aca15bbc9475d23e5fa12b" +dependencies = [ + "indexmap", + "nom8", + "toml_datetime", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64 0.13.1", + "bytes 1.4.0", + "futures-core", + "futures-util", + "h2", + "http", 
+ "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "prost-derive", + "tokio 1.25.0", + "tokio-stream", + "tokio-util 0.6.10", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2 1.0.51", + "prost-build", + "quote 1.0.23", + "syn 1.0.107", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite 0.2.9", + "rand 0.8.5", + "slab", + "tokio 1.25.0", + "tokio-util 0.7.6", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite 0.2.9", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", +] + 
+[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbbe89715c1dbbb790059e2565353978564924ee85017b5fff365c872ff6721f" +dependencies = [ + "once_cell", + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time 0.3.17", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uname" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" +dependencies = [ + "libc", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" + +[[package]] +name = "unicode-ident" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +dependencies = [ + "base64 0.13.1", + "log", + "native-tls", + "once_cell", + "url", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna 0.3.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "uuid" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1674845326ee10d37ca60470760d4288a6f80f304007d92e5c53bab78c9cfd79" +dependencies = [ + "getrandom 0.2.8", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "vlog" +version = "1.0.0" +dependencies = [ + "chrono", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "sentry", + "serde_json", + "tracing", + "tracing-opentelemetry", + "tracing-subscriber", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote 1.0.23", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2 1.0.51", + "quote 1.0.23", + "syn 1.0.107", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web3" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f258e254752d210b84fe117b31f1e3cc9cbf04c0d747eb7f8cf7cf5e370f6d" +dependencies = [ + "arrayvec 0.7.2", + "base64 0.13.1", + "bytes 1.4.0", + "derive_more", + "ethabi", + "ethereum-types", + "futures", + "futures-timer", + "headers", + "hex", + "idna 0.2.3", + "jsonrpc-core", + "log", + "once_cell", + "parking_lot", + "pin-project", + "reqwest", + "rlp", + "secp256k1 0.21.3", + "serde", + "serde_json", + "tiny-keccak 2.0.2", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows-sys" +version 
= "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "zeroize" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" + +[[package]] +name = "zk_evm" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zk_evm.git?branch=v1.3.1#76a3877f5a0b7449bcca73d35bae3ae226996fdd" +dependencies = [ + "blake2 0.10.6", + "k256", + "lazy_static", + "num 0.4.0", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "static_assertions", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm-assembly" +version = "1.3.0" +source = "git+https://github.com/matter-labs/zkEVM-assembly.git?branch=v1.3.1#e2a2145a90ceeb54407df1be5254291a9f693422" +dependencies = [ + "env_logger 0.9.3", + "hex", + "lazy_static", + "log", + "nom", + "num-bigint 0.4.3", + "num-traits", + "regex", + "smallvec", + "structopt", + "thiserror", + "zkevm_opcode_defs", +] + +[[package]] +name = "zkevm_opcode_defs" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_opcode_defs.git?branch=v1.3.1#dc5b5c463d867855514f03c179992acbde74face" +dependencies = [ + "bitflags", + "ethereum-types", + "lazy_static", + "sha2 0.10.6", +] + +[[package]] +name = "zkevm_test_harness" +version = "1.3.1" +source = "git+https://github.com/matter-labs/zkevm_test_harness.git?branch=v1.3.1#2100f00f9cb79e851bf11ec21391672244e382fc" +dependencies = [ + "bincode", + "blake2 0.10.6", + "circuit_testing", + "codegen 0.2.0", + 
"crossbeam 0.8.2", + "derivative", + "env_logger 0.10.0", + "hex", + "num-bigint 0.4.3", + "num-integer", + "num-traits", + "rayon", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3 0.10.6", + "smallvec", + "structopt", + "sync_vm", + "test-log", + "tracing", + "zk_evm", + "zkevm-assembly", +] + +[[package]] +name = "zksync_basic_types" +version = "1.0.0" +dependencies = [ + "serde", + "web3", +] + +[[package]] +name = "zksync_config" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "envy", + "num 0.3.1", + "once_cell", + "serde", + "serde_json", + "url", + "zksync_basic_types", + "zksync_utils", +] + +[[package]] +name = "zksync_contracts" +version = "1.0.0" +dependencies = [ + "ethabi", + "hex", + "once_cell", + "serde_json", + "zksync_utils", +] + +[[package]] +name = "zksync_crypto" +version = "1.0.0" +dependencies = [ + "base64 0.13.1", + "blake2 0.10.6", + "hex", + "once_cell", + "rand 0.4.6", + "serde", + "sha2 0.9.9", + "thiserror", + "zksync_basic_types", +] + +[[package]] +name = "zksync_mini_merkle_tree" +version = "1.0.0" +dependencies = [ + "once_cell", + "rayon", + "zksync_basic_types", + "zksync_crypto", +] + +[[package]] +name = "zksync_types" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "blake2 0.10.6", + "chrono", + "codegen 0.1.0", + "ethbloom", + "hex", + "metrics", + "num 0.3.1", + "once_cell", + "parity-crypto", + "rayon", + "rlp", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror", + "tiny-keccak 1.5.0", + "zk_evm", + "zkevm-assembly", + "zkevm_test_harness", + "zksync_basic_types", + "zksync_config", + "zksync_contracts", + "zksync_mini_merkle_tree", + "zksync_utils", +] + +[[package]] +name = "zksync_utils" +version = "1.0.0" +dependencies = [ + "anyhow", + "bigdecimal", + "envy", + "futures", + "hex", + "num 0.3.1", + "serde", + "thiserror", + "tokio 1.25.0", + "zk_evm", + "zksync_basic_types", +] diff --git a/core/bin/setup_key_generator_and_server/Cargo.toml 
b/core/bin/setup_key_generator_and_server/Cargo.toml new file mode 100644 index 000000000000..4d6c923d6e11 --- /dev/null +++ b/core/bin/setup_key_generator_and_server/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "setup_key_generator_and_server" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[lib] +name = "zksync_setup_key_server" +path = "src/lib.rs" + +[[bin]] +name = "zksync_setup_key_generator" +path = "src/main.rs" + +[dependencies] +zksync_types = {path = "../../lib/types", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } + +circuit_testing = {git = "https://github.com/matter-labs/circuit_testing.git", branch = "main"} +api = { git = "https://github.com/matter-labs/heavy-ops-service.git", branch = "cleanup", features=["gpu"], default-features=false} +prover-service = { git = "https://github.com/matter-labs/heavy-ops-service.git", branch = "cleanup", features=["gpu"], default-features=false} +zkevm_test_harness = { git = "https://github.com/matter-labs/zkevm_test_harness.git", branch = "v1.3.1"} + +structopt = "0.3.26" +itertools = "0.10.5" diff --git a/core/bin/setup_key_generator_and_server/data/.gitkeep b/core/bin/setup_key_generator_and_server/data/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/core/bin/setup_key_generator_and_server/src/lib.rs b/core/bin/setup_key_generator_and_server/src/lib.rs new file mode 100644 index 000000000000..b96af2784714 --- /dev/null +++ b/core/bin/setup_key_generator_and_server/src/lib.rs @@ -0,0 +1,56 @@ +use std::fs::File; +use std::io::Read; +use std::path::Path; + +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zkevm_test_harness::bellman::bn256::Bn256; +use 
zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zkevm_test_harness::witness::recursive_aggregation::padding_aggregations; +use zkevm_test_harness::witness::vk_set_generator::circuits_for_vk_generation; +use zksync_types::circuit::GEOMETRY_CONFIG; + +use zksync_config::ProverConfigs; +use zksync_types::circuit::{LEAF_SPLITTING_FACTOR, NODE_SPLITTING_FACTOR, SCHEDULER_UPPER_BOUND}; +pub fn get_setup_for_circuit_type(circuit_type: u8) -> Box { + let filepath = get_setup_key_file_path(circuit_type); + vlog::info!("Fetching setup key from path: {}", filepath); + let file = File::open(filepath.clone()) + .unwrap_or_else(|_| panic!("Failed reading setup key from path: {}", filepath)); + Box::new(file) +} + +pub fn get_circuits_for_vk() -> Vec>> { + ensure_setup_key_exist(); + let padding_aggregations = padding_aggregations(NODE_SPLITTING_FACTOR); + circuits_for_vk_generation( + GEOMETRY_CONFIG, + LEAF_SPLITTING_FACTOR, + NODE_SPLITTING_FACTOR, + SCHEDULER_UPPER_BOUND, + padding_aggregations, + ) +} + +fn ensure_setup_key_exist() { + if !Path::new("setup_2^26.key").exists() { + panic!("File setup_2^26.key is required to be present in current directory."); + } +} + +pub fn get_setup_key_write_file_path(circuit_type: u8) -> String { + let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| "/".into()); + format!("{}/{}", zksync_home, get_setup_key_filename(circuit_type)) +} + +fn get_setup_key_file_path(circuit_type: u8) -> String { + let prover_config = ProverConfigs::from_env().non_gpu; + format!( + "{}/{}", + prover_config.setup_keys_path, + get_setup_key_filename(circuit_type) + ) +} + +fn get_setup_key_filename(circuit_type: u8) -> String { + format!("setup_{}_key.bin", circuit_type) +} diff --git a/core/bin/setup_key_generator_and_server/src/main.rs b/core/bin/setup_key_generator_and_server/src/main.rs new file mode 100644 index 000000000000..bf9197d18691 --- /dev/null +++ b/core/bin/setup_key_generator_and_server/src/main.rs @@ -0,0 +1,54 @@ +use 
api::Prover; +use prover_service::utils::generate_setup_for_circuit; +use prover_service::Setup; +use std::env; +use std::fs::File; +use structopt::StructOpt; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zkevm_test_harness::bellman::bn256::Bn256; +use zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zksync_setup_key_server::{get_circuits_for_vk, get_setup_key_write_file_path}; + +#[derive(Debug, StructOpt)] +#[structopt( + name = "Generate setup keys for individual circuit", + about = "Tool for generating setup key for individual circuit" +)] +struct Opt { + /// Numeric circuit type valid value from [0-17]. + #[structopt(long)] + numeric_circuit: u8, +} + +fn main() { + let opt = Opt::from_args(); + env::set_var("CRS_FILE", "setup_2^26.key"); + vlog::info!("Starting setup key generation!"); + get_circuits_for_vk() + .into_iter() + .filter(|c| c.numeric_circuit_type() == opt.numeric_circuit) + .for_each(generate_setup_key_for_circuit); +} + +fn generate_setup_key_for_circuit(circuit: ZkSyncCircuit>) { + let mut prover = Prover::new(); + let setup = generate_setup_for_circuit(&mut prover, &circuit); + save_setup_for_circuit_type(circuit.numeric_circuit_type(), setup); + vlog::info!( + "Finished setup key generation for circuit {:?} (id {:?})", + circuit.short_description(), + circuit.numeric_circuit_type() + ); +} + +fn save_setup_for_circuit_type(circuit_type: u8, setup: Setup) { + let filepath = get_setup_key_write_file_path(circuit_type); + vlog::info!("saving setup key to: {}", filepath); + let setup_file = File::create(&filepath).unwrap(); + setup + .write(setup_file) + .expect("Failed saving setup key to file."); + let setup_file = File::open(filepath).expect("Unable to open file"); + let size = setup_file.metadata().unwrap().len() as f64 / (1024.0 * 1024.0); + println!("Saved file size: {:?}MB", size); +} diff --git a/core/bin/storage_logs_migration/Cargo.toml b/core/bin/storage_logs_migration/Cargo.toml 
new file mode 100644 index 000000000000..8aaca82c5a8a --- /dev/null +++ b/core/bin/storage_logs_migration/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "storage_logs_migration" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +tokio = { version = "1" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } diff --git a/core/bin/storage_logs_migration/src/main.rs b/core/bin/storage_logs_migration/src/main.rs new file mode 100644 index 000000000000..44871cb16dd5 --- /dev/null +++ b/core/bin/storage_logs_migration/src/main.rs @@ -0,0 +1,27 @@ +use zksync_dal::ConnectionPool; +use zksync_types::L1BatchNumber; + +#[tokio::main] +async fn main() { + let pool = ConnectionPool::new(Some(1), true); + let mut storage = pool.access_storage().await; + let last_sealed_l1_batch = storage.blocks_dal().get_sealed_block_number(); + + let mut current_l1_batch_number = L1BatchNumber(0); + let block_range = 100u32; + while current_l1_batch_number <= last_sealed_l1_batch { + let to_l1_batch_number = current_l1_batch_number + block_range - 1; + storage + .storage_logs_dedup_dal() + .migrate_protective_reads(current_l1_batch_number, to_l1_batch_number); + storage + .storage_logs_dedup_dal() + .migrate_initial_writes(current_l1_batch_number, to_l1_batch_number); + println!( + "Processed l1 batches {}-{}", + current_l1_batch_number, to_l1_batch_number + ); + + current_l1_batch_number += block_range; + } +} diff --git a/core/bin/system-constants-generator/Cargo.toml b/core/bin/system-constants-generator/Cargo.toml new file mode 100644 index 000000000000..3cc499e4f029 --- /dev/null +++ b/core/bin/system-constants-generator/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "system-constants-generator" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +license = 
"Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +description = "Tool for generating JSON files with the system constants for L1/L2 contracts" +publish = false # We don't want to publish our binaries. + +[dependencies] +zksync_state = { path = "../../lib/state", version = "1.0" } +zksync_storage = { path = "../../lib/storage", version = "1.0" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_utils = { path = "../../lib/utils", version = "1.0" } +zksync_contracts = {path = "../../lib/contracts", version = "1.0" } +vm = {path = "../../lib/vm", version="0.1.0" } + +codegen = "0.2.0" + +serde = "1.0" +serde_json = "1.0" +once_cell = "1.7" +rand = { version = "0.7" } +num = { version = "0.3", features = ["serde", "rand"] } +tempfile = "3.0.2" diff --git a/core/bin/system-constants-generator/src/intrinsic_costs.rs b/core/bin/system-constants-generator/src/intrinsic_costs.rs new file mode 100644 index 000000000000..ef8db44829f4 --- /dev/null +++ b/core/bin/system-constants-generator/src/intrinsic_costs.rs @@ -0,0 +1,209 @@ +//! +//! The script that returns the L2 gas price constants is that calculates the constants currently used by the +//! bootloader as well as L1 smart contracts. It should be used to edit the config file located in the etc/system-contracts/SystemConfig.json +//! as well as contracts/SystemConfig.json +//! + +use crate::utils::{ + execute_internal_transfer_test, execute_user_txs_in_test_gas_vm, get_l1_tx, get_l1_txs, + get_l2_txs, +}; +use crate::utils::{metrics_from_txs, TransactionGenerator}; +use vm::vm_with_bootloader::BOOTLOADER_TX_ENCODING_SPACE; +use zksync_types::{ethabi::Address, IntrinsicSystemGasConstants, U256}; + +#[derive(Debug, Clone, Copy, PartialEq)] +pub(crate) struct VmSpentResourcesResult { + // How many gas was spent on computation. + pub(crate) gas_consumed: u32, + // How many bytes of public data was published. 
+ pub(crate) pubdata_published: u32, + // The total amount of gas the users have paid for. + pub(crate) total_gas_paid: u32, + // The total amount of gas the users have paid for public data. + pub(crate) total_pubdata_paid: u32, +} + +struct IntrinsicPrices { + tx_intrinsic_gas: u32, + tx_intrinsic_pubdata: u32, + bootloader_intrinsic_gas: u32, + bootloader_intrinsic_pubdata: u32, +} + +pub(crate) fn l2_gas_constants() -> IntrinsicSystemGasConstants { + let IntrinsicPrices { + tx_intrinsic_gas: l2_tx_intrinsic_gas, + tx_intrinsic_pubdata: l2_tx_intrinsic_pubdata, + bootloader_intrinsic_gas, + bootloader_intrinsic_pubdata, + } = get_intrinsic_overheads_for_tx_type(&get_l2_txs); + + let IntrinsicPrices { + tx_intrinsic_gas: l1_tx_intrinsic_gas, + tx_intrinsic_pubdata: l1_tx_intrinsic_pubdata, + bootloader_intrinsic_gas: bootloader_intrinsic_gas_with_l1_txs, + bootloader_intrinsic_pubdata: bootloader_intrinsic_pubdata_with_l1_txs, + } = get_intrinsic_overheads_for_tx_type(&get_l1_txs); + + // Sanity checks: the bootloader on its own should consume the same resources + // independently on the type of the transaction + assert_eq!( + bootloader_intrinsic_gas, + bootloader_intrinsic_gas_with_l1_txs + ); + assert_eq!( + bootloader_intrinsic_pubdata, + bootloader_intrinsic_pubdata_with_l1_txs + ); + + // Getting the amount of gas that is required for a transfer assuming no + // pubdata price (this will be the case for most L2 txs as refund usually will not + // change any new storage slots). + let l2_tx_gas_for_refund_transfer = execute_internal_transfer_test(); + + // This is needed to get the minimum number of gas a transaction could require. + let empty_l1_tx_result = execute_user_txs_in_test_gas_vm( + vec![ + // Using 0 gas limit to make sure that only the mandatory parts of the L1->L2 transaction are executed. 
+ get_l1_tx( + 0, + Address::random(), + Address::random(), + 0, + Some(U256::zero()), + None, + None, + ) + .into(), + ], + true, + ); + + // This price does not include the overhead for the transaction itself, but rather auxilary parts + // that must be done by the transaction and it can not be enforced by the operator to not to accept + // the transaction if it does not cover the minimal costs. + let min_l1_tx_price = empty_l1_tx_result.gas_consumed - bootloader_intrinsic_gas; + + // The price for each keccak circuit is increased by 136 bytes at a time, while + // the transaction's size can be only increased by 32 at a time. Thus, for measurements + // we will use the LCM(136, 32) = 544 to measure the increase with the transaction's size. + const DELTA_IN_TX_SIZE: usize = 544; + + let lengthier_tx_result = execute_user_txs_in_test_gas_vm( + vec![get_l1_tx( + 0, + Address::random(), + Address::random(), + 0, + Some(U256::zero()), + Some(vec![0u8; DELTA_IN_TX_SIZE]), + None, + ) + .into()], + true, + ); + + let delta_from_544_bytes = lengthier_tx_result.gas_consumed - empty_l1_tx_result.gas_consumed; + + // The number of public data per factory dep should not depend on the size/structure of the factory + // dependency, since the dependency has already been published on L1. + let tx_with_more_factory_deps_result = execute_user_txs_in_test_gas_vm( + vec![get_l1_tx( + 0, + Address::random(), + Address::random(), + 0, + Some(U256::zero()), + None, + Some(vec![vec![0u8; 32]]), + ) + .into()], + true, + ); + + let gas_delta_from_factory_dep = + tx_with_more_factory_deps_result.gas_consumed - empty_l1_tx_result.gas_consumed; + let pubdata_delta_from_factory_dep = + tx_with_more_factory_deps_result.pubdata_published - empty_l1_tx_result.pubdata_published; + + // The number of the bootloader memory that can be filled up with transactions. 
+ let bootloader_tx_memory_size_slots = BOOTLOADER_TX_ENCODING_SPACE; + + IntrinsicSystemGasConstants { + l2_tx_intrinsic_gas, + l2_tx_intrinsic_pubdata, + l2_tx_gas_for_refund_transfer, + l1_tx_intrinsic_gas, + l1_tx_intrinsic_pubdata, + l1_tx_delta_factory_dep_gas: gas_delta_from_factory_dep, + l1_tx_delta_factory_dep_pubdata: pubdata_delta_from_factory_dep, + l1_tx_delta_544_encoding_bytes: delta_from_544_bytes, + l1_tx_min_gas_base: min_l1_tx_price, + bootloader_intrinsic_gas, + bootloader_intrinsic_pubdata, + bootloader_tx_memory_size_slots, + } +} + +// Takes the consumed resources before the transaction's inclusion and with the transaction included +// and returns the pair of the intrinsic gas cost (for computation) and the intrinsic cost in pubdata +fn get_intrinsic_price( + prev_result: VmSpentResourcesResult, + new_result: VmSpentResourcesResult, +) -> (u32, u32) { + let delta_consumed_gas = new_result.gas_consumed - prev_result.gas_consumed; + let delta_paid_gas = new_result.total_gas_paid - prev_result.total_gas_paid; + + let overhead_gas = delta_consumed_gas.saturating_sub(delta_paid_gas); + + let delta_consumed_pubdata = new_result.pubdata_published - prev_result.pubdata_published; + let delta_paid_pubdata = new_result.total_pubdata_paid - prev_result.total_pubdata_paid; + + let overhead_pubdata = delta_consumed_pubdata.saturating_sub(delta_paid_pubdata); + + (overhead_gas, overhead_pubdata) +} + +fn get_intrinsic_overheads_for_tx_type(tx_generator: &TransactionGenerator) -> IntrinsicPrices { + let result_0 = metrics_from_txs(0, tx_generator); + let result_1 = metrics_from_txs(1, tx_generator); + let result_2 = metrics_from_txs(2, tx_generator); + let result_3 = metrics_from_txs(3, tx_generator); + + // Firstly, let's calculate the bootloader overhead in gas. + // It is equal to the number of gas spent when there are no transactions in the block. 
+ let bootloader_intrinsic_gas = result_0.gas_consumed; + // The same goes for the overhead for the bootloader in pubdata + let bootloader_intrinsic_pubdata = result_0.pubdata_published; + + // For various small reasons the overhead for the first transaction and for all the subsequent ones + // might differ a bit, so we will calculate both and will use the maximum one as the result for l2 txs. + + let (tx1_intrinsic_gas, tx1_intrinsic_pubdata) = get_intrinsic_price(result_0, result_1); + let (tx2_intrinsic_gas, tx2_intrinsic_pubdata) = get_intrinsic_price(result_1, result_2); + let (tx3_intrinsic_gas, tx3_intrinsic_pubdata) = get_intrinsic_price(result_2, result_3); + + // A sanity check: to make sure that the assumptions that are used + // in this calculations hold: we expect the overheads from 2nd and 3rd transactions + // (and intuitively with all the higher indices) to be the same. + assert_eq!( + tx2_intrinsic_gas, tx3_intrinsic_gas, + "Overhead of 2nd and 3rd L2 transactions in gas are not equal" + ); + assert_eq!( + tx2_intrinsic_pubdata, tx3_intrinsic_pubdata, + "Overhead of 2nd and 3rd L2 transactions in pubdata are not equal" + ); + + // Finally, the overhead for computation and pubdata are maxes for overheads for the L2 txs + let tx_intrinsic_gas = std::cmp::max(tx1_intrinsic_gas, tx2_intrinsic_gas); + let tx_intrinsic_pubdata = std::cmp::max(tx1_intrinsic_pubdata, tx2_intrinsic_pubdata); + + IntrinsicPrices { + bootloader_intrinsic_gas, + bootloader_intrinsic_pubdata, + tx_intrinsic_gas, + tx_intrinsic_pubdata, + } +} diff --git a/core/bin/system-constants-generator/src/main.rs b/core/bin/system-constants-generator/src/main.rs new file mode 100644 index 000000000000..60d6b4c79f86 --- /dev/null +++ b/core/bin/system-constants-generator/src/main.rs @@ -0,0 +1,235 @@ +use std::fs; + +use serde::{Deserialize, Serialize}; +use vm::{ + vm_with_bootloader::{BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_L1_GAS, BOOTLOADER_TX_ENCODING_SPACE}, + zk_evm::zkevm_opcode_defs::{ 
+ circuit_prices::{ + ECRECOVER_CIRCUIT_COST_IN_ERGS, KECCAK256_CIRCUIT_COST_IN_ERGS, + SHA256_CIRCUIT_COST_IN_ERGS, + }, + system_params::{MAX_PUBDATA_PER_BLOCK, MAX_TX_ERGS_LIMIT}, + }, +}; +use zksync_types::{ + IntrinsicSystemGasConstants, FAIR_L2_GAS_PRICE, GUARANTEED_PUBDATA_IN_TX, + L1_GAS_PER_PUBDATA_BYTE, MAX_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, MAX_TXS_IN_BLOCK, +}; + +mod intrinsic_costs; +mod utils; + +use codegen::Block; +use codegen::Scope; + +// Params needed for L1 contracts +#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +struct L1SystemConfig { + l2_tx_max_gas_limit: u32, + max_pubdata_per_block: u32, + priority_tx_max_pubdata: u32, + fair_l2_gas_price: u64, + l1_gas_per_pubdata_byte: u32, + block_overhead_l2_gas: u32, + block_overhead_l1_gas: u32, + max_transactions_in_block: u32, + bootloader_tx_encoding_space: u32, + l1_tx_intrinsic_l2_gas: u32, + l1_tx_intrinsic_pubdata: u32, + l1_tx_min_l2_gas_base: u32, + l1_tx_delta_544_encoding_bytes: u32, + l1_tx_delta_factory_deps_l2_gas: u32, + l1_tx_delta_factory_deps_pubdata: u32, + max_new_factory_deps: u32, + default_l2_gas_price_per_pubdata: u64, +} + +pub fn generate_l1_contracts_system_config(gas_constants: &IntrinsicSystemGasConstants) -> String { + let l1_contracts_config = L1SystemConfig { + l2_tx_max_gas_limit: MAX_TX_ERGS_LIMIT, + max_pubdata_per_block: MAX_PUBDATA_PER_BLOCK, + priority_tx_max_pubdata: (L1_TX_DECREASE * (MAX_PUBDATA_PER_BLOCK as f64)) as u32, + fair_l2_gas_price: FAIR_L2_GAS_PRICE, + l1_gas_per_pubdata_byte: L1_GAS_PER_PUBDATA_BYTE, + block_overhead_l2_gas: BLOCK_OVERHEAD_GAS, + block_overhead_l1_gas: BLOCK_OVERHEAD_L1_GAS, + max_transactions_in_block: MAX_TXS_IN_BLOCK as u32, + bootloader_tx_encoding_space: BOOTLOADER_TX_ENCODING_SPACE, + + l1_tx_intrinsic_l2_gas: gas_constants.l1_tx_intrinsic_gas, + l1_tx_intrinsic_pubdata: gas_constants.l1_tx_intrinsic_pubdata, + l1_tx_min_l2_gas_base: 
gas_constants.l1_tx_min_gas_base, + l1_tx_delta_544_encoding_bytes: gas_constants.l1_tx_delta_544_encoding_bytes, + l1_tx_delta_factory_deps_l2_gas: gas_constants.l1_tx_delta_factory_dep_gas, + l1_tx_delta_factory_deps_pubdata: gas_constants.l1_tx_delta_factory_dep_pubdata, + max_new_factory_deps: MAX_NEW_FACTORY_DEPS as u32, + default_l2_gas_price_per_pubdata: MAX_GAS_PER_PUBDATA_BYTE, + }; + + serde_json::to_string_pretty(&l1_contracts_config).unwrap() +} + +// Params needed for L2 system contracts +#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +struct L2SystemConfig { + guaranteed_pubdata_bytes: u32, + max_pubdata_per_block: u32, + max_transactions_in_block: u32, + block_overhead_l2_gas: u32, + block_overhead_l1_gas: u32, + l2_tx_intrinsic_gas: u32, + l2_tx_intrinsic_pubdata: u32, + l1_tx_intrinsic_l2_gas: u32, + l1_tx_intrinsic_pubdata: u32, + max_gas_per_transaction: u32, + bootloader_memory_for_txs: u32, + refund_gas: u32, + keccak_round_cost_gas: u32, + sha256_round_cost_gas: u32, + ecrecover_cost_gas: u32, +} + +pub fn generate_l2_contracts_system_config(gas_constants: &IntrinsicSystemGasConstants) -> String { + let l2_contracts_config = L2SystemConfig { + guaranteed_pubdata_bytes: GUARANTEED_PUBDATA_IN_TX, + max_pubdata_per_block: MAX_PUBDATA_PER_BLOCK, + max_transactions_in_block: MAX_TXS_IN_BLOCK as u32, + block_overhead_l2_gas: BLOCK_OVERHEAD_GAS, + block_overhead_l1_gas: BLOCK_OVERHEAD_L1_GAS, + l2_tx_intrinsic_gas: gas_constants.l2_tx_intrinsic_gas, + l2_tx_intrinsic_pubdata: gas_constants.l2_tx_intrinsic_pubdata, + l1_tx_intrinsic_l2_gas: gas_constants.l1_tx_intrinsic_gas, + l1_tx_intrinsic_pubdata: gas_constants.l1_tx_intrinsic_pubdata, + max_gas_per_transaction: MAX_TX_ERGS_LIMIT, + bootloader_memory_for_txs: BOOTLOADER_TX_ENCODING_SPACE, + refund_gas: gas_constants.l2_tx_gas_for_refund_transfer, + keccak_round_cost_gas: KECCAK256_CIRCUIT_COST_IN_ERGS, + sha256_round_cost_gas: 
SHA256_CIRCUIT_COST_IN_ERGS, + ecrecover_cost_gas: ECRECOVER_CIRCUIT_COST_IN_ERGS, + }; + + serde_json::to_string_pretty(&l2_contracts_config).unwrap() +} + +// We allow L1 transactions to have only a fraction of the maximum gas limit/pubdata for L2 transactions +// Even though the transactions under L2 gas limit should never get out of the bounds for single-instance circuits +const L1_TX_DECREASE: f64 = 0.9; + +fn generate_rust_fee_constants(intrinsic_gas_constants: &IntrinsicSystemGasConstants) -> String { + let mut scope = Scope::new(); + + scope.import("super", "IntrinsicSystemGasConstants"); + + scope.raw( + vec![ + ] + .join("\n"), + ); + + let get_intrinsic_constants_fn = scope.new_fn("get_intrinsic_constants"); + get_intrinsic_constants_fn.vis("pub const"); + get_intrinsic_constants_fn.ret("IntrinsicSystemGasConstants"); + + { + let mut struct_block = Block::new("IntrinsicSystemGasConstants"); + struct_block.line(format!( + "l2_tx_intrinsic_gas: {},", + intrinsic_gas_constants.l2_tx_intrinsic_gas + )); + struct_block.line(format!( + "l2_tx_intrinsic_pubdata: {},", + intrinsic_gas_constants.l2_tx_intrinsic_pubdata + )); + struct_block.line(format!( + "l2_tx_gas_for_refund_transfer: {},", + intrinsic_gas_constants.l2_tx_gas_for_refund_transfer + )); + struct_block.line(format!( + "l1_tx_intrinsic_gas: {},", + intrinsic_gas_constants.l1_tx_intrinsic_gas + )); + struct_block.line(format!( + "l1_tx_intrinsic_pubdata: {},", + intrinsic_gas_constants.l1_tx_intrinsic_pubdata + )); + struct_block.line(format!( + "l1_tx_min_gas_base: {},", + intrinsic_gas_constants.l1_tx_min_gas_base + )); + struct_block.line(format!( + "l1_tx_delta_544_encoding_bytes: {},", + intrinsic_gas_constants.l1_tx_delta_544_encoding_bytes + )); + struct_block.line(format!( + "l1_tx_delta_factory_dep_gas: {},", + intrinsic_gas_constants.l1_tx_delta_factory_dep_gas + )); + struct_block.line(format!( + "l1_tx_delta_factory_dep_pubdata: {},", + 
intrinsic_gas_constants.l1_tx_delta_factory_dep_pubdata + )); + struct_block.line(format!( + "bootloader_intrinsic_gas: {},", + intrinsic_gas_constants.bootloader_intrinsic_gas + )); + struct_block.line(format!( + "bootloader_intrinsic_pubdata: {},", + intrinsic_gas_constants.bootloader_intrinsic_pubdata + )); + struct_block.line(format!( + "bootloader_tx_memory_size_slots: {},", + intrinsic_gas_constants.bootloader_tx_memory_size_slots + )); + + get_intrinsic_constants_fn.push_block(struct_block); + } + + vec![ + "//! THIS FILE IS AUTOGENERATED: DO NOT EDIT MANUALLY!\n".to_string(), + "//! The file with constants related to fees most of which need to be computed\n" + .to_string(), + scope.to_string(), + ] + .concat() +} + +fn save_file(path_in_repo: &str, content: String) { + let zksync_home = std::env::var("ZKSYNC_HOME").expect("No ZKSYNC_HOME env var"); + let fee_constants_path = format!("{zksync_home}/{path_in_repo}"); + + fs::write(fee_constants_path, content) + .unwrap_or_else(|_| panic!("Failed to write to {}", path_in_repo)); +} + +fn update_rust_system_constants(intrinsic_gas_constants: &IntrinsicSystemGasConstants) { + let rust_fee_constants = generate_rust_fee_constants(intrinsic_gas_constants); + save_file( + "core/lib/config/src/constants/fees/intrinsic.rs", + rust_fee_constants, + ); +} + +fn update_l1_system_constants(intrinsic_gas_constants: &IntrinsicSystemGasConstants) { + let l1_system_config = generate_l1_contracts_system_config(intrinsic_gas_constants); + save_file("contracts/SystemConfig.json", l1_system_config); +} + +fn update_l2_system_constants(intrinsic_gas_constants: &IntrinsicSystemGasConstants) { + let l2_system_config = generate_l2_contracts_system_config(intrinsic_gas_constants); + save_file("etc/system-contracts/SystemConfig.json", l2_system_config); +} + +fn main() { + let intrinsic_gas_constants = intrinsic_costs::l2_gas_constants(); + + println!("Updating Core system constants"); + 
update_rust_system_constants(&intrinsic_gas_constants); + + println!("Updating L1 system constants"); + update_l1_system_constants(&intrinsic_gas_constants); + + println!("Updating L2 system constants"); + update_l2_system_constants(&intrinsic_gas_constants); +} diff --git a/core/bin/system-constants-generator/src/utils.rs b/core/bin/system-constants-generator/src/utils.rs new file mode 100644 index 000000000000..3f2ea259ff56 --- /dev/null +++ b/core/bin/system-constants-generator/src/utils.rs @@ -0,0 +1,334 @@ +use once_cell::sync::Lazy; +use tempfile::TempDir; +use vm::{ + storage::Storage, + utils::{ + create_test_block_params, insert_system_contracts, read_bootloader_test_code, + BLOCK_GAS_LIMIT, + }, + vm_with_bootloader::{ + init_vm_inner, push_raw_transaction_to_bootloader_memory, BlockContextMode, + BootloaderJobType, DerivedBlockContext, TxExecutionMode, + }, + zk_evm::{aux_structures::Timestamp, zkevm_opcode_defs::BOOTLOADER_HEAP_PAGE}, + OracleTools, +}; +use zksync_contracts::{load_sys_contract, read_bootloader_code, SystemContractCode}; +use zksync_state::{secondary_storage::SecondaryStateStorage, storage_view::StorageView}; +use zksync_storage::{db::Database, RocksDB}; +use zksync_types::{ + ethabi::Token, + fee::Fee, + l1::L1Tx, + l2::L2Tx, + tx::{ + tx_execution_info::{TxExecutionStatus, VmExecutionLogs}, + ExecutionMetrics, + }, + utils::storage_key_for_eth_balance, + AccountTreeId, Address, Execute, L1TxCommonData, L2ChainId, Nonce, StorageKey, Transaction, + BOOTLOADER_ADDRESS, H256, SYSTEM_CONTEXT_ADDRESS, SYSTEM_CONTEXT_GAS_PRICE_POSITION, + SYSTEM_CONTEXT_TX_ORIGIN_POSITION, U256, +}; +use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, u256_to_h256}; + +use crate::intrinsic_costs::VmSpentResourcesResult; + +pub static GAS_TEST_BOOTLOADER_CODE: Lazy = Lazy::new(|| { + let bytecode = read_bootloader_code("gas_test"); + let hash = hash_bytecode(&bytecode); + + SystemContractCode { + code: bytes_to_be_words(bytecode), + 
hash: h256_to_u256(hash), + } +}); + +// 100 gwei is base fee large enough for almost any L1 gas price +const BIG_BASE_FEE: u64 = 100_000_000_000; + +pub(super) fn get_l2_tx(contract_address: Address, signer: &H256, pubdata_price: u32) -> L2Tx { + L2Tx::new_signed( + contract_address, + vec![], + Nonce(0), + Fee { + gas_limit: U256::from(10000000u32), + max_fee_per_gas: U256::from(BIG_BASE_FEE), + max_priority_fee_per_gas: U256::from(0), + gas_per_pubdata_limit: pubdata_price.into(), + }, + U256::from(0), + L2ChainId(270), + signer, + None, + Default::default(), + ) + .unwrap() +} + +pub(super) fn get_l2_txs(number_of_txs: usize) -> (Vec, Vec) { + let mut txs_with_pubdata_price = vec![]; + let mut txs_without_pubdata_price = vec![]; + + for _ in 0..number_of_txs { + let signer = H256::random(); + let contract_address = Address::random(); + + txs_without_pubdata_price.push(get_l2_tx(contract_address, &signer, 0).into()); + + txs_with_pubdata_price.push(get_l2_tx(contract_address, &signer, 1).into()); + } + + (txs_with_pubdata_price, txs_without_pubdata_price) +} + +pub(super) fn get_l1_tx( + id: u64, + sender: Address, + contract_address: Address, + pubdata_price: u32, + custom_gas_limit: Option, + custom_calldata: Option>, + factory_deps: Option>>, +) -> L1Tx { + L1Tx { + execute: Execute { + contract_address, + calldata: custom_calldata.unwrap_or_default(), + value: U256::from(0), + factory_deps, + }, + common_data: L1TxCommonData { + sender, + serial_id: id.into(), + gas_limit: custom_gas_limit.unwrap_or_else(|| U256::from(10000000u32)), + gas_per_pubdata_limit: pubdata_price.into(), + ..Default::default() + }, + received_timestamp_ms: 0, + } +} + +pub(super) fn get_l1_txs(number_of_txs: usize) -> (Vec, Vec) { + let mut txs_with_pubdata_price = vec![]; + let mut txs_without_pubdata_price = vec![]; + + for id in 0..number_of_txs { + let sender = Address::random(); + let contract_address = Address::random(); + + txs_without_pubdata_price + .push(get_l1_tx(id as 
u64, sender, contract_address, 0, None, None, None).into()); + + txs_with_pubdata_price + .push(get_l1_tx(id as u64, sender, contract_address, 1, None, None, None).into()); + } + + (txs_with_pubdata_price, txs_without_pubdata_price) +} + +/// Executes the "internal transfer test" of the bootloader -- the test that +/// returns the amount of gas needed to perform and internal transfer, assuming no gas price +/// per pubdata, i.e. under assumption that the refund will not touch any new slots. +pub(super) fn execute_internal_transfer_test() -> u32 { + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + let bootloader_balane_key = storage_key_for_eth_balance(&BOOTLOADER_ADDRESS); + storage_ptr.set_value(&bootloader_balane_key, u256_to_h256(U256([0, 0, 1, 0]))); + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let transfer_test_bootloader = read_bootloader_test_code("transfer_test"); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + transfer_test_bootloader, + TxExecutionMode::VerifyExecute, + ); + + let eth_token_sys_contract = load_sys_contract("L2EthToken"); + let transfer_from_to = ð_token_sys_contract + .functions + .get("transferFromTo") + .unwrap()[0]; + let input = { + let mut input = transfer_from_to + .encode_input(&[ + Token::Address(BOOTLOADER_ADDRESS), + Token::Address(Address::random()), + Token::Uint(U256::from(1u32)), + ]) + .expect("Failed to encode the calldata"); + + // Padding input to be divisible by 32 + while 
input.len() % 32 != 0 { + input.push(0); + } + input + }; + let input: Vec<_> = bytes_to_be_words(input).into_iter().enumerate().collect(); + vm.state + .memory + .populate_page(BOOTLOADER_HEAP_PAGE as usize, input, Timestamp(0)); + + let result = vm.execute_till_block_end(BootloaderJobType::BlockPostprocessing); + + assert!( + result.block_tip_result.revert_reason.is_none(), + "The internal call has reverted" + ); + assert!( + result.full_result.revert_reason.is_none(), + "The internal call has reverted" + ); + + let value_recorded_from_test = vm.state.memory.read_slot(BOOTLOADER_HEAP_PAGE as usize, 0); + + value_recorded_from_test.value.as_u32() +} + +// Executes an array of transactions in the VM. +pub(super) fn execute_user_txs_in_test_gas_vm( + txs: Vec, + accept_failure: bool, +) -> VmSpentResourcesResult { + let total_gas_paid_upfront = txs + .iter() + .fold(U256::zero(), |sum, elem| sum + elem.gas_limit()); + + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + for tx in txs.iter() { + let sender_address = tx.initiator_account(); + let key = storage_key_for_eth_balance(&sender_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + } + + // We also set some of the storage slots to non-zero values. 
This is not how it will be + // done in production, but it allows to estimate the overhead of the bootloader more correctly + { + let bootloader_balance_key = storage_key_for_eth_balance(&BOOTLOADER_ADDRESS); + let tx_origin_key = StorageKey::new( + AccountTreeId::new(SYSTEM_CONTEXT_ADDRESS), + SYSTEM_CONTEXT_TX_ORIGIN_POSITION, + ); + let tx_gas_price_key = StorageKey::new( + AccountTreeId::new(SYSTEM_CONTEXT_ADDRESS), + SYSTEM_CONTEXT_GAS_PRICE_POSITION, + ); + + storage_ptr.set_value(&bootloader_balance_key, u256_to_h256(U256([1, 0, 0, 0]))); + storage_ptr.set_value(&tx_origin_key, u256_to_h256(U256([1, 0, 0, 0]))); + storage_ptr.set_value(&tx_gas_price_key, u256_to_h256(U256([1, 0, 0, 0]))); + } + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + GAS_TEST_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + + let mut total_gas_refunded = 0; + for tx in txs { + push_raw_transaction_to_bootloader_memory( + &mut vm, + tx.clone().into(), + TxExecutionMode::VerifyExecute, + 0, + ); + let tx_execution_result = vm + .execute_next_tx() + .expect("Bootloader failed while processing transaction"); + + total_gas_refunded += tx_execution_result.gas_refunded; + if !accept_failure { + assert_eq!( + tx_execution_result.status, + TxExecutionStatus::Success, + "A transaction has failed" + ); + } + } + + let result = vm.execute_till_block_end(BootloaderJobType::BlockPostprocessing); + let execution_logs = VmExecutionLogs { + storage_logs: result.full_result.storage_log_queries, + events: result.full_result.events, + l2_to_l1_logs: result.full_result.l2_to_l1_logs, + total_log_queries_count: result.full_result.total_log_queries, + }; + + let metrics = ExecutionMetrics::new( + &execution_logs, + result.full_result.gas_used as usize, + 0, // The number of contracts deployed is irrelevant for our needs 
+ result.full_result.contracts_used, + result.full_result.cycles_used, + ); + + VmSpentResourcesResult { + gas_consumed: vm.gas_consumed(), + total_gas_paid: total_gas_paid_upfront.as_u32() - total_gas_refunded, + pubdata_published: metrics.size() as u32, + total_pubdata_paid: 0, + } +} + +// Denotes a function that should return a tuple of arrays transactions. +// The first array should be with transactions with pubdata price 1. +// The second array should be with transactions with pubdata price 0. +pub type TransactionGenerator = dyn Fn(usize) -> (Vec, Vec); + +// The easiest way to retrieve the amount of gas the user has spent +// on public data is by comparing the results for the same transaction, but +// with different pubdata price (0 vs 1 respectively). The difference in gas +// paid by the users will be the number of gas spent on pubdata. +pub(crate) fn metrics_from_txs( + number_of_txs: usize, + tx_generator: &TransactionGenerator, +) -> VmSpentResourcesResult { + let (txs_with_pubdata_price, txs_without_pubdata_price) = tx_generator(number_of_txs); + + let tx_results_with_pubdata_price = + execute_user_txs_in_test_gas_vm(txs_with_pubdata_price, false); + let tx_results_without_pubdata_price = + execute_user_txs_in_test_gas_vm(txs_without_pubdata_price, false); + + // Sanity check + assert_eq!( + tx_results_with_pubdata_price.pubdata_published, + tx_results_without_pubdata_price.pubdata_published, + "The transactions should have identical pubdata published" + ); + + // We will use the results from the zero pubdata price block as the basis for the results + // but we will use the difference in gas spent as the number of pubdata compensated by the users + VmSpentResourcesResult { + total_pubdata_paid: tx_results_with_pubdata_price.total_gas_paid + - tx_results_without_pubdata_price.total_gas_paid, + ..tx_results_without_pubdata_price + } +} diff --git a/core/bin/verification_key_generator_and_server/Cargo.toml 
b/core/bin/verification_key_generator_and_server/Cargo.toml new file mode 100644 index 000000000000..7f4968dbc2d2 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "zksync_verification_key_generator_and_server" +version = "1.0.0" +edition = "2018" + +[lib] +name = "zksync_verification_key_server" +path = "src/lib.rs" + +[[bin]] +name = "zksync_verification_key_generator" +path = "src/main.rs" + +[[bin]] +name = "zksync_json_to_binary_vk_converter" +path = "src/json_to_binary_vk_converter.rs" + +[[bin]] +name = "zksync_commitment_generator" +path = "src/commitment_generator.rs" + +[dependencies] +zksync_types = {path = "../../lib/types", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +circuit_testing = {git = "https://github.com/matter-labs/circuit_testing.git", branch = "main"} +itertools = "0.10.5" +bincode = "1.3.3" + +serde_json = "1.0.85" +hex = "0.4.3" +structopt = "0.3.26" +ff = { package = "ff_ce", version = "0.14.1" } +toml_edit = "0.14.4" diff --git a/core/bin/verification_key_generator_and_server/data/verification_0_key.json b/core/bin/verification_key_generator_and_server/data/verification_0_key.json new file mode 100644 index 000000000000..f58dd8b77c96 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_0_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 6996639307385550831, + 17319912920749338549, + 13743098072687682109, + 647002596780190748 + ], + "y": [ + 14937211687798818996, + 3145678279503192900, + 2393479722380641473, + 1832345501773476397 + ], + "infinity": false + }, + { + "x": [ + 17598614517615535171, + 848181507441828018, + 5850486576278841220, + 32839070190916981 + ], + "y": [ + 15638119861902952636, + 15990070541865097016, + 9938241278726680530, + 3253668464465009821 + ], + "infinity": false + }, + { + "x": [ + 
15820142264037469263, + 8568150507675978934, + 15184738710625145743, + 2918079076708036341 + ], + "y": [ + 11416456459976315626, + 3402571070523181186, + 10711656809635850215, + 3458605442614241783 + ], + "infinity": false + }, + { + "x": [ + 3247185907660573884, + 8680893512058284381, + 15293663808369282826, + 254464349321261856 + ], + "y": [ + 13343058922756376449, + 12317703331224821391, + 11806868346310958492, + 2684579758117701042 + ], + "infinity": false + }, + { + "x": [ + 13800337384513968192, + 7175236405886576174, + 13570768133887085852, + 387814232289887216 + ], + "y": [ + 9885901018229566019, + 523828011130471196, + 1519861256484738763, + 2952234907395154335 + ], + "infinity": false + }, + { + "x": [ + 18087005260147532945, + 17928529254779250966, + 15212090268560149518, + 2873489351192112694 + ], + "y": [ + 9754679093955511335, + 15228868340103952717, + 7400010337361881225, + 2135269756274591123 + ], + "infinity": false + }, + { + "x": [ + 3911797286444259863, + 2822641825033566015, + 17971742906231839373, + 3182860756772797703 + ], + "y": [ + 12343345623334385235, + 285662075504430273, + 3999467118287193716, + 2995587335427070590 + ], + "infinity": false + }, + { + "x": [ + 1905795732733198067, + 11557924895297067947, + 6594413365276397340, + 840436611656519074 + ], + "y": [ + 5150542979696082917, + 5795660884865753383, + 17893634013801397437, + 2242493309707647496 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 8185829286974780081, + 15004561001743944578, + 3280208794976361430, + 2047845739237089281 + ], + "y": [ + 13820263889085061406, + 4214367347259390974, + 2949054642305024413, + 1108871040338712662 + ], + "infinity": false + }, + { + "x": [ + 14580608619995336140, + 14569188170612185050, + 1922583079876338495, + 1008434097725400856 + ], + "y": [ + 10727948391864029299, + 13894372243681777679, + 15203339060867180632, + 67905455398516005 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": 
[ + 4136655882081070061, + 10002634152140233885, + 980031450563900168, + 2157352299456443493 + ], + "y": [ + 11834666410496287947, + 12048212945911710814, + 12632184988200405344, + 3084449143866571881 + ], + "infinity": false + }, + { + "x": [ + 16491067695009214283, + 7420133316471415099, + 6689890872349113887, + 457551328045298948 + ], + "y": [ + 7262445873345515147, + 15297330015248906010, + 6131646910537731978, + 1498048652184369950 + ], + "infinity": false + }, + { + "x": [ + 8869864687692391690, + 7984673273578638559, + 16574497274063903001, + 110512104436182781 + ], + "y": [ + 13333735955399942427, + 3229330830010457349, + 15279257314107395654, + 661229224879455791 + ], + "infinity": false + }, + { + "x": [ + 2551282574896295260, + 9760668035586690469, + 2137355194036876554, + 997985933947120674 + ], + "y": [ + 14631704616305988699, + 10049029090982316688, + 18177176779365761171, + 465292341046541191 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 1073528, + "lookup_selector_commitment": { + "x": [ + 2876161092882109885, + 11236384672929432721, + 3859399786505175224, + 3407285818214399211 + ], + "y": [ + 10435488890608201079, + 17599366864863653972, + 8044023367771821543, + 2530994762013802379 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + 
], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 4651855694129232729, + 1837151187589998268, + 10423659239329633637, + 1354887747950126245 + ], + "y": [ + 11281536115673624785, + 9372850764821773580, + 16691907077441992896, + 1840015931540579970 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_10_key.json b/core/bin/verification_key_generator_and_server/data/verification_10_key.json new file mode 100644 index 000000000000..ffccb01e6ec7 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_10_key.json @@ 
-0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 1201163354318640229, + 2055219705754088415, + 2073513857941343608, + 1963241099003853619 + ], + "y": [ + 17799409318346209032, + 4569825428251693578, + 6381298955182636653, + 2931304030683520542 + ], + "infinity": false + }, + { + "x": [ + 10205334960232870709, + 8738143462199324053, + 10144063519726047206, + 1927681263581782011 + ], + "y": [ + 1230087240085406483, + 122158889944391944, + 15489966604787936285, + 1330866847855106152 + ], + "infinity": false + }, + { + "x": [ + 15032728768196374721, + 14862087582458614750, + 7160777336710671141, + 1839914978510952544 + ], + "y": [ + 9167736299854863275, + 11029351644449385190, + 11704926274847189145, + 260928699177925870 + ], + "infinity": false + }, + { + "x": [ + 6154048382441357511, + 6065973151607458483, + 10376828199029605678, + 637872484884018456 + ], + "y": [ + 14591679681463835063, + 11525093582596803352, + 1709443408871507900, + 3429218151747917770 + ], + "infinity": false + }, + { + "x": [ + 1815602336806763776, + 13593648969739071827, + 2250784759987498478, + 1383101671644568596 + ], + "y": [ + 5019124305555384450, + 17601640867098758221, + 9775997368301268404, + 1408209289935626655 + ], + "infinity": false + }, + { + "x": [ + 15529551917580920354, + 15991144646297655273, + 9486652439657094814, + 1390602104586342411 + ], + "y": [ + 3929940190863560223, + 8236698157821025721, + 5646571270823092175, + 1332794415076748898 + ], + "infinity": false + }, + { + "x": [ + 16414056192599840640, + 9354012131211735456, + 9596248347509541885, + 618105062913462260 + ], + "y": [ + 14706399904358885528, + 18407654245399576822, + 12492666645205976157, + 1948689159065703317 + ], + "infinity": false + }, + { + "x": [ + 13558528973202397344, + 16490213789147112711, + 2545050042457701112, + 2461984285815921195 + ], + "y": [ + 1243522511418786191, + 14024991914114903033, + 
3240957311806204415, + 1331963310566154043 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 2355207075120322239, + 154382368370147482, + 17535588518046215656, + 951301722877602968 + ], + "y": [ + 8143574239198426927, + 15181046710423727756, + 939512270474677381, + 2189391407681219550 + ], + "infinity": false + }, + { + "x": [ + 2890553312916912039, + 17705725012328133701, + 2022719624271833539, + 2637957613463216068 + ], + "y": [ + 5236464205678137999, + 16757283308499630897, + 8200824660959901166, + 2919902965709603522 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 16714273949872413156, + 2992548526703231722, + 12994550586454036656, + 2850293963773054147 + ], + "y": [ + 20936503166899629, + 6605606497308863677, + 17099277659192707089, + 1380542399287382407 + ], + "infinity": false + }, + { + "x": [ + 13795422779792735167, + 5844171721881847978, + 15538273242292518413, + 1058423441485970566 + ], + "y": [ + 6786146000425626930, + 16000492908624161349, + 14508879795995142132, + 2058605944332981830 + ], + "infinity": false + }, + { + "x": [ + 18200274038480135632, + 16055911430166580378, + 300150758763362260, + 3469399278233580352 + ], + "y": [ + 13410997741446184872, + 18426661601342750165, + 13675996097717919382, + 2823744450040807611 + ], + "infinity": false + }, + { + "x": [ + 2441841452202772744, + 10073497087340947500, + 6034950516307738609, + 2764263384383384439 + ], + "y": [ + 2932218572245153404, + 6171621796632231602, + 14552010019679184040, + 290197788459327299 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 5202052, + "lookup_selector_commitment": { + "x": [ + 4679962009764158864, + 12984488634629810805, + 12089544576783397583, + 2452985478928538759 + ], + "y": [ + 16434102449642732058, + 2996812911497483791, + 3668848517208777011, + 2423647953023799522 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 697552212563769686, + 7709943502535418760, 
+ 15019345407325619175, + 3433081085078580257 + ], + "y": [ + 8668947019840357731, + 14698901351824712883, + 15088598879190660424, + 2873081208166433946 + ], + "infinity": false + }, + { + "x": [ + 7893133928909060673, + 7064922516930129957, + 3592836702741304814, + 2239702595710114437 + ], + "y": [ + 7691360541875191519, + 11379321785127235277, + 6653616064071569031, + 2555434628517540774 + ], + "infinity": false + }, + { + "x": [ + 6243944238013052821, + 7908243182210136125, + 17178099109525791299, + 2553622184721264566 + ], + "y": [ + 736121280088239428, + 6158073429758170526, + 11217302997977204117, + 2594798912020899417 + ], + "infinity": false + }, + { + "x": [ + 2064240298596094591, + 16917726764104887991, + 11042784977532408536, + 3377647228930170830 + ], + "y": [ + 10635525052494768819, + 387400048616497096, + 9379200582543310995, + 1571766153703296253 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 156247164488818630, + 15253439856813229615, + 8663309459020264968, + 3311090483224496957 + ], + "y": [ + 10512458335337838200, + 8831597928021087370, + 4017939162489373323, + 1056856470188779191 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 
2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_11_key.json b/core/bin/verification_key_generator_and_server/data/verification_11_key.json new file mode 100644 index 000000000000..3bae44408910 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_11_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 6404793958941109752, + 600086648940026770, + 17621036346050218167, + 648286585825030202 + ], + "y": [ + 15536368541166505022, + 13874331483468128999, + 15299774519724050181, + 694528839710637549 + ], + "infinity": false + }, + { + "x": [ + 8437895530551083583, + 9515418928119648176, + 13043255827139294721, + 2995712510038409810 + ], + "y": [ + 2599666661350767554, + 5213004864468121936, + 3448071048439343925, + 3372727479169634860 + ], + "infinity": false + }, + { + "x": [ + 15617602246632337967, + 5734632097258026316, + 10326516376673449026, + 104556261410764610 + ], + "y": [ + 11823761102806147813, + 10127667027117555433, + 513779115084852666, + 2443281277309199970 + ], + "infinity": false + }, + { + "x": [ + 10765735847634894938, + 996016141851615448, + 17905928073714218280, + 1382306444325686451 + ], + "y": [ + 2138154197587423296, + 10332772886666867909, + 18365120064743353477, + 3036329558617382049 + ], + "infinity": false + }, + { + "x": [ + 10826908009799408310, + 17008417534705779156, + 6763973494549063072, + 2085829964414931488 + ], + "y": [ + 8778528796073273991, + 3575354418973385595, + 7700555759899743641, + 2991788183234680231 + ], + "infinity": false + }, + { + "x": [ + 
4838537981048085423, + 17733460364049897496, + 2406410363431464143, + 317979983533551325 + ], + "y": [ + 1063783130085451648, + 17468950496650586998, + 1638492556781126884, + 2655791721465286744 + ], + "infinity": false + }, + { + "x": [ + 9900079822056413611, + 2971494295919434281, + 3851188096409515874, + 1674965457600938162 + ], + "y": [ + 278026997091552202, + 4169606578927284200, + 4285297176993939496, + 1835673146863992148 + ], + "infinity": false + }, + { + "x": [ + 14972922803706426724, + 1950002897609593521, + 14885502244328862256, + 2711533695106895845 + ], + "y": [ + 6445273103061253271, + 13093783937225622775, + 16913300898726970338, + 3338984185497324237 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 7023363902839996761, + 10470701207992157969, + 15655647820064667897, + 1574806151825297776 + ], + "y": [ + 5374465760860613169, + 17808737811039085287, + 9497881147171478776, + 2496973717640690197 + ], + "infinity": false + }, + { + "x": [ + 11667333913021610767, + 981513539224109240, + 906325130343873228, + 2938085706999497365 + ], + "y": [ + 12114685726509803851, + 8176447551157079615, + 4677211732718215770, + 612959750791398009 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 5178916486603003859, + 12440762249350081718, + 17531240512375127539, + 562979322442547791 + ], + "y": [ + 13269831614205338393, + 14075713698585784838, + 5009519510530479124, + 346033861980045408 + ], + "infinity": false + }, + { + "x": [ + 9815443577325313677, + 10727907015331332054, + 7582395371050260833, + 1746872659838481572 + ], + "y": [ + 3973552805135639320, + 14426732004648741961, + 8133164322153358522, + 2668541869556858228 + ], + "infinity": false + }, + { + "x": [ + 4868257934818957423, + 11529848268525929099, + 7089666284160764141, + 796901367628793969 + ], + "y": [ + 991195814042705325, + 1559922382138761102, + 15616159453482282503, + 1031107741111093289 + ], + "infinity": false + }, + { + "x": [ + 
17936772813090339705, + 10208762457499980701, + 14796710996322725970, + 638550977107438851 + ], + "y": [ + 5073905611192321777, + 2956648407808816974, + 7778989780119416172, + 2955106321082932072 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 7960377, + "lookup_selector_commitment": { + "x": [ + 1083743271968869166, + 3134203175755215736, + 5835502497758804469, + 3010956977291777466 + ], + "y": [ + 3645612220088813035, + 32844736552579976, + 5426466326302260857, + 1489565191618899261 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 5825422128268478267, + 9219263846299851036, + 3879231702557190566, + 1702488722758880769 + ], + "y": [ + 18311881100262470992, + 5742998199368802392, + 18106865487471159417, + 502191980176920012 + ], + "infinity": false + }, + { + "x": [ + 17195892082859417081, + 7890531942603584793, + 2381805632820057528, + 3173232410464566465 + ], + "y": [ + 16359614627947132075, + 3459600273035137079, + 4550762061432972122, + 3394559699318358224 + ], + "infinity": false + }, + { + "x": [ + 1716103379277390185, + 18097936269579187542, + 16357329729761063450, + 1508640059338197502 + ], + "y": [ + 11014806739603983364, + 4396503314588777389, + 9397245609635151055, + 1703957955248411380 + ], + "infinity": false + }, + { + "x": [ + 4770171350693477354, + 17110558673192292253, + 9799800677557311408, + 761984875463445481 + ], + "y": [ + 1560561403388310063, + 31331275310848146, + 287152055803835484, + 457826332542037277 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 11327495732840772606, + 7407664417001729515, + 9486600059857658309, + 3060296564241189838 + ], + "y": [ + 7624492872489320847, + 18248981556039704277, + 3877205757853252152, + 939885486002612376 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 
7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_12_key.json b/core/bin/verification_key_generator_and_server/data/verification_12_key.json new file mode 100644 index 000000000000..fec076f39eda --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_12_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 456514006020943025, + 9595480195714948127, + 12254096252487404245, + 1742692690750856358 + ], + "y": [ + 16294223586064957217, + 3958270970168887906, + 11264067544872898258, + 1692817687935973108 + ], + "infinity": false + }, + { + "x": [ + 1359655052308122459, + 13840124148496555776, + 1774237333490664500, + 2964872651584750318 + ], + "y": [ + 11907598503482948769, + 8700506041798646988, + 15081040576888859990, + 3096802642049924528 + ], + "infinity": false + }, + { + "x": [ + 2884314851670818573, + 13442465544210396156, + 5937955495868181363, + 2486997439179977778 + ], + 
"y": [ + 9309776793338098458, + 14492906371677122697, + 8837309186596588911, + 1081143755093508499 + ], + "infinity": false + }, + { + "x": [ + 2655654413304275855, + 4244723109566147837, + 12150359360501203194, + 3338981627918702615 + ], + "y": [ + 2522870072161287404, + 17341373219317210182, + 13058930363994599297, + 210373422168410518 + ], + "infinity": false + }, + { + "x": [ + 16728834675380740056, + 2139390496020366235, + 9480389182940223467, + 2279560291896695719 + ], + "y": [ + 12461418813218976432, + 357566005384566098, + 5295578385080568808, + 1801243085576438875 + ], + "infinity": false + }, + { + "x": [ + 8716201428771436123, + 3392394702404760386, + 9990956922582058945, + 1388317411153212399 + ], + "y": [ + 11666415392681680155, + 10553517485129490455, + 16061047708722635939, + 2386622646140901822 + ], + "infinity": false + }, + { + "x": [ + 16162432560623854812, + 15537581062716888632, + 12927223782958923606, + 2800634589869451227 + ], + "y": [ + 5345141365329635916, + 2224393250977631865, + 396527108738048188, + 2298318725146167177 + ], + "infinity": false + }, + { + "x": [ + 18372685954785469756, + 10436523365152935441, + 15509622927999798123, + 2050428620045833325 + ], + "y": [ + 4996265985148335658, + 6073112270434155721, + 4873288683270752338, + 503179567393027927 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 4986139828502830074, + 8644425445976253042, + 4851433922656693398, + 1419574698085640872 + ], + "y": [ + 16192186537521161947, + 16183885683582261905, + 1655718756619164666, + 3420236094426390604 + ], + "infinity": false + }, + { + "x": [ + 10727231722644915889, + 13777116005624794169, + 1422623412369619026, + 1701279717637612575 + ], + "y": [ + 6503647097427010249, + 6381043883853023011, + 15391366286376907281, + 1261207976874708261 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 11852073725466955067, + 179170887563176222, + 17529899074897279348, + 2496783194148289461 + ], 
+ "y": [ + 15490041181991978284, + 6745436372504113852, + 7017978386715410058, + 3482556315200370895 + ], + "infinity": false + }, + { + "x": [ + 1330152738947291505, + 1668990644246591877, + 6805443255260621096, + 1309987766073890626 + ], + "y": [ + 18322300356676620444, + 8225233874302527542, + 5744327785164342590, + 410571567010522636 + ], + "infinity": false + }, + { + "x": [ + 13968210937929584911, + 17067601391996082961, + 4861463652254416951, + 2147834012714370408 + ], + "y": [ + 9012483356698219484, + 8660929519763525826, + 17744882010750642463, + 331423342438323189 + ], + "infinity": false + }, + { + "x": [ + 1352282553299127274, + 8587971715415488300, + 2471024479841756772, + 1239586065229072559 + ], + "y": [ + 1597792022909153930, + 5020991346876715357, + 5622801511814109910, + 1916460940163680567 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 46287674, + "lookup_selector_commitment": { + "x": [ + 11573469000684493293, + 15304040816406013002, + 9206902553183544808, + 2597693769113957036 + ], + "y": [ + 10538181061926273477, + 5239567589495426242, + 3627181047901924882, + 302644994241575377 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 5134795695995115566, + 12287750992060803275, + 3112021177339560487, + 2737779104829043419 + ], + "y": [ + 12960786984497012138, + 17246059378047870426, + 11486754204718893642, + 46104506716724806 + ], + "infinity": false + }, + { + "x": [ + 148472607159578301, + 1393814398025790148, + 13651878286378332448, + 3460878321325997474 + ], + "y": [ + 10791022888598424744, + 1931353219232076143, + 12342018346439101174, + 23632989633122111 + ], + "infinity": false + }, + { + "x": [ + 1355031833403957875, + 10754997913401276231, + 8672292473740482178, + 3014145653612856517 + ], + "y": [ + 3728402825933673134, + 16492594359417243041, + 14619929139939206930, + 2894280666048705144 + ], + "infinity": false + }, + { + "x": [ + 11362104917939269301, + 3050269804312222606, + 
17884269955997757593, + 2804911625130359365 + ], + "y": [ + 9563576475625880180, + 9736108320914226650, + 11545696954602328389, + 1108440262014676246 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 5367643753678334453, + 18149093736372716410, + 1335188566370936146, + 668596617655217713 + ], + "y": [ + 9984652217894703540, + 16253861114794085212, + 2139268495406835151, + 710303505771002735 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_13_key.json b/core/bin/verification_key_generator_and_server/data/verification_13_key.json new file mode 100644 index 000000000000..73ffbd212002 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_13_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + 
"num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 17551054392858982554, + 6093238351564742844, + 9461983640740135929, + 665917981733823732 + ], + "y": [ + 5039211542045701927, + 14102316155129161178, + 7599318237652648682, + 1484263542771007309 + ], + "infinity": false + }, + { + "x": [ + 14015566113565304739, + 12895182424777444911, + 5150482782915031712, + 3280776276671330755 + ], + "y": [ + 5503211683737487414, + 5857977821275887356, + 1294122171191120577, + 2917900236095606783 + ], + "infinity": false + }, + { + "x": [ + 11180353512945796758, + 5467792637578213396, + 14862660111090994534, + 1678570344676416345 + ], + "y": [ + 16496106534540891926, + 4355829424666415263, + 8379906815867503783, + 2141225531456729878 + ], + "infinity": false + }, + { + "x": [ + 10512618919562577175, + 8909238001556772501, + 8669074760108324520, + 3259590816167766101 + ], + "y": [ + 15477336671232249792, + 10209451912771766896, + 13672268903388741173, + 682487251336397201 + ], + "infinity": false + }, + { + "x": [ + 14233534177298597555, + 14428793231398751908, + 18070433438826750034, + 1176819688107481869 + ], + "y": [ + 9251234182098356520, + 17131606126090989402, + 17185633762130361526, + 70013401388751862 + ], + "infinity": false + }, + { + "x": [ + 14148566925658671094, + 812517577375883951, + 5030512299767107864, + 44275794325016754 + ], + "y": [ + 3275438385460491589, + 12366768737850140720, + 10754478223029148744, + 64366431004577735 + ], + "infinity": false + }, + { + "x": [ + 5646513434714516506, + 12578668031398681290, + 6956692825033783810, + 536471110695536326 + ], + "y": [ + 876079378616587621, + 9787032999740439668, + 14965634813605966164, + 367083452910738472 + ], + "infinity": false + }, + { + "x": [ + 10902302115259229513, + 14044271471332330954, + 14571826360674828773, + 733766328575554031 + ], + "y": [ + 8186695183963076514, + 621472878958955881, + 14756382569165412398, + 3165780226323675661 + ], + "infinity": false + } + ], + 
"gate_selectors_commitments": [ + { + "x": [ + 17780673306296332984, + 10355922416617009060, + 5077451999006954761, + 2644291606399153501 + ], + "y": [ + 884498752701137122, + 731399349168706916, + 4286165746592754883, + 3279732117855760703 + ], + "infinity": false + }, + { + "x": [ + 11012802284910829398, + 7859388231941271159, + 17586341808458361180, + 1386364899721133297 + ], + "y": [ + 15634369655108108777, + 3858480397682251762, + 17706291110507066608, + 1663421415693803071 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 18134041530736321349, + 4345724579806003155, + 2324407857452293002, + 2319164124977213120 + ], + "y": [ + 14302129084811449335, + 8588677756442252515, + 3323846949783670865, + 2109729211841784387 + ], + "infinity": false + }, + { + "x": [ + 14486843004985564085, + 10799247040254992370, + 7658639806933647132, + 2215292564171027727 + ], + "y": [ + 14258341133968554193, + 11685656973533320944, + 14111972937744219524, + 1172604679688980794 + ], + "infinity": false + }, + { + "x": [ + 12872375111956991701, + 14049784009914403066, + 15325016171856456312, + 2811875539960405333 + ], + "y": [ + 5711194902040443430, + 13827091592207472460, + 17950028361571343192, + 1672758585097311581 + ], + "infinity": false + }, + { + "x": [ + 11717525586585736911, + 730672019767199816, + 3010255132348992613, + 2780587454575324896 + ], + "y": [ + 1473124157542628664, + 1573646910034288561, + 10026766074599473146, + 563223750818543582 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 42547753, + "lookup_selector_commitment": { + "x": [ + 4539928924349895484, + 2792770915461027618, + 11611697420465472575, + 1384307956752801018 + ], + "y": [ + 8840366360901511807, + 8892919985613263102, + 11941090149541110830, + 1930352681887390920 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 
15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 4121704254446914578, + 13863658665929861884, + 15362282368839162345, + 2762703036966024619 + ], + "y": [ + 102846692212239082, + 14904466746900448136, + 16872429770359000841, + 1687152581020907098 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 
9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_14_key.json b/core/bin/verification_key_generator_and_server/data/verification_14_key.json new file mode 100644 index 000000000000..e8c42d407e35 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_14_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 6916434521451934576, + 614815553772638285, + 3742595993843812033, + 2823214088432624432 + ], + "y": [ + 11642815096362884283, + 18063950820723921281, + 6353943092001719992, + 3201898419478369298 + ], + "infinity": false + }, + { + "x": [ + 10647237757917239762, + 1269177049592707998, + 2650053775033150725, + 582198744757304104 + ], + "y": [ + 9804667267596536998, + 493663115027956828, + 13953159385227792767, + 1568248765042207679 + ], + "infinity": false + }, + { + "x": [ + 7910659438561833906, + 12456422925439856914, + 10869604528749370003, + 1213616301038416610 + ], + "y": [ + 2606202790862698157, + 6809934263763206210, + 17472080335242458272, + 2884639755368519501 + ], + "infinity": false + }, + { + "x": [ + 14211325859682683183, + 11018598407116786751, + 10064425366978091674, + 2748595948091261209 + ], + "y": [ + 13960202853590116423, + 1211975538022172568, + 16303435518817750320, + 1634234707214097860 + ], + "infinity": false + }, + { + "x": [ + 4528591178982443847, + 16310104707629911601, + 5532120103079323919, + 1347877820087040669 + ], + "y": [ + 17983603511717948746, + 9529659424488112452, + 7820918413906679254, + 1819855238351369466 + ], + "infinity": false + }, + { + "x": [ + 14415562798118912210, + 6550719056383417327, + 
424281724891761932, + 1264340531903932141 + ], + "y": [ + 7768057951329404686, + 15024442753889769568, + 9676935351692818899, + 1492251668690310932 + ], + "infinity": false + }, + { + "x": [ + 2619366878850208112, + 12150914745315976156, + 8375197026043390274, + 1935272977563031501 + ], + "y": [ + 5381369692389055354, + 17978011500330472972, + 17420193441326928998, + 479187691463910357 + ], + "infinity": false + }, + { + "x": [ + 8720830951139717797, + 15985700059986022675, + 11876530273787337931, + 421322430672290976 + ], + "y": [ + 9700690437922183179, + 1976785701667862157, + 16634886936358874061, + 3002178567925406588 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 8284083154661042764, + 11776500066398184343, + 868620904897679124, + 2988582549909766892 + ], + "y": [ + 10794129605563176627, + 15487634480061313925, + 17194646451372113884, + 2087686927573540537 + ], + "infinity": false + }, + { + "x": [ + 7916190330285050096, + 11731220788334102406, + 6221883233572429550, + 2552280229203107267 + ], + "y": [ + 10510502959728300366, + 14682539966609739595, + 8275243146917870162, + 164811532254637923 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 195850038587200624, + 10136289160450054078, + 4386512701252721226, + 219366815902177323 + ], + "y": [ + 12042545079209848932, + 599057886584676736, + 14545610403811537682, + 498958995843318019 + ], + "infinity": false + }, + { + "x": [ + 4721932753701441297, + 1676671918244393403, + 6943597542294442696, + 50994782040503038 + ], + "y": [ + 8321420884695240511, + 10606883887907326697, + 11471075822795411018, + 1311422627151559437 + ], + "infinity": false + }, + { + "x": [ + 85448132386017640, + 13016912343020112485, + 11647418800345296605, + 1741562939125330787 + ], + "y": [ + 10753835454658443286, + 8646325836340244979, + 7348777908140142985, + 2196062626460604424 + ], + "infinity": false + }, + { + "x": [ + 2125624295892265840, + 12754141819506101591, + 
8789168208880604752, + 947087620272222934 + ], + "y": [ + 12566258871261234263, + 12307504590191426495, + 6700589767183706452, + 1828704371386663334 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 42212029, + "lookup_selector_commitment": { + "x": [ + 7709849601046260359, + 6836713108454667472, + 17360769186231334246, + 2348971634881039863 + ], + "y": [ + 13380830060569421804, + 15446653016734774164, + 17884501636917484387, + 1386904567459265970 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 6960699536013090594, + 2075384204892265266, + 12053931571725248687, + 1371193846897305849 + ], + "y": [ + 8904850119058507432, + 10465598889525773001, + 16159541505228012497, + 1982452464017823539 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 
1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_15_key.json b/core/bin/verification_key_generator_and_server/data/verification_15_key.json new file mode 100644 index 000000000000..356dbb3c531a --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_15_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 3227382513538635502, + 10189582412003011525, + 1928710987967879299, + 1641062823248805930 + ], + "y": [ + 3271795224553087841, + 14036363906521936156, + 10253705337161624780, + 3091191233208402889 + ], + "infinity": false + }, + { + "x": [ + 3541471743181642086, + 8117051273006688414, + 685909872467163024, + 2614724468827209722 + ], + "y": [ + 1096952120887201428, + 8197980407203032569, + 3949713006885563085, + 2838982585728277197 + ], + "infinity": false + }, + { + "x": [ + 12432945880074879560, + 13444859845042471186, + 16599097070979057001, + 3064039790213026567 + ], + "y": [ + 3745088406100356357, + 
11715355314289478148, + 2282946417129489745, + 1619614407449915711 + ], + "infinity": false + }, + { + "x": [ + 6864310053920223866, + 11095455024311706186, + 12229748247000682102, + 2475016349586561501 + ], + "y": [ + 2946781066962542712, + 14275500021265062654, + 7624481756022778467, + 1439658776940615826 + ], + "infinity": false + }, + { + "x": [ + 13589273139905087785, + 10411035015021574213, + 7322465558208873130, + 1805943743448229826 + ], + "y": [ + 13035238946064559886, + 8309482746549063820, + 14229757515324464781, + 1676135665275665956 + ], + "infinity": false + }, + { + "x": [ + 84006308859404982, + 13783127238980064918, + 14101945786439708601, + 3343881426944938693 + ], + "y": [ + 11959320721291234482, + 7288504259378326725, + 9638777183731403514, + 1648453409181088010 + ], + "infinity": false + }, + { + "x": [ + 10987163680360734145, + 3374907765066907489, + 14421201974855570464, + 3148542489906320493 + ], + "y": [ + 17180031485000081847, + 1609372527008367113, + 6050341427989573858, + 477684541505306009 + ], + "infinity": false + }, + { + "x": [ + 2257028353691713628, + 6330174784373016532, + 1686021628649718039, + 2159927805963705967 + ], + "y": [ + 10814125155819336479, + 9673780307204445954, + 7995606758095566598, + 2252251279727988680 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 12209724104183572477, + 11631007075974892904, + 18407423517909669447, + 1123848354500646471 + ], + "y": [ + 4749227851055533192, + 16918951234067984229, + 5345146076707243019, + 2836719468222132526 + ], + "infinity": false + }, + { + "x": [ + 7250866110466496804, + 16022969863388101391, + 16334300930347324147, + 2232272485807431638 + ], + "y": [ + 257675104580526310, + 8044331403028603186, + 2070174268860891010, + 412313474208091695 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 6736882681315025594, + 13400430183084617843, + 17182588928882896917, + 413858188107207402 + ], + "y": [ + 
11944170108613027081, + 10598841640624895850, + 9086311820289524704, + 994240611047161478 + ], + "infinity": false + }, + { + "x": [ + 9500318283622871785, + 5480449932874899465, + 13224510306395939252, + 1891329668301281157 + ], + "y": [ + 7314078756040350933, + 1023294602177498218, + 16475078688698425911, + 1793945182112302214 + ], + "infinity": false + }, + { + "x": [ + 17207548058425781429, + 2519222249126358251, + 16087595361924038018, + 3470846273906312296 + ], + "y": [ + 7578361094884620755, + 7082109151721400218, + 13675372677342046523, + 3204472226310685459 + ], + "infinity": false + }, + { + "x": [ + 7036282717341939568, + 3035419720331773758, + 6765191455902729185, + 1301973211946290083 + ], + "y": [ + 697377419426635450, + 14612037890797520515, + 11746079616766057625, + 1031190413179598818 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 6391155, + "lookup_selector_commitment": { + "x": [ + 17111915492430945419, + 17971275185478677346, + 14211391044159602918, + 2381455978713737016 + ], + "y": [ + 13971515893527127207, + 7078722574057096191, + 6337080743811431820, + 757015217034494132 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 5825422128268478267, + 9219263846299851036, + 3879231702557190566, + 1702488722758880769 + ], + "y": [ + 18311881100262470992, + 5742998199368802392, + 18106865487471159417, + 502191980176920012 + ], + "infinity": false + }, + { + "x": [ + 17195892082859417081, + 7890531942603584793, + 2381805632820057528, + 3173232410464566465 + ], + "y": [ + 16359614627947132075, + 3459600273035137079, + 4550762061432972122, + 3394559699318358224 + ], + "infinity": false + }, + { + "x": [ + 1716103379277390185, + 18097936269579187542, + 16357329729761063450, + 1508640059338197502 + ], + "y": [ + 11014806739603983364, + 4396503314588777389, + 9397245609635151055, + 1703957955248411380 + ], + "infinity": false + }, + { + "x": [ + 4770171350693477354, + 17110558673192292253, + 9799800677557311408, 
+ 761984875463445481 + ], + "y": [ + 1560561403388310063, + 31331275310848146, + 287152055803835484, + 457826332542037277 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 12452920133699897102, + 6896642231513345496, + 4655495116895575043, + 1453525729114564853 + ], + "y": [ + 3574087764464303986, + 10141819911397868785, + 2342639320036978232, + 556196027732983028 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_16_key.json b/core/bin/verification_key_generator_and_server/data/verification_16_key.json new file mode 100644 index 000000000000..356dbb3c531a --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_16_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + 
"gate_setup_commitments": [ + { + "x": [ + 3227382513538635502, + 10189582412003011525, + 1928710987967879299, + 1641062823248805930 + ], + "y": [ + 3271795224553087841, + 14036363906521936156, + 10253705337161624780, + 3091191233208402889 + ], + "infinity": false + }, + { + "x": [ + 3541471743181642086, + 8117051273006688414, + 685909872467163024, + 2614724468827209722 + ], + "y": [ + 1096952120887201428, + 8197980407203032569, + 3949713006885563085, + 2838982585728277197 + ], + "infinity": false + }, + { + "x": [ + 12432945880074879560, + 13444859845042471186, + 16599097070979057001, + 3064039790213026567 + ], + "y": [ + 3745088406100356357, + 11715355314289478148, + 2282946417129489745, + 1619614407449915711 + ], + "infinity": false + }, + { + "x": [ + 6864310053920223866, + 11095455024311706186, + 12229748247000682102, + 2475016349586561501 + ], + "y": [ + 2946781066962542712, + 14275500021265062654, + 7624481756022778467, + 1439658776940615826 + ], + "infinity": false + }, + { + "x": [ + 13589273139905087785, + 10411035015021574213, + 7322465558208873130, + 1805943743448229826 + ], + "y": [ + 13035238946064559886, + 8309482746549063820, + 14229757515324464781, + 1676135665275665956 + ], + "infinity": false + }, + { + "x": [ + 84006308859404982, + 13783127238980064918, + 14101945786439708601, + 3343881426944938693 + ], + "y": [ + 11959320721291234482, + 7288504259378326725, + 9638777183731403514, + 1648453409181088010 + ], + "infinity": false + }, + { + "x": [ + 10987163680360734145, + 3374907765066907489, + 14421201974855570464, + 3148542489906320493 + ], + "y": [ + 17180031485000081847, + 1609372527008367113, + 6050341427989573858, + 477684541505306009 + ], + "infinity": false + }, + { + "x": [ + 2257028353691713628, + 6330174784373016532, + 1686021628649718039, + 2159927805963705967 + ], + "y": [ + 10814125155819336479, + 9673780307204445954, + 7995606758095566598, + 2252251279727988680 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { 
+ "x": [ + 12209724104183572477, + 11631007075974892904, + 18407423517909669447, + 1123848354500646471 + ], + "y": [ + 4749227851055533192, + 16918951234067984229, + 5345146076707243019, + 2836719468222132526 + ], + "infinity": false + }, + { + "x": [ + 7250866110466496804, + 16022969863388101391, + 16334300930347324147, + 2232272485807431638 + ], + "y": [ + 257675104580526310, + 8044331403028603186, + 2070174268860891010, + 412313474208091695 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 6736882681315025594, + 13400430183084617843, + 17182588928882896917, + 413858188107207402 + ], + "y": [ + 11944170108613027081, + 10598841640624895850, + 9086311820289524704, + 994240611047161478 + ], + "infinity": false + }, + { + "x": [ + 9500318283622871785, + 5480449932874899465, + 13224510306395939252, + 1891329668301281157 + ], + "y": [ + 7314078756040350933, + 1023294602177498218, + 16475078688698425911, + 1793945182112302214 + ], + "infinity": false + }, + { + "x": [ + 17207548058425781429, + 2519222249126358251, + 16087595361924038018, + 3470846273906312296 + ], + "y": [ + 7578361094884620755, + 7082109151721400218, + 13675372677342046523, + 3204472226310685459 + ], + "infinity": false + }, + { + "x": [ + 7036282717341939568, + 3035419720331773758, + 6765191455902729185, + 1301973211946290083 + ], + "y": [ + 697377419426635450, + 14612037890797520515, + 11746079616766057625, + 1031190413179598818 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 6391155, + "lookup_selector_commitment": { + "x": [ + 17111915492430945419, + 17971275185478677346, + 14211391044159602918, + 2381455978713737016 + ], + "y": [ + 13971515893527127207, + 7078722574057096191, + 6337080743811431820, + 757015217034494132 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 5825422128268478267, + 9219263846299851036, + 3879231702557190566, + 1702488722758880769 + ], + "y": [ + 18311881100262470992, + 5742998199368802392, + 
18106865487471159417, + 502191980176920012 + ], + "infinity": false + }, + { + "x": [ + 17195892082859417081, + 7890531942603584793, + 2381805632820057528, + 3173232410464566465 + ], + "y": [ + 16359614627947132075, + 3459600273035137079, + 4550762061432972122, + 3394559699318358224 + ], + "infinity": false + }, + { + "x": [ + 1716103379277390185, + 18097936269579187542, + 16357329729761063450, + 1508640059338197502 + ], + "y": [ + 11014806739603983364, + 4396503314588777389, + 9397245609635151055, + 1703957955248411380 + ], + "infinity": false + }, + { + "x": [ + 4770171350693477354, + 17110558673192292253, + 9799800677557311408, + 761984875463445481 + ], + "y": [ + 1560561403388310063, + 31331275310848146, + 287152055803835484, + 457826332542037277 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 12452920133699897102, + 6896642231513345496, + 4655495116895575043, + 1453525729114564853 + ], + "y": [ + 3574087764464303986, + 10141819911397868785, + 2342639320036978232, + 556196027732983028 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 
2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_17_key.json b/core/bin/verification_key_generator_and_server/data/verification_17_key.json new file mode 100644 index 000000000000..4886f501712e --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_17_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 17914331890341023175, + 5200903915088916638, + 7417971632353510341, + 989671567770015891 + ], + "y": [ + 2927207345798721401, + 12686845373576710402, + 977520799157489114, + 1882223742569339495 + ], + "infinity": false + }, + { + "x": [ + 17162848902278956536, + 16169550484471334725, + 10830640611178609260, + 1347016616567630867 + ], + "y": [ + 6224316231648682710, + 10518372790293065661, + 4887066336660303630, + 703109868065750569 + ], + "infinity": false + }, + { + "x": [ + 15783141083967762454, + 16153855592853073081, + 5667838393811413602, + 1552498518850981979 + ], + "y": [ + 4220445586486275972, + 13196202402039716924, + 17506868028821343237, + 2718319833724164541 + ], + "infinity": false + }, + { + "x": [ + 4896615254637588846, + 5804270398165250639, + 10274952983674590649, + 1937027782721476561 + ], + "y": [ + 14180244016629518742, + 1376497406583367686, + 11268467489552574214, + 2331396669725958189 + ], + "infinity": false + }, + { + "x": [ + 191294939748295885, + 2804205121966814820, + 3897841028303648224, + 3406986167359695085 + ], + "y": [ + 6000542982074572633, + 1697448874567677325, + 10313504031977824294, + 320347014349001728 + ], + "infinity": false + }, + { + "x": [ + 6817435454105168413, + 15823888625999007373, + 9766931118761036330, + 3392959293697897728 + ], + "y": [ + 3549039265311512008, + 
4758653036115592629, + 219467419355603781, + 83059544477934848 + ], + "infinity": false + }, + { + "x": [ + 5038171725639341807, + 6859992384823395611, + 15284967171349293554, + 16807092603996758 + ], + "y": [ + 16504201956683368367, + 12931995037356002803, + 16812826192957092842, + 3169839139097845275 + ], + "infinity": false + }, + { + "x": [ + 7140480682142203727, + 9518528852331365100, + 6189914959408603471, + 535939568308325781 + ], + "y": [ + 5944679084532939174, + 17280810090456322382, + 3743919877743496107, + 1235924204609568068 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 1929812895882850703, + 10386198218814398503, + 17007521659662498274, + 1093092717342753672 + ], + "y": [ + 14834187133095267171, + 15506032964234961178, + 7626816120460943443, + 871778379365004315 + ], + "infinity": false + }, + { + "x": [ + 15660406110329165813, + 8146521122567923995, + 2421739551937359002, + 3037598346026174089 + ], + "y": [ + 526124545966722472, + 1168331442853419483, + 4128095883471549051, + 2951909971734725955 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 6206240620508019400, + 3690935139087147193, + 15230272164329216928, + 2140680869789406894 + ], + "y": [ + 14967331981004447304, + 1624146052760537503, + 8986435052862626311, + 334011853307313390 + ], + "infinity": false + }, + { + "x": [ + 4342223064246074020, + 2037946044543710684, + 9057698479075332373, + 1955362957846693345 + ], + "y": [ + 13253375713250043938, + 6754658208742468331, + 9339617748652368850, + 3066524060291544175 + ], + "infinity": false + }, + { + "x": [ + 17765629723696241082, + 14243015821582305127, + 922013493526048847, + 186830516636733479 + ], + "y": [ + 14465184942185208224, + 11235596895177038197, + 5490682932088517686, + 1253279069662324930 + ], + "infinity": false + }, + { + "x": [ + 9369367805867402420, + 12663806522952881709, + 10184609326459106945, + 1664572000409921348 + ], + "y": [ + 4383960972942823390, + 
6526609131568596717, + 1343118583674917141, + 113408414321095416 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 6306340, + "lookup_selector_commitment": { + "x": [ + 8662938005624859815, + 9126108646717466191, + 14321121874090966307, + 2777446762308933634 + ], + "y": [ + 12555265159079607081, + 9054928862248682392, + 2784170007581120117, + 1769718192676345815 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 12644448349947379666, + 16345179309557779118, + 10854030671875297787, + 1358228639202695992 + ], + "y": [ + 2673142241557152443, + 11674634738064487673, + 12992693662201776412, + 1888958170754620568 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 
8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_18_key.json b/core/bin/verification_key_generator_and_server/data/verification_18_key.json new file mode 100644 index 000000000000..0987039dd1fa --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_18_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 8828437332483635107, + 13777915698231175292, + 11504510351588004199, + 2516385517175522236 + ], + "y": [ + 1530453459325046685, + 2126477283125660971, + 6874073688275717548, + 2971751478402184988 + ], + "infinity": false + }, + { + "x": [ + 3490885152333630169, + 4123320877294819459, + 5138828731030738163, + 3039569146695764058 + ], + "y": [ + 10725322881860790776, + 1512262420257872325, + 10563843054743673205, + 447776577449487981 + ], + "infinity": false + }, + { + "x": [ + 14957646468235752771, + 6216555943494703122, + 7827110015048654177, + 2702223139144227095 + ], + "y": [ + 505353369980003046, + 9687811614109626117, + 5346740791392836415, + 1340467989233731971 + ], + "infinity": false + }, + { + 
"x": [ + 3201028595190213325, + 9659059230246338206, + 901122635500995415, + 765851963674764103 + ], + "y": [ + 10609226610841230792, + 8145519080052709505, + 17851750066177581293, + 362176586681460505 + ], + "infinity": false + }, + { + "x": [ + 13374935211181268625, + 1347742735582506393, + 4588995338963087243, + 94453217016201562 + ], + "y": [ + 4077548225372117006, + 11859845367084549583, + 2736752177668563039, + 1134818940315684409 + ], + "infinity": false + }, + { + "x": [ + 9467178015658262369, + 10545965721679492606, + 5726831550010619228, + 2051827871593168334 + ], + "y": [ + 6169140154733194545, + 5574043976386236933, + 12140759986363309479, + 1521273866181786590 + ], + "infinity": false + }, + { + "x": [ + 9642818207174528085, + 15617465062711953088, + 11263174413902929450, + 639683138088730423 + ], + "y": [ + 15150652293369779803, + 11338278639695990684, + 12204993260723588081, + 2039902155290309382 + ], + "infinity": false + }, + { + "x": [ + 7292405600450693833, + 573142590034645775, + 1583019100043676600, + 1978695840953226358 + ], + "y": [ + 5154489367309996043, + 8763740977657654022, + 9821219773990064941, + 2636875463267519559 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 2075450237700219880, + 2920304484074114568, + 8294843245052708759, + 555293007149161182 + ], + "y": [ + 6360019558055677441, + 7673047654179899818, + 10263007591992092214, + 2148859098846651643 + ], + "infinity": false + }, + { + "x": [ + 3970783323754285443, + 13019363829879217592, + 18197490676081603277, + 630296172623407012 + ], + "y": [ + 7987745494904024640, + 9631048689610078757, + 1592818072678520163, + 2678374240960081558 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 3055966415338102721, + 18231075292903695376, + 9187400351012014001, + 2311743062653684305 + ], + "y": [ + 2553578246375478674, + 930511927228692161, + 2271826946385879571, + 3124263363559878329 + ], + "infinity": false + }, + { + "x": [ + 
6936812562216228782, + 15195638439305648290, + 17827467578192758430, + 2674740411261002393 + ], + "y": [ + 9738743088557108685, + 17225541903460577384, + 16627013813461429872, + 494410407050490065 + ], + "infinity": false + }, + { + "x": [ + 10570962909758341245, + 18167360144953681397, + 2744925075742623060, + 736412139310579435 + ], + "y": [ + 13849279071386536985, + 10093748777935480433, + 904764951143479286, + 138814932031469939 + ], + "infinity": false + }, + { + "x": [ + 4533871929444677010, + 10106157783629999301, + 4178648893377901718, + 3164693318611048089 + ], + "y": [ + 12699039702383686311, + 4388078229442418460, + 8961813905523894854, + 570254591975307765 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 18884644, + "lookup_selector_commitment": { + "x": [ + 15022814412717317376, + 17444332185630324119, + 14685665421775887958, + 906494215348891007 + ], + "y": [ + 9833778905776399360, + 1648124311168457783, + 3500435402371619753, + 2370413643071351216 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + 
"infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 8321950609730151216, + 18010887235457883784, + 17038267498493175776, + 1380842840607309871 + ], + "y": [ + 3264160671000273944, + 16611917363401804468, + 8505391859632632917, + 2149881676646664319 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_1_key.json b/core/bin/verification_key_generator_and_server/data/verification_1_key.json new file mode 100644 index 000000000000..0310303d2a53 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_1_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 7601801432079276288, + 15201863322122857773, + 8806193975262404580, + 2590787273683229105 + ], + "y": [ + 
16702527967956763728, + 6181870639994435984, + 1867123357108619315, + 2767403024411663364 + ], + "infinity": false + }, + { + "x": [ + 2455316591212726341, + 2027771240685247927, + 10685588854446154162, + 3030775657966372875 + ], + "y": [ + 18300009037843703356, + 1612973442135305251, + 10693350009422283513, + 1442590213691840716 + ], + "infinity": false + }, + { + "x": [ + 12311884457715965312, + 10390638194798557018, + 11306832124741148566, + 300716765354847473 + ], + "y": [ + 9707964220031061231, + 14753080439380196493, + 5717535245627190368, + 702219636062983319 + ], + "infinity": false + }, + { + "x": [ + 7758453297146426337, + 1673770484163252092, + 14607544807007157753, + 857313958429629763 + ], + "y": [ + 14921629410308576937, + 15298335487420996140, + 2704982045392946878, + 2611590721009022852 + ], + "infinity": false + }, + { + "x": [ + 14311011031579784592, + 15625526098906078640, + 1319146597092063841, + 774276845418764858 + ], + "y": [ + 3893523842912943845, + 18146056093503974553, + 11030513442747849089, + 389965813625175232 + ], + "infinity": false + }, + { + "x": [ + 7007915445081129178, + 2401922490835966325, + 418720827124106725, + 2770268368066902308 + ], + "y": [ + 12116308634970006696, + 14528630571959109449, + 9950799281726780069, + 724152027617190422 + ], + "infinity": false + }, + { + "x": [ + 2442021019274420960, + 16295185893380203674, + 2439146651414642189, + 2243335375830582173 + ], + "y": [ + 3782090054162740071, + 4704457281172608987, + 4410900061257118309, + 764611777065564766 + ], + "infinity": false + }, + { + "x": [ + 17964884224938230037, + 7876675311267561320, + 16762398450655445790, + 1210707988542142007 + ], + "y": [ + 10470358785861361347, + 9485656365593190672, + 6046378362748740079, + 2457285875935475197 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 17157526827088368172, + 11284084393440625999, + 9351565798611728109, + 3234841809825307363 + ], + "y": [ + 8319704714678793930, + 
4159327153032521498, + 15356346081767327573, + 3239913585027348493 + ], + "infinity": false + }, + { + "x": [ + 15456321646261647359, + 15891438700803416959, + 3317730603133051465, + 2641175705943818316 + ], + "y": [ + 1411951218052246200, + 1661720531643832913, + 13537400120511760371, + 2292851110898807736 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 10328956753700766823, + 2827084848292920926, + 6753362467616392790, + 3266354497443915853 + ], + "y": [ + 4786671171082888838, + 11071539213550223285, + 3886224490311829958, + 1435384580945051012 + ], + "infinity": false + }, + { + "x": [ + 6970901872301032061, + 11845499850875638451, + 12523013241874863158, + 564589203700245768 + ], + "y": [ + 9149991346853645253, + 10833082414663634622, + 10032445307744641248, + 3184550747076826571 + ], + "infinity": false + }, + { + "x": [ + 2899501934612768796, + 7289832407727333580, + 15398305180487198919, + 2955735241334744486 + ], + "y": [ + 4963499698281910643, + 5723522390488208800, + 3637467607919864741, + 339118267031086794 + ], + "infinity": false + }, + { + "x": [ + 16561673014946600686, + 6893642268089467710, + 11554023210615815565, + 122477375056362239 + ], + "y": [ + 15978560303000591303, + 6087766803442805629, + 6114779478264008006, + 2753348573959524636 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 30899639, + "lookup_selector_commitment": { + "x": [ + 4819118611809066421, + 16205075690681881406, + 8088108199972047891, + 2462381205202312681 + ], + "y": [ + 9403235417076804812, + 11746452954984920263, + 5479393366572364588, + 2168476120537571525 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 1589280911861251894, + 2000192568988587993, + 18399902493387281635, + 1843483375839232315 + ], + "y": [ + 14712825033319581746, + 11500494123399487569, + 4370642671010258701, + 567620704393396341 + ], + "infinity": false + }, + { + "x": [ + 0, + 0, + 0, + 0 + ], + "y": [ + 1, + 0, + 0, + 0 + ], 
+ "infinity": true + }, + { + "x": [ + 0, + 0, + 0, + 0 + ], + "y": [ + 1, + 0, + 0, + 0 + ], + "infinity": true + }, + { + "x": [ + 5989740765536181742, + 7510673671757970234, + 7988398980529338112, + 2047433943537325290 + ], + "y": [ + 14952889876146512965, + 17141012675484923048, + 328206788961236528, + 866564802795139 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 4824978155651454377, + 12191454623887257586, + 12973919510878979890, + 52932438992466171 + ], + "y": [ + 17857145998747603901, + 2092039184434926372, + 11018504664231591204, + 1321736242331612854 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_2_key.json b/core/bin/verification_key_generator_and_server/data/verification_2_key.json new file mode 100644 index 000000000000..79b16257213f --- /dev/null +++ 
b/core/bin/verification_key_generator_and_server/data/verification_2_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 5518783475412319303, + 13900056820557691891, + 3293972357974626054, + 2215936931279678502 + ], + "y": [ + 7955917949806788616, + 13341003959544330056, + 2090626280536970058, + 340565138339520735 + ], + "infinity": false + }, + { + "x": [ + 14185170917510557830, + 8046892618400404954, + 16599645397148333553, + 2994187418830549588 + ], + "y": [ + 7234254448777026502, + 8445782435526889669, + 14116370103157060862, + 2248206929083565209 + ], + "infinity": false + }, + { + "x": [ + 11154659552703848544, + 12941656139895069323, + 17062140236305086427, + 722110816848028084 + ], + "y": [ + 5009717036998782771, + 827592822749515890, + 15966856850732642654, + 618036931564479654 + ], + "infinity": false + }, + { + "x": [ + 5157594213696692987, + 15014090155482426422, + 706425002062263449, + 3203486979181293219 + ], + "y": [ + 14363949081622225749, + 9001876918808042476, + 1615414451418136701, + 444697301726425121 + ], + "infinity": false + }, + { + "x": [ + 9176460251336839321, + 17295305184785757140, + 7831134341003191604, + 2666806971657364559 + ], + "y": [ + 2598277252699259004, + 11916936738177575234, + 2912317122505195338, + 2404138220482962548 + ], + "infinity": false + }, + { + "x": [ + 11575910134534349159, + 14192914809594698195, + 18267718409201448839, + 142641722814285206 + ], + "y": [ + 5883506329268908990, + 2832339585209792351, + 14642260147093833347, + 392817691249359885 + ], + "infinity": false + }, + { + "x": [ + 12908012748245269010, + 6525727331816152736, + 16979431824428028279, + 2845131870310951239 + ], + "y": [ + 1571963770034876851, + 17602700402136611105, + 13310928253737079884, + 3347891464097055062 + ], + "infinity": false + }, + { + "x": [ + 832167803175150309, + 11457734167413059640, + 13250442890410377059, + 
2814079984479722654 + ], + "y": [ + 1463471541691279258, + 1744973157713476297, + 1204969522442685286, + 1269233371856967282 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10352656458395970023, + 3995520406692994966, + 13084432248093257522, + 2302839365715839904 + ], + "y": [ + 8225034751786073151, + 16771047952615636124, + 616708265068224682, + 186403683175385821 + ], + "infinity": false + }, + { + "x": [ + 4270731028924703792, + 3128341040439802084, + 15083522049785140229, + 2261189689222904761 + ], + "y": [ + 8781157350107493893, + 14766318733918494793, + 9428422381369337621, + 419743052593117743 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 11112968480130414212, + 11913364106966677596, + 36671493864905181, + 496058283903160224 + ], + "y": [ + 9691136012048916590, + 12909186572206021308, + 1700657689434945171, + 3072265811815532764 + ], + "infinity": false + }, + { + "x": [ + 11360744654540534278, + 9830357778413675465, + 5192069313646589173, + 113131628631742646 + ], + "y": [ + 5515513518975242303, + 323890392099446701, + 2255482865429449468, + 2322464724330067577 + ], + "infinity": false + }, + { + "x": [ + 3414259545645111239, + 5416149397109634837, + 12993204506510556426, + 2894091844446687144 + ], + "y": [ + 4731949297479191167, + 1043460441127916951, + 16890401788673829290, + 1356564712828723527 + ], + "infinity": false + }, + { + "x": [ + 8993182433738017869, + 11441314659459910136, + 8181494681500166120, + 1591321336872387140 + ], + "y": [ + 5278254820002084488, + 17932571960593236295, + 7626453034762681225, + 3463596506399756742 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 30783671, + "lookup_selector_commitment": { + "x": [ + 1336161834228740427, + 15823221750660268452, + 13689567356831376139, + 1839611883700311389 + ], + "y": [ + 14875759795137726191, + 20318096045504920, + 8816565555629805366, + 75556627728969178 + ], + "infinity": false + }, + 
"lookup_tables_commitments": [ + { + "x": [ + 1589280911861251894, + 2000192568988587993, + 18399902493387281635, + 1843483375839232315 + ], + "y": [ + 14712825033319581746, + 11500494123399487569, + 4370642671010258701, + 567620704393396341 + ], + "infinity": false + }, + { + "x": [ + 0, + 0, + 0, + 0 + ], + "y": [ + 1, + 0, + 0, + 0 + ], + "infinity": true + }, + { + "x": [ + 0, + 0, + 0, + 0 + ], + "y": [ + 1, + 0, + 0, + 0 + ], + "infinity": true + }, + { + "x": [ + 5989740765536181742, + 7510673671757970234, + 7988398980529338112, + 2047433943537325290 + ], + "y": [ + 14952889876146512965, + 17141012675484923048, + 328206788961236528, + 866564802795139 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 3408213281770836085, + 15382444791373914560, + 16110552627056571461, + 1161688479331593061 + ], + "y": [ + 13379188756114722390, + 12926267823879081751, + 14282599792449107495, + 3244837013658545871 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 
15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_3_key.json b/core/bin/verification_key_generator_and_server/data/verification_3_key.json new file mode 100644 index 000000000000..ec51689663ce --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_3_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 4552413926111948852, + 10957144662880794759, + 1806526858308545824, + 2720977094694375461 + ], + "y": [ + 9957232607201895378, + 9642598329050582748, + 7241368557931304566, + 2368139841965090910 + ], + "infinity": false + }, + { + "x": [ + 17664367774067691121, + 5434452106745188417, + 13061281583481015732, + 1636638472636008156 + ], + "y": [ + 4351985207465487643, + 11395591846019168789, + 16136180329293395748, + 3037028595557996988 + ], + "infinity": false + }, + { + "x": [ + 11088564426519797199, + 13911240355467381641, + 704193242607761309, + 2841093790816726473 + ], + "y": [ + 8962630520073659328, + 173793000390552550, + 5675477828895844644, + 1653616683677519745 + ], + "infinity": false + }, + { + "x": [ + 12882527139660212146, + 13453501656574828481, + 7645846961915393962, + 1425197621755678886 + ], + "y": [ + 16430175978765315928, + 15495807420095904477, + 2277767085665142018, + 2828865080019988295 + ], + "infinity": false + }, + { + "x": [ + 14984520689209033539, + 11643555254505584002, + 11263749877444050325, + 1778453501627364370 + ], + "y": [ + 13093435792719005783, + 3521622018192356851, + 17363442251541284841, + 1729103955346249787 + ], + "infinity": false + }, + { + "x": [ + 8472557649663210509, + 12959218494998230596, + 9420261090312891796, + 1557623015274275213 + ], + "y": [ + 16052238026542101971, + 10179034100393360237, + 9978634553947320008, + 578621147663557199 + ], + "infinity": 
false + }, + { + "x": [ + 7716060755495033877, + 2483781464143055151, + 1181897707039138261, + 111167263933376180 + ], + "y": [ + 8737220167574918233, + 11705129040253191126, + 324694051487940786, + 1358715253160880918 + ], + "infinity": false + }, + { + "x": [ + 16704606564921646360, + 10831094419697381185, + 8629189802092464726, + 1457480943878296289 + ], + "y": [ + 13963821601066807541, + 18041123136689723150, + 10814456746840952660, + 2173492275071023829 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 13927786032597906772, + 16736825632878665355, + 1344069241704076041, + 3395012689630160919 + ], + "y": [ + 2384561632721384994, + 14766877387148951981, + 2144452745561381419, + 457273820347677951 + ], + "infinity": false + }, + { + "x": [ + 15439764061223871624, + 3299628057930947680, + 14198600802212718285, + 1397095985255125902 + ], + "y": [ + 15377822596502057312, + 6285736694449381031, + 11301642243242685820, + 1377917967042996956 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 15570364878064191471, + 6282941464448732634, + 9471325995619045861, + 1232197041581264155 + ], + "y": [ + 12280116801318148056, + 3076430362699060719, + 15641862360252683642, + 1036589014921989740 + ], + "infinity": false + }, + { + "x": [ + 5619821529319779769, + 369651205919014093, + 3573760605900424455, + 1578181493222357476 + ], + "y": [ + 13972658895413012837, + 3324728560278728728, + 1514863722019353225, + 283539618506176946 + ], + "infinity": false + }, + { + "x": [ + 7313654849318558545, + 3856980231091609256, + 13652594590991171434, + 1350431962017866810 + ], + "y": [ + 15802200192820379645, + 7943671073337715397, + 9635282915426247707, + 2405156099602918935 + ], + "infinity": false + }, + { + "x": [ + 16927593081999500567, + 12015826224538208031, + 13033887047158923253, + 508845269866381969 + ], + "y": [ + 3816934949381974009, + 15225306878268851022, + 13695992495268457957, + 2039957178158588775 + ], + 
"infinity": false + } + ], + "total_lookup_entries_length": 15103347, + "lookup_selector_commitment": { + "x": [ + 132701641292747981, + 9385191737942122578, + 9224572231433703600, + 422768887908740278 + ], + "y": [ + 7973095609514393935, + 9109168329146163159, + 1665658611543684747, + 2682386040886163584 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 12925597216490182210, + 13030942092034120135, + 17733316148446765999, + 112547709703624791 + ], + "y": [ + 13293415162200038331, + 13010565234555563811, + 15476251035925496743, + 2588541998389664114 + ], + "infinity": false + }, + { + "x": [ + 11928160479317928747, + 15728917754258005897, + 10320536119352799654, + 270214245519994465 + ], + "y": [ + 10541702140730698371, + 16733093124167020021, + 11131412253235036732, + 300062717716678428 + ], + "infinity": false + }, + { + "x": [ + 17999903301086933877, + 10468070608989378923, + 3479353092436121335, + 607756992244480908 + ], + "y": [ + 10863079642303790364, + 4737012301447477097, + 4605789209164294308, + 1430572887755557386 + ], + "infinity": false + }, + { + "x": [ + 4609762018249049814, + 4113097757442144437, + 4725434011535510809, + 2977599521231955696 + ], + "y": [ + 14636094180551257630, + 8819447661702130886, + 1091706295519429215, + 56675985696303183 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 3337588084106662427, + 632442254875580351, + 13994678952882390428, + 2231491013059247615 + ], + "y": [ + 1885998007338074473, + 15463564406739479850, + 16180378634933639894, + 2911611175129454243 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 
5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_4_key.json b/core/bin/verification_key_generator_and_server/data/verification_4_key.json new file mode 100644 index 000000000000..1fca402a57d1 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_4_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 5624259952235159639, + 12493453537753524096, + 5162742944723295422, + 874801217568058344 + ], + "y": [ + 9255161020512573415, + 363459688878681289, + 820936643727254318, + 2429296485867858134 + ], + "infinity": false + }, + { + "x": [ + 2832540416149381652, + 15430317050493808950, + 17179036781265260663, + 186820416950920400 + ], + "y": [ + 1595111043068965121, + 12787589346811970515, + 16036754970398854466, + 1936430940635933371 + ], + "infinity": false + }, + { + "x": [ + 8954537073230160476, + 17141264894256989510, + 6295027223169328386, + 945694272488560390 + ], + "y": [ + 12946393132907201326, + 15831815351998984402, + 11467477440888834372, + 2122439063028644692 + ], + "infinity": false + }, + { + "x": [ + 16668856378658574030, + 12464185126307046024, + 
4782167605155661077, + 2925818815088278657 + ], + "y": [ + 14254029004937284994, + 16838260511439178142, + 18330455412567106782, + 907257260948957347 + ], + "infinity": false + }, + { + "x": [ + 3757073371674290600, + 14749158543939658543, + 500288168398934349, + 899019556764130326 + ], + "y": [ + 2551967063854357353, + 11943083947319003127, + 3713483551270573853, + 3048346390727492861 + ], + "infinity": false + }, + { + "x": [ + 9681195336192488789, + 4385315926758329080, + 11387599749194291462, + 2590055918991698961 + ], + "y": [ + 9441467182736837048, + 14806656083518409337, + 6289102250953692061, + 2535387000517398099 + ], + "infinity": false + }, + { + "x": [ + 13507468900709913557, + 15716156111774497427, + 816956999229367707, + 1522047434717677609 + ], + "y": [ + 3702769098715132058, + 3982451445376700785, + 6723421039598308657, + 2251542644425584421 + ], + "infinity": false + }, + { + "x": [ + 1475691146597603932, + 17022584496562609257, + 17821269401726564016, + 802662685627574735 + ], + "y": [ + 10096079063677634763, + 12234172521086631920, + 297465434937655535, + 3243745710298231123 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 17529827936208631386, + 9261627982909469647, + 961082867947678862, + 960578719286164751 + ], + "y": [ + 8405963553525505153, + 907040572117159306, + 14956302475326239995, + 547263587163632990 + ], + "infinity": false + }, + { + "x": [ + 16590598224397602624, + 9925399142289127105, + 3290399501509259487, + 2600139476199697777 + ], + "y": [ + 11547014733601362211, + 8210713950853401970, + 18225836752365218802, + 3416518080459492792 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 1361004974540175066, + 16274313641371783602, + 6174733117825004502, + 2094853940073638978 + ], + "y": [ + 562696341739915410, + 14890673686457558264, + 3661460202790599374, + 2475743175658913489 + ], + "infinity": false + }, + { + "x": [ + 17517940283060625271, + 5779867718792326928, + 
17745057071421714730, + 1985060149839739251 + ], + "y": [ + 9540288339316398759, + 2058552121996607541, + 7871901128942825027, + 2699744308553389629 + ], + "infinity": false + }, + { + "x": [ + 10135642768859521260, + 10297677444020519573, + 9219235108573499828, + 2065187692845085359 + ], + "y": [ + 8372351253470713532, + 6842701361290620477, + 11926122104604392352, + 1882199601947395362 + ], + "infinity": false + }, + { + "x": [ + 7787083258082044412, + 8798544784294490279, + 15252059285839385340, + 209159930416492510 + ], + "y": [ + 9464645197696091031, + 12735510170331867214, + 10126980317766617408, + 1956203655559965748 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 8484858, + "lookup_selector_commitment": { + "x": [ + 709495900213098376, + 5439249685673083978, + 10044314386307915207, + 868534819499649206 + ], + "y": [ + 7744104606837258411, + 9395404917324005459, + 5711899910359603748, + 3413594411142959024 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 697552212563769686, + 7709943502535418760, + 15019345407325619175, + 3433081085078580257 + ], + "y": [ + 8668947019840357731, + 14698901351824712883, + 15088598879190660424, + 2873081208166433946 + ], + "infinity": false + }, + { + "x": [ + 7893133928909060673, + 7064922516930129957, + 3592836702741304814, + 2239702595710114437 + ], + "y": [ + 7691360541875191519, + 11379321785127235277, + 6653616064071569031, + 2555434628517540774 + ], + "infinity": false + }, + { + "x": [ + 6243944238013052821, + 7908243182210136125, + 17178099109525791299, + 2553622184721264566 + ], + "y": [ + 736121280088239428, + 6158073429758170526, + 11217302997977204117, + 2594798912020899417 + ], + "infinity": false + }, + { + "x": [ + 2064240298596094591, + 16917726764104887991, + 11042784977532408536, + 3377647228930170830 + ], + "y": [ + 10635525052494768819, + 387400048616497096, + 9379200582543310995, + 1571766153703296253 + ], + "infinity": false + } + ], + 
"lookup_table_type_commitment": { + "x": [ + 6342996221389543983, + 2956974825746967379, + 5313102419815794231, + 2142845372908172595 + ], + "y": [ + 11930073583427772667, + 3300840902988996263, + 13848383192378869374, + 1678905557563667573 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_5_key.json b/core/bin/verification_key_generator_and_server/data/verification_5_key.json new file mode 100644 index 000000000000..b9a31b919f1c --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_5_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 12322129650547620518, + 4320033807979823995, + 4503809593276792861, + 630958448551597950 + ], + "y": [ + 4947307957322067889, + 
1897773243457379956, + 1563584362302565484, + 802109862761172056 + ], + "infinity": false + }, + { + "x": [ + 5860641327684713918, + 16885915425353665713, + 7037370194263044401, + 1837438863045303696 + ], + "y": [ + 13386292219804271609, + 4960073609197619993, + 7328379249582994262, + 191728769121948464 + ], + "infinity": false + }, + { + "x": [ + 9390502900121613993, + 17218409610830310329, + 4830832371938391322, + 1805131323553685028 + ], + "y": [ + 15707040961083920686, + 16216062707384374953, + 16957058843586642758, + 1341814870249072628 + ], + "infinity": false + }, + { + "x": [ + 969252611989285232, + 181405773082212747, + 11110666465356509832, + 1888802363524687207 + ], + "y": [ + 5293477339288357424, + 12076391347720360980, + 11422893229655154394, + 3165450734777404812 + ], + "infinity": false + }, + { + "x": [ + 642192487369089358, + 9585449571929647331, + 3847960352134961209, + 984199510163128792 + ], + "y": [ + 13950390676065893881, + 975256099594703300, + 253120832016214204, + 1860679841584192219 + ], + "infinity": false + }, + { + "x": [ + 3564548447861991296, + 6278944799487206913, + 1163701992635366786, + 3214877162977671335 + ], + "y": [ + 13131873482361140204, + 14012120801722220187, + 13254371011592477950, + 1082108070640175604 + ], + "infinity": false + }, + { + "x": [ + 14190764189814537607, + 18412181832598818289, + 17213387738194113336, + 1662783623959823461 + ], + "y": [ + 7987199081435644988, + 17119136750046780209, + 8770669323846078492, + 3183489396270587333 + ], + "infinity": false + }, + { + "x": [ + 14638218826597535389, + 16409988612234258347, + 5025411344133541245, + 603088654230685360 + ], + "y": [ + 12538363432956258836, + 6558875956959901550, + 2415879426147965883, + 750702584304895055 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 2599908293582905760, + 13534206398743622493, + 15926090086034346074, + 467418127379229858 + ], + "y": [ + 9529512934078774185, + 1459270552041127965, + 
13418846370362665102, + 2270996612016337371 + ], + "infinity": false + }, + { + "x": [ + 7264275706530137047, + 5590205367072257545, + 17891440127697345143, + 360638857846382524 + ], + "y": [ + 17983779934218975397, + 1625779403076670241, + 1474025795387210129, + 1716171421120825643 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 9354841115000244260, + 12887310615208346489, + 1120617137774653400, + 424227936372254439 + ], + "y": [ + 3626714025954019309, + 4480975902927818206, + 10093567956580931634, + 2779897825000836477 + ], + "infinity": false + }, + { + "x": [ + 1864884782104066211, + 1247154271168453374, + 9982166936353409582, + 1177339527115773898 + ], + "y": [ + 9932597332303163060, + 1888682277213109000, + 11684220277443154622, + 3062389133489783806 + ], + "infinity": false + }, + { + "x": [ + 9943021177878836437, + 9004866876172522532, + 14085451328492136137, + 1567186274425392936 + ], + "y": [ + 7148906168793986389, + 4780330524752436486, + 10067456648871712650, + 179752856567560382 + ], + "infinity": false + }, + { + "x": [ + 14745822832390509907, + 13862030626549782961, + 10000268356302875837, + 705042314567833799 + ], + "y": [ + 11091254259539384938, + 11733968109785394056, + 11099103738494585500, + 1527456782567955191 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 35330543, + "lookup_selector_commitment": { + "x": [ + 12333191731462980214, + 17841370099698959347, + 12878670991018181621, + 2894319630687016858 + ], + "y": [ + 76816727314643395, + 3214684791046221459, + 878301108738499830, + 126016925902987736 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 911668445361375614, + 12752365066512000136, + 11550232015863976467, + 2053619216798992367 + ], + "y": [ + 4194339833917391280, + 1643071887467668153, + 3377480965202592691, + 1664272901450533719 + ], + "infinity": false + }, + { + "x": [ + 2999316735203966181, + 5189676006781764591, + 14324679313847304783, + 
1264086978509739587 + ], + "y": [ + 8714172036038650967, + 10907167170124829028, + 8950970593162102458, + 1596853051185997037 + ], + "infinity": false + }, + { + "x": [ + 1146500486770850326, + 13562754408872334896, + 14063471769392190265, + 3387351506820193517 + ], + "y": [ + 6677788829230735422, + 15425668102208730571, + 5341291772716012975, + 539156410041791428 + ], + "infinity": false + }, + { + "x": [ + 18159886519320172405, + 4286826840324377773, + 16364826089434525345, + 228697666397725767 + ], + "y": [ + 4850633487261444791, + 6327421534074497160, + 12883776034588695446, + 1510314148471267214 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 18245233954308230592, + 8193493714287610439, + 6521078295132558240, + 861511081336275611 + ], + "y": [ + 4275834222266292944, + 13179071278128968874, + 5943013356852335765, + 2456639561657053045 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": 
false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_6_key.json b/core/bin/verification_key_generator_and_server/data/verification_6_key.json new file mode 100644 index 000000000000..34419df17702 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_6_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 11033020679838791108, + 14920056278440370765, + 8156477685651219112, + 2935096142913695825 + ], + "y": [ + 12780055516709256833, + 966513406268819160, + 9584266886886532866, + 892347068344972829 + ], + "infinity": false + }, + { + "x": [ + 4044870432040348042, + 10630300946926732771, + 3143480015080245177, + 323917785885883620 + ], + "y": [ + 2297905282612888789, + 8206728682979815807, + 10628767928228215441, + 3062326525278498604 + ], + "infinity": false + }, + { + "x": [ + 14760731158538087565, + 9176522400170689419, + 9855180338242634009, + 2456568616568530201 + ], + "y": [ + 5168103953295979961, + 397013651969935557, + 13864468728668213717, + 2925074735515169158 + ], + "infinity": false + }, + { + "x": [ + 13613691592548742743, + 11339389230513898784, + 4864282628000142183, + 2568915564796772962 + ], + "y": [ + 13074021698952750513, + 14891339562597317806, + 6145754680491802845, + 913243322463864468 + ], + "infinity": false + }, + { + "x": [ + 9607983563343027008, + 1604609357347728263, + 6735137627175405143, + 91305611485454778 + ], + "y": [ + 2068449139446365265, + 6171753015906067998, + 16290186276604645197, + 420889087081901603 + ], + "infinity": false + }, + { + "x": [ + 15994614598808477960, + 5137738490508028659, + 6599503545391493738, + 3293094250487745346 + ], + "y": [ + 3246688300070721763, + 8836841286539929132, + 1231014124908407748, + 3042941126579517307 + ], + "infinity": false + }, + { + "x": [ + 12550390789117808745, + 14001030013656521177, + 
16383284077678821701, + 1815317458772356897 + ], + "y": [ + 10125044837604978181, + 7468984969058409331, + 592554137766258541, + 2877688586321491725 + ], + "infinity": false + }, + { + "x": [ + 12238091769471133989, + 184716847866634800, + 5888077423956723698, + 609118759536864800 + ], + "y": [ + 7725369615076384544, + 7561073323636510559, + 10473734750023783127, + 861766554781597742 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 1206127807467530207, + 3510053718168412786, + 7933459343694333819, + 3179950874373950282 + ], + "y": [ + 5784856107466398982, + 395767970566909293, + 11244200096534021583, + 2068407511544404377 + ], + "infinity": false + }, + { + "x": [ + 4044617248058764838, + 11957266999135308674, + 17621747993137866783, + 990156155955733134 + ], + "y": [ + 17234504892477991728, + 17558826298225495489, + 9349531438753716103, + 2656409262947709594 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 4308597000331285311, + 12130199317436319902, + 3842336010209461436, + 191866453597778475 + ], + "y": [ + 2144400171783010971, + 13016087318985913183, + 7166370365336301922, + 2216888390030560212 + ], + "infinity": false + }, + { + "x": [ + 4661184458541745063, + 12423889401726065791, + 11959346001895915074, + 779668716585305501 + ], + "y": [ + 16401363790535442499, + 7367694133722005848, + 8015837005184593399, + 454166987511489961 + ], + "infinity": false + }, + { + "x": [ + 858215262803403659, + 1405268530667707386, + 7763962169005921611, + 2845435536097215865 + ], + "y": [ + 10639490331338262540, + 6397733211512468794, + 968161689973799899, + 2054756257253905633 + ], + "infinity": false + }, + { + "x": [ + 17338818659525246480, + 13318488425310212471, + 10548319374858973842, + 87084958643052105 + ], + "y": [ + 2279840344577984658, + 15197280761751903251, + 16019225334594459873, + 149925650787595538 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 3054916, + 
"lookup_selector_commitment": { + "x": [ + 4844230422625825285, + 956290027823441223, + 763010695794739308, + 2426170829255106638 + ], + "y": [ + 13850520521470006763, + 9003994589054655373, + 10310690204425503422, + 3012516431885755457 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 5825422128268478267, + 9219263846299851036, + 3879231702557190566, + 1702488722758880769 + ], + "y": [ + 18311881100262470992, + 5742998199368802392, + 18106865487471159417, + 502191980176920012 + ], + "infinity": false + }, + { + "x": [ + 17195892082859417081, + 7890531942603584793, + 2381805632820057528, + 3173232410464566465 + ], + "y": [ + 16359614627947132075, + 3459600273035137079, + 4550762061432972122, + 3394559699318358224 + ], + "infinity": false + }, + { + "x": [ + 1716103379277390185, + 18097936269579187542, + 16357329729761063450, + 1508640059338197502 + ], + "y": [ + 11014806739603983364, + 4396503314588777389, + 9397245609635151055, + 1703957955248411380 + ], + "infinity": false + }, + { + "x": [ + 4770171350693477354, + 17110558673192292253, + 9799800677557311408, + 761984875463445481 + ], + "y": [ + 1560561403388310063, + 31331275310848146, + 287152055803835484, + 457826332542037277 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 16775586915653722908, + 9787338077086882544, + 8381721730521821042, + 2974660093975661578 + ], + "y": [ + 3011389235487891234, + 15409507493813096391, + 17416460976276029026, + 324418288749844627 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 
1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_7_key.json b/core/bin/verification_key_generator_and_server/data/verification_7_key.json new file mode 100644 index 000000000000..406afcf4f0fe --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_7_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14104278525941001335, + 6652111379088654370, + 12369045377338511525, + 969809670184836151 + ], + "y": [ + 10111598525423302991, + 15018239425425696172, + 3683372413830991953, + 1023765059890131543 + ], + "infinity": false + }, + { + "x": [ + 11576486884237685781, + 16315823052257401029, + 9860864515877414033, + 3179959598270002012 + ], + "y": [ + 487035971539979311, + 5573003039451484772, + 15711637819381564577, + 1904127920269177012 + ], + "infinity": false + }, + { + "x": [ + 18299921128106602792, + 211731469708793711, + 17645028854462121436, + 675870769139913517 + ], + "y": [ + 15146647508675165454, + 18353083579110652488, + 12704645658780892142, + 2929235299763077823 + ], + "infinity": false + }, + { + "x": [ + 11570586127780196277, + 2363872676317471379, + 7386811009552915084, + 959006902628416514 + ], + "y": [ + 17455735716787098890, + 
14879699386306994564, + 5628100821420984321, + 2862659911936763739 + ], + "infinity": false + }, + { + "x": [ + 8746328571248006135, + 17089435014355939378, + 8764506524471462449, + 1810135458362589443 + ], + "y": [ + 14070512019208911265, + 8756287737315170424, + 14821473955626613, + 1559545289765661890 + ], + "infinity": false + }, + { + "x": [ + 2113591086436573082, + 12629483649401688389, + 11845953673798951216, + 3081238281103628853 + ], + "y": [ + 727696133406005469, + 14413827745813557208, + 6425035421156126073, + 291513487083052109 + ], + "infinity": false + }, + { + "x": [ + 15346257923988607256, + 10403316660718504706, + 7158515894996917286, + 2702098910103276762 + ], + "y": [ + 16559143492878738107, + 12716298061927369795, + 12296985344891017351, + 2814996798832983835 + ], + "infinity": false + }, + { + "x": [ + 2213195001372039295, + 8878300942582564036, + 10524986226191936528, + 1815326540993196034 + ], + "y": [ + 11397120982692424098, + 4455537142488107627, + 14205354993332845055, + 2313809587433567240 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 14849046431510808003, + 11699893139960418168, + 6000246307731364190, + 3362832011707902866 + ], + "y": [ + 3242560497217933852, + 11672398501106836413, + 987926723326096281, + 2451226739475091625 + ], + "infinity": false + }, + { + "x": [ + 9272095445402359796, + 1201046264826394411, + 7424934554242366462, + 1125893484262333608 + ], + "y": [ + 15903920299684884420, + 17703294385387204708, + 2256937129195345942, + 1905295733884217610 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 7591926766688292250, + 10457199375342460747, + 3214976192729961314, + 1412860682249358355 + ], + "y": [ + 16894260140402496006, + 3666374878391815131, + 15124268261678582348, + 1340579262756129480 + ], + "infinity": false + }, + { + "x": [ + 2963934507934439034, + 17415763666461861018, + 6331792462137338053, + 3122358526111186727 + ], + "y": [ + 
15040784043381591388, + 7188410244350767315, + 14077554108063383431, + 1704329843327300001 + ], + "infinity": false + }, + { + "x": [ + 7967507884960122293, + 13509230570773443525, + 11125712791473385552, + 2241808950326876268 + ], + "y": [ + 10594180941877323940, + 17179032413109513856, + 17941607623778808075, + 646138820984886096 + ], + "infinity": false + }, + { + "x": [ + 4729534828155895283, + 15489050734511381239, + 4847364931161261393, + 2461584260035042491 + ], + "y": [ + 15255817542606978857, + 6517429187947361297, + 17127878630247240853, + 3389541567226838859 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 40724289, + "lookup_selector_commitment": { + "x": [ + 5449769839889646584, + 2072406321611922291, + 9391796773218391195, + 2377769168011090955 + ], + "y": [ + 1789189431152658324, + 2639430755172378798, + 136577695530283091, + 3045539535973502646 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 12639039925867405095, + 
9606685454938605275, + 7802675863289639223, + 1948831418843225802 + ], + "y": [ + 11059150608777595761, + 10458812733010634961, + 16772660325487078311, + 340608886692078192 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/data/verification_8_key.json b/core/bin/verification_key_generator_and_server/data/verification_8_key.json new file mode 100644 index 000000000000..b8511e17b755 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_8_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 1834112096176967541, + 5137529514715617427, + 6540843391881340212, + 3033401888759110412 + ], + "y": [ + 8910602970094475216, + 13169513767982514776, + 5761530093694221441, + 2733318557350866268 + ], + 
"infinity": false + }, + { + "x": [ + 4701064149158432365, + 5425087325981406309, + 7911131985858828309, + 1683257627049186617 + ], + "y": [ + 13565328904521460918, + 17013189171844282257, + 4897087111183007258, + 2345861178674095559 + ], + "infinity": false + }, + { + "x": [ + 17285353863442654170, + 17787410547699779811, + 4803131526909484890, + 1607731426619418092 + ], + "y": [ + 3219378920021652314, + 11046862703797106703, + 10595836629242151972, + 2970963661532337787 + ], + "infinity": false + }, + { + "x": [ + 6619857367954187649, + 8023974497004524989, + 10088058961892288757, + 938018804109053807 + ], + "y": [ + 15549411064757453720, + 1776820811429478220, + 8222111141823917842, + 290593315633281086 + ], + "infinity": false + }, + { + "x": [ + 3338931670632164423, + 11330459786926502111, + 13560408114559586439, + 233279858410037466 + ], + "y": [ + 9757980615881472290, + 6475296714459436577, + 15954545788543926629, + 2522580407814024231 + ], + "infinity": false + }, + { + "x": [ + 2168501453409628158, + 16417992951888116942, + 1994813140597965849, + 1938552030580060698 + ], + "y": [ + 2393885012813093493, + 5109365147685051030, + 4449898145078443978, + 996506294158321126 + ], + "infinity": false + }, + { + "x": [ + 8163446935422765754, + 17127634458571165785, + 18101155318188210010, + 1502677094108070955 + ], + "y": [ + 4184320355428455210, + 15479528531137595907, + 8455846016430686855, + 2570922865513301289 + ], + "infinity": false + }, + { + "x": [ + 407579941387952352, + 17088458915370169940, + 16892753644011369852, + 2421666516533613805 + ], + "y": [ + 597435837737447683, + 18122233368438707442, + 4844832744563923839, + 396103093107107006 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 16242434178832819081, + 2218928756172422054, + 5871927983870638422, + 810020555846721779 + ], + "y": [ + 9387856576677982883, + 5119490172321159350, + 14295435318421985120, + 1325809191818871673 + ], + "infinity": false + }, + { + "x": [ + 
5933965238687071287, + 10681704800081225943, + 14555731010498897395, + 959799154476325145 + ], + "y": [ + 1501632601560034962, + 9401704677918783964, + 12292111854761501889, + 858616662661742045 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 12841507457971520539, + 6525486152471484441, + 3744486588589217686, + 2769451038405535407 + ], + "y": [ + 14145668232228974364, + 9864097401535863500, + 12665512227995054273, + 1710776254334161256 + ], + "infinity": false + }, + { + "x": [ + 12108157388466567796, + 12008825937320240484, + 11228446795405478904, + 1520424921904150640 + ], + "y": [ + 18157047055378899649, + 10836823561088895074, + 583613418617515639, + 2570085764232471205 + ], + "infinity": false + }, + { + "x": [ + 3117226099128838157, + 10181632193024509490, + 1215328570209780930, + 1536961491401844084 + ], + "y": [ + 11646905141441654681, + 6168936708987385450, + 14459621573162108487, + 2047975568887748173 + ], + "infinity": false + }, + { + "x": [ + 12034664246790330785, + 12032082546920592595, + 12002839514296456095, + 3009479689157977152 + ], + "y": [ + 180421277197569955, + 5815678523367268562, + 11718416396488597085, + 408186057258055191 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 34384753, + "lookup_selector_commitment": { + "x": [ + 3872970821419373956, + 13556503327407661223, + 12832313376327677595, + 211677646774476601 + ], + "y": [ + 17281673428499585093, + 235933066531227024, + 17890327653152417391, + 2551853991532334733 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 14943975734974680929, + 9516136771242606543, + 6695719565456036638, + 3449077049666620393 + ], + "y": [ + 11678209093898264827, + 4499447145490933412, + 6317798459829178953, + 1439219764789809864 + ], + "infinity": false + }, + { + "x": [ + 13501290183905491407, + 17914451638435951710, + 5188762915201956497, + 1220375585898114161 + ], + "y": [ + 14519533874806433487, + 409100046306023, + 
2203176115240501563, + 3105700623762337563 + ], + "infinity": false + }, + { + "x": [ + 13968159480895722732, + 6973568812120893251, + 6250254745096478587, + 2299355969860561070 + ], + "y": [ + 7695944005480078577, + 12009671787784557856, + 13727042561077817002, + 219052945806305675 + ], + "infinity": false + }, + { + "x": [ + 4871629130106420314, + 4091595855728790015, + 1851744390500340594, + 3123168382710331270 + ], + "y": [ + 9703969956757970162, + 1215036492891076659, + 11876727836856213678, + 2640893636590396388 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 10299044894603982393, + 4664166516779563250, + 13124827128688646542, + 3361599897730972314 + ], + "y": [ + 18259946931458798404, + 10145479316480429602, + 15446978899103328376, + 265382288883021070 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git 
a/core/bin/verification_key_generator_and_server/data/verification_9_key.json b/core/bin/verification_key_generator_and_server/data/verification_9_key.json new file mode 100644 index 000000000000..75de5f75c78d --- /dev/null +++ b/core/bin/verification_key_generator_and_server/data/verification_9_key.json @@ -0,0 +1,399 @@ +{ + "n": 67108863, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 15041888416700822899, + 15908701850433687369, + 6928173929840686173, + 501601364708497325 + ], + "y": [ + 9443860646360881208, + 15174745959183347299, + 3341918218952258763, + 1470216750942469587 + ], + "infinity": false + }, + { + "x": [ + 1713492202424532619, + 5921868784153327820, + 3919870428680620477, + 2459274846398943915 + ], + "y": [ + 8012717129874416534, + 13032363221581987781, + 9462161206147300944, + 1151760065513271967 + ], + "infinity": false + }, + { + "x": [ + 6636128327108235840, + 9362733145474272574, + 7779132015244601843, + 474802631021936400 + ], + "y": [ + 3900992471196218787, + 113851245079995197, + 7493904056590361535, + 3140468871801097229 + ], + "infinity": false + }, + { + "x": [ + 4340102674797800902, + 8715432707094353745, + 4331145745081713603, + 45456583984841487 + ], + "y": [ + 18326546742044058782, + 15443239165658185296, + 9765917874876721196, + 687859761729374839 + ], + "infinity": false + }, + { + "x": [ + 10804694580890857975, + 10550068287306981825, + 14956274043654722561, + 3060589920124935341 + ], + "y": [ + 17010223672048359580, + 263749806111642373, + 8349695975133446526, + 2826070525773268002 + ], + "infinity": false + }, + { + "x": [ + 16133249269780245267, + 4275571784340824698, + 6262619645627758753, + 3231281899173719188 + ], + "y": [ + 11839616617849449709, + 7142633755989890055, + 10840735473548209733, + 2847350786075278882 + ], + "infinity": false + }, + { + "x": [ + 16258572583186965203, + 1354691125575792689, + 17235265854934968790, + 1252220109588505888 + ], + "y": [ 
+ 9336541637487074271, + 18402912967310224930, + 13223187653117829136, + 2979297976786733465 + ], + "infinity": false + }, + { + "x": [ + 8525686695522099028, + 4103157564078645049, + 18392570749492199187, + 2911539491816599180 + ], + "y": [ + 114653447583918953, + 10470307038453386601, + 11189850644566793538, + 1298227034210846592 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 2069700145549311928, + 4250782333685017927, + 14207216715687122978, + 1145927286048477791 + ], + "y": [ + 9341202692364554712, + 12346939747104737180, + 2826478533799125818, + 2279570556437452275 + ], + "infinity": false + }, + { + "x": [ + 12388902775325386546, + 1277383964095999647, + 10535796018183893831, + 3359866702323175506 + ], + "y": [ + 16500893366957272235, + 2806147688388338314, + 8233156072220488773, + 2867848844627212711 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 17521183961631816299, + 18327810537117645266, + 16586212795163003556, + 3052771534158410452 + ], + "y": [ + 8441310283734453731, + 14146088755801181801, + 17480253356603213989, + 3217948944323396651 + ], + "infinity": false + }, + { + "x": [ + 16076801532842923524, + 7514743296775639295, + 2571323986448120255, + 184367540214459973 + ], + "y": [ + 13389643967183613114, + 17108261756464256828, + 11145735340309739417, + 2142196980030893874 + ], + "infinity": false + }, + { + "x": [ + 8034683328666433725, + 5436036566901194392, + 18053257213361014053, + 2821377847227509494 + ], + "y": [ + 14471305228212723444, + 8894846184648865892, + 7047725473055235530, + 2413388400332075493 + ], + "infinity": false + }, + { + "x": [ + 14026981588443304814, + 14671946927765496183, + 13387079215022495926, + 2554705188091675830 + ], + "y": [ + 440116222237740520, + 1630168477189852269, + 17833425794232523381, + 908824471705597078 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 41494904, + "lookup_selector_commitment": { + "x": [ + 
13889323383351416990, + 17887386740570674124, + 5463612855590268091, + 2434255340534820869 + ], + "y": [ + 2436699678434218349, + 11251365794004058995, + 11023509005141034197, + 2867854671852170604 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 631990924006796604, + 16139625628991115157, + 13331739325995827711, + 1062301837743594995 + ], + "y": [ + 15303054606290800139, + 15906872095881647437, + 7093896572295020249, + 1342952934989901142 + ], + "infinity": false + }, + { + "x": [ + 7983921919542246393, + 13296544189644416678, + 17081022784392007697, + 1980832835348244027 + ], + "y": [ + 10874958134865200330, + 7702740658637630534, + 14052057929798961943, + 3193353539419869016 + ], + "infinity": false + }, + { + "x": [ + 1114587284824996932, + 4636906500482867924, + 15328247172597030456, + 87946895873973686 + ], + "y": [ + 15573033830207915877, + 5194694185599035278, + 2562407345425607214, + 2782078999306862675 + ], + "infinity": false + }, + { + "x": [ + 18225112781127431982, + 18048613958187123807, + 7325490730844456621, + 1953409020724855888 + ], + "y": [ + 7577000130125917198, + 6193701449695751861, + 4102082927677054717, + 395350071385269650 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 3832160677272803715, + 2122279734318217808, + 811690144328522684, + 1416829483108546006 + ], + "y": [ + 10041279311991435550, + 14702496983143623186, + 4419862575487552747, + 1429817244630465543 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 
6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/core/bin/verification_key_generator_and_server/src/commitment_generator.rs b/core/bin/verification_key_generator_and_server/src/commitment_generator.rs new file mode 100644 index 000000000000..6b107bbf6fc0 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/src/commitment_generator.rs @@ -0,0 +1,104 @@ +use ff::to_hex; +use std::fs; +use toml_edit::{Document, Item, Value}; +use zksync_types::circuit::{LEAF_CIRCUIT_INDEX, NODE_CIRCUIT_INDEX}; +use zksync_types::zkevm_test_harness::witness; +use zksync_types::zkevm_test_harness::witness::recursive_aggregation::erase_vk_type; +use zksync_verification_key_server::{ + get_vk_for_circuit_type, get_vks_for_basic_circuits, get_vks_for_commitment, +}; + +fn main() { + vlog::info!("Starting commitment generation!"); + read_and_update_contract_toml(); +} + +fn read_and_update_contract_toml() { + let mut contract_doc = read_contract_toml(); + let ( + basic_circuit_commitment_hex, + leaf_aggregation_commitment_hex, + node_aggregation_commitment_hex, + ) = generate_commitments(); + contract_doc["contracts"]["VK_COMMITMENT_BASIC_CIRCUITS"] = + get_toml_formatted_value(basic_circuit_commitment_hex); + contract_doc["contracts"]["VK_COMMITMENT_LEAF"] = + get_toml_formatted_value(leaf_aggregation_commitment_hex); + contract_doc["contracts"]["VK_COMMITMENT_NODE"] = + 
get_toml_formatted_value(node_aggregation_commitment_hex); + vlog::info!("Updated toml content: {:?}", contract_doc.to_string()); + write_contract_toml(contract_doc); +} + +fn get_toml_formatted_value(string_value: String) -> Item { + let mut value = Value::from(string_value); + value.decor_mut().set_prefix(""); + Item::Value(value) +} + +fn write_contract_toml(contract_doc: Document) { + let path = get_contract_toml_path(); + fs::write(path, contract_doc.to_string()).expect("Failed writing to contract.toml file"); +} + +fn read_contract_toml() -> Document { + let path = get_contract_toml_path(); + let toml_data = std::fs::read_to_string(path.clone()) + .unwrap_or_else(|_| panic!("contract.toml file does not exist on path {}", path)); + toml_data.parse::().expect("invalid config file") +} + +fn get_contract_toml_path() -> String { + let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| "/".into()); + format!("{}/etc/env/base/contracts.toml", zksync_home) +} + +fn generate_commitments() -> (String, String, String) { + let (_, basic_circuit_commitment, _) = + witness::recursive_aggregation::form_base_circuits_committment(get_vks_for_commitment( + get_vks_for_basic_circuits(), + )); + + let leaf_aggregation_vk = get_vk_for_circuit_type(LEAF_CIRCUIT_INDEX); + let node_aggregation_vk = get_vk_for_circuit_type(NODE_CIRCUIT_INDEX); + + let (_, leaf_aggregation_vk_commitment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + leaf_aggregation_vk, + )); + + let (_, node_aggregation_vk_commitment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + node_aggregation_vk, + )); + let basic_circuit_commitment_hex = format!("0x{}", to_hex(&basic_circuit_commitment)); + let leaf_aggregation_commitment_hex = format!("0x{}", to_hex(&leaf_aggregation_vk_commitment)); + let node_aggregation_commitment_hex = format!("0x{}", to_hex(&node_aggregation_vk_commitment)); + vlog::info!( + "basic circuit 
commitment {:?}", + basic_circuit_commitment_hex + ); + vlog::info!( + "leaf aggregation commitment {:?}", + leaf_aggregation_commitment_hex + ); + vlog::info!( + "node aggregation commitment {:?}", + node_aggregation_commitment_hex + ); + ( + basic_circuit_commitment_hex, + leaf_aggregation_commitment_hex, + node_aggregation_commitment_hex, + ) +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_read_and_update_contract_toml() { + read_and_update_contract_toml(); + } +} diff --git a/core/bin/verification_key_generator_and_server/src/json_to_binary_vk_converter.rs b/core/bin/verification_key_generator_and_server/src/json_to_binary_vk_converter.rs new file mode 100644 index 000000000000..65a2e3361bf4 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/src/json_to_binary_vk_converter.rs @@ -0,0 +1,31 @@ +use bincode::serialize_into; +use std::fs::File; +use std::io::BufWriter; +use structopt::StructOpt; +use zksync_verification_key_server::get_vk_for_circuit_type; + +#[derive(Debug, StructOpt)] +#[structopt( + name = "json existing json VK's to binary vk", + about = "converter tool" +)] +struct Opt { + /// Binary output path of verification keys. 
+ #[structopt(short)] + output_bin_path: String, +} + +fn main() { + let opt = Opt::from_args(); + println!("Converting existing json keys to binary"); + generate_bin_vks(opt.output_bin_path); +} + +fn generate_bin_vks(output_path: String) { + for circuit_type in 1..=18 { + let filename = format!("{}/verification_{}.key", output_path, circuit_type); + let vk = get_vk_for_circuit_type(circuit_type); + let mut f = BufWriter::new(File::create(filename).unwrap()); + serialize_into(&mut f, &vk).unwrap(); + } +} diff --git a/core/bin/verification_key_generator_and_server/src/lib.rs b/core/bin/verification_key_generator_and_server/src/lib.rs new file mode 100644 index 000000000000..66fb37dcaf4c --- /dev/null +++ b/core/bin/verification_key_generator_and_server/src/lib.rs @@ -0,0 +1,114 @@ +use std::collections::HashMap; +use std::path::Path; +use zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zksync_types::zkevm_test_harness::bellman::bn256::Bn256; +use zksync_types::zkevm_test_harness::bellman::plonk::better_better_cs::setup::VerificationKey; +use zksync_types::zkevm_test_harness::witness::oracle::VmWitnessOracle; + +use itertools::Itertools; +use zksync_types::circuit::{ + GEOMETRY_CONFIG, LEAF_SPLITTING_FACTOR, NODE_SPLITTING_FACTOR, SCHEDULER_UPPER_BOUND, +}; +use zksync_types::zkevm_test_harness::witness::full_block_artifact::BlockBasicCircuits; +use zksync_types::zkevm_test_harness::witness::recursive_aggregation::padding_aggregations; +use zksync_types::zkevm_test_harness::witness::vk_set_generator::circuits_for_vk_generation; + +#[cfg(test)] +mod tests; + +pub fn get_vks_for_basic_circuits( +) -> HashMap>>> { + // 3-17 are the ids of basic circuits + (3..=18) + .map(|circuit_type| (circuit_type, get_vk_for_circuit_type(circuit_type))) + .collect() +} + +pub fn get_vk_for_circuit_type( + circuit_type: u8, +) -> VerificationKey>> { + let filepath = get_file_path(circuit_type); + vlog::info!("Fetching verification 
key from path: {}", filepath); + let text = std::fs::read_to_string(&filepath) + .unwrap_or_else(|_| panic!("Failed reading verification key from path: {}", filepath)); + serde_json::from_str::>>>( + &text, + ) + .unwrap_or_else(|_| { + panic!( + "Failed deserializing verification key from path: {}", + filepath + ) + }) +} + +pub fn save_vk_for_circuit_type( + circuit_type: u8, + vk: VerificationKey>>, +) { + let filepath = get_file_path(circuit_type); + vlog::info!("saving verification key to: {}", filepath); + std::fs::write(filepath, serde_json::to_string_pretty(&vk).unwrap()).unwrap(); +} + +pub fn get_ordered_vks_for_basic_circuits( + circuits: &BlockBasicCircuits, + verification_keys: &HashMap< + u8, + VerificationKey>>, + >, +) -> Vec>>> { + circuits + .clone() + .into_flattened_set() + .iter() + .map(|circuit| { + let circuit_id = circuit.numeric_circuit_type(); + verification_keys + .get(&circuit_id) + .unwrap_or_else(|| { + panic!("no VK for circuit number {:?}", circuit.short_description()) + }) + .clone() + }) + .collect() +} + +pub fn get_vks_for_commitment( + verification_keys: HashMap< + u8, + VerificationKey>>, + >, +) -> Vec>>> { + // We need all the vks sorted by their respective circuit ids + verification_keys + .into_iter() + .sorted_by_key(|(id, _)| *id) + .map(|(_, val)| val) + .collect() +} + +pub fn get_circuits_for_vk() -> Vec>> { + ensure_setup_key_exist(); + let padding_aggregations = padding_aggregations(NODE_SPLITTING_FACTOR); + circuits_for_vk_generation( + GEOMETRY_CONFIG, + LEAF_SPLITTING_FACTOR, + NODE_SPLITTING_FACTOR, + SCHEDULER_UPPER_BOUND, + padding_aggregations, + ) +} + +fn ensure_setup_key_exist() { + if !Path::new("setup_2^26.key").exists() { + panic!("File setup_2^26.key is required to be present in current directory for verification keys generation. 
\ndownload from https://storage.googleapis.com/universal-setup/setup_2^26.key"); + } +} +fn get_file_path(circuit_type: u8) -> String { + let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| "/".into()); + format!( + "{}/core/bin/verification_key_generator_and_server/data/verification_{}_key.json", + zksync_home, circuit_type + ) +} diff --git a/core/bin/verification_key_generator_and_server/src/main.rs b/core/bin/verification_key_generator_and_server/src/main.rs new file mode 100644 index 000000000000..a05c112ec528 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/src/main.rs @@ -0,0 +1,82 @@ +use serde_json::Value; +use std::collections::HashSet; +use std::env; +use std::fs::File; +use std::io::{BufReader, Read}; +use std::iter::FromIterator; +use zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zksync_types::zkevm_test_harness::bellman::bn256::Bn256; +use zksync_types::zkevm_test_harness::bellman::plonk::better_better_cs::cs::PlonkCsWidth4WithNextStepAndCustomGatesParams; +use zksync_types::zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zksync_verification_key_server::{get_circuits_for_vk, save_vk_for_circuit_type}; + +fn main() { + let args: Vec = env::args().collect(); + let mut circuit_types: HashSet = (3..17).collect(); + if args.len() > 1 { + circuit_types = HashSet::from_iter([get_and_ensure_valid_circuit_type(args[1].clone())]); + } + vlog::info!("Starting verification key generation!"); + get_circuits_for_vk() + .into_iter() + .filter(|c| circuit_types.contains(&c.numeric_circuit_type())) + .for_each(generate_verification_key); +} + +fn get_and_ensure_valid_circuit_type(circuit_type: String) -> u8 { + vlog::info!("Received circuit_type: {:?}", circuit_type); + circuit_type + .parse::() + .expect("Please specify a circuit type in range [1, 17]") +} + +fn generate_verification_key(circuit: ZkSyncCircuit>) { + let res = 
circuit_testing::create_vk_for_padding_size_log_2::< + Bn256, + _, + PlonkCsWidth4WithNextStepAndCustomGatesParams, + >(circuit.clone(), 26) + .unwrap(); + save_vk_for_circuit_type(circuit.numeric_circuit_type(), res); + vlog::info!( + "Finished VK generation for circuit {:?} (id {:?})", + circuit.short_description(), + circuit.numeric_circuit_type() + ); +} + +fn _extract_keys_from_json_file(filepath: &str) { + let mut file = File::open(filepath).unwrap(); + let mut data = String::new(); + file.read_to_string(&mut data).unwrap(); + let json: Vec = serde_json::from_str(&data).expect("malformed JSON"); + for mut item in json { + let kv = item.as_object_mut().unwrap(); + _build_and_save_verification_key(kv); + } +} + +fn _build_and_save_verification_key(kv: &mut serde_json::Map) { + let key: &str = kv + .get("key") + .expect("key must be present in json") + .as_str() + .expect("key must be of type string"); + let circuit_type: u8 = key + .chars() + .filter(|c| c.is_ascii_digit()) + .collect::() + .parse::() + .unwrap(); + let value: String = kv + .get("value") + .expect("value must be present in json") + .as_str() + .expect("value must be of type string") + .replace("E'\\\\x", "") + .replace('\'', ""); + let bytes = hex::decode(value).expect("Invalid hex string for verification key"); + let vk = bincode::deserialize_from(BufReader::new(bytes.as_slice())).unwrap(); + vlog::info!("Extracted circuit_type: {:?} vk : {:?}", circuit_type, vk); + save_vk_for_circuit_type(circuit_type, vk); +} diff --git a/core/bin/verification_key_generator_and_server/src/tests.rs b/core/bin/verification_key_generator_and_server/src/tests.rs new file mode 100644 index 000000000000..8f013bad2000 --- /dev/null +++ b/core/bin/verification_key_generator_and_server/src/tests.rs @@ -0,0 +1,66 @@ +use crate::{get_vk_for_circuit_type, get_vks_for_basic_circuits, get_vks_for_commitment}; +use itertools::Itertools; +use serde_json::Value; +use std::collections::HashMap; +use 
zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zksync_types::zkevm_test_harness::bellman::bn256::Bn256; +use zksync_types::zkevm_test_harness::bellman::plonk::better_better_cs::setup::VerificationKey; + +use zksync_types::zkevm_test_harness::witness::oracle::VmWitnessOracle; + +#[test] +fn test_get_vk_for_circuit_type() { + for circuit_type in 1..=18 { + get_vk_for_circuit_type(circuit_type); + } +} + +#[test] +fn test_get_vks_for_basic_circuits() { + let circuit_type_to_vk = get_vks_for_basic_circuits(); + let circuit_types: Vec = circuit_type_to_vk.into_keys().sorted().collect::>(); + let expected: Vec = (3..=18).collect(); + assert_eq!( + expected, circuit_types, + "circuit types must be in the range [3, 17]" + ); +} + +#[test] +fn test_get_vks_for_commitment() { + let vk_5 = get_vk_for_circuit_type(5); + let vk_2 = get_vk_for_circuit_type(2); + let vk_3 = get_vk_for_circuit_type(3); + let map = HashMap::from([ + (5u8, vk_5.clone()), + (2u8, vk_2.clone()), + (3u8, vk_3.clone()), + ]); + let vks = get_vks_for_commitment(map); + let expected = vec![vk_2, vk_3, vk_5]; + compare_vks( + expected, + vks, + "expected verification key to be in order 2, 3, 5", + ); +} + +fn get_vk_json(vk: &VerificationKey>>) -> Value { + serde_json::to_value(vk).unwrap() +} + +fn get_vk_jsons( + vks: Vec>>>, +) -> Vec { + vks.into_iter().map(|vk| get_vk_json(&vk)).collect() +} + +fn compare_vks( + first: Vec>>>, + second: Vec>>>, + error_message: &str, +) { + let first_json = get_vk_jsons(first); + let second_json = get_vk_jsons(second); + assert_eq!(first_json, second_json, "{:?}", error_message); +} diff --git a/core/bin/zksync_core/Cargo.toml b/core/bin/zksync_core/Cargo.toml new file mode 100644 index 000000000000..569af827f644 --- /dev/null +++ b/core/bin/zksync_core/Cargo.toml @@ -0,0 +1,81 @@ +[package] +name = "zksync_core" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" 
+repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # We don't want to publish our binaries. + +[dependencies] +zksync_state = { path = "../../lib/state", version = "1.0" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_dal = { path = "../../lib/dal", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_utils = { path = "../../lib/utils", version = "1.0" } +zksync_contracts = { path = "../../lib/contracts", version = "1.0" } +zksync_eth_client = { path = "../../lib/eth_client", version = "1.0" } +zksync_eth_signer = { path = "../../lib/eth_signer", version = "1.0" } +zksync_mempool = { path = "../../lib/mempool", version = "1.0" } +zksync_prover_utils = { path = "../../lib/prover_utils", version = "1.0" } +zksync_queued_job_processor = { path = "../../lib/queued_job_processor", version = "1.0" } +zksync_circuit_breaker = { path = "../../lib/circuit_breaker", version = "1.0" } +vm = { path = "../../lib/vm", version = "0.1.0" } +zksync_storage = { path = "../../lib/storage", version = "1.0" } +zksync_merkle_tree = { path = "../../lib/merkle_tree", version = "1.0" } +zksync_mini_merkle_tree = { path = "../../lib/mini_merkle_tree", version = "1.0" } +zksync_verification_key_generator_and_server = { path = "../verification_key_generator_and_server", version = "1.0" } +prometheus_exporter = { path = "../../lib/prometheus_exporter", version = "1.0" } +zksync_web3_decl = { path = "../../lib/web3_decl", version = "1.0", default-features = false, features = [ + "server", +] } +zksync_object_store = { path = "../../lib/object_store", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } + +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +metrics = "0.20" +itertools = "0.10.3" +structopt = "0.3.20" +once_cell = "1.7" +ctrlc = { version = "3.1", features = 
["termination"] } +bincode = "1" +rand = "0.8" + +tokio = { version = "1", features = ["time"] } +futures = { version = "0.3", features = ["compat"] } +chrono = { version = "0.4", features = ["serde", "rustc-serialize"] } +anyhow = "1.0" +thiserror = "1.0" +async-trait = "0.1" +async-std = "1.12.0" + +# API dependencies +jsonrpc-core = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } +jsonrpc-core-client = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } # Required for the RPC trait +jsonrpc-http-server = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } +jsonrpc-ws-server = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } +jsonrpc-derive = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } +jsonrpc-pubsub = { git = "https://github.com/matter-labs/jsonrpc.git", branch = "master" } +num = { version = "0.3.1", features = ["serde"] } +bigdecimal = { version = "=0.2.0", features = ["serde"] } +reqwest = { version = "0.11", features = ["blocking", "json"] } +hex = "0.4" +governor = "0.4.2" +tempfile = "3.0.2" + +actix-rt = "2.2.0" +actix-cors = "0.6.0-beta.2" +actix-web = "4.0.0-beta.8" + +tracing = { version= "0.1.26" } + +[dev-dependencies] +db_test_macro = { path = "../../lib/db_test_macro", version = "0.1.0" } +assert_matches = "1.5" + +[features] +openzeppelin_tests = [] diff --git a/core/bin/zksync_core/src/api_server/execution_sandbox.rs b/core/bin/zksync_core/src/api_server/execution_sandbox.rs new file mode 100644 index 000000000000..86e266abc6dd --- /dev/null +++ b/core/bin/zksync_core/src/api_server/execution_sandbox.rs @@ -0,0 +1,663 @@ +use std::collections::HashSet; +use std::time::Instant; + +use super::tx_sender::SubmitTxError; +use crate::api_server::web3::backend_jsonrpc::error::internal_error; +use thiserror::Error; +use tracing::{span, Level}; +use vm::oracles::tracer::{ValidationError, ValidationTracerParams}; +use 
vm::utils::default_block_properties; +use zksync_types::api::BlockId; +use zksync_types::tx::tx_execution_info::get_initial_and_repeated_storage_writes; +use zksync_types::utils::storage_key_for_eth_balance; +use zksync_types::{ + get_known_code_key, H256, PUBLISH_BYTECODE_OVERHEAD, TRUSTED_ADDRESS_SLOTS, TRUSTED_TOKEN_SLOTS, +}; + +use crate::db_storage_provider::DbStorageProvider; +use vm::vm_with_bootloader::{ + derive_base_fee_and_gas_per_pubdata, init_vm, push_transaction_to_bootloader_memory, + BlockContext, BlockContextMode, BootloaderJobType, DerivedBlockContext, TxExecutionMode, +}; +use vm::{ + storage::Storage, utils::ETH_CALL_GAS_LIMIT, TxRevertReason, VmBlockResult, VmExecutionResult, + VmInstance, +}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_state::storage_view::StorageView; +use zksync_types::{ + api, + event::{extract_long_l2_to_l1_messages, extract_published_bytecodes}, + fee::TransactionExecutionMetrics, + get_nonce_key, + l2::L2Tx, + utils::{decompose_full_nonce, nonces_to_full_nonce}, + AccountTreeId, MiniblockNumber, Nonce, Transaction, U256, +}; +use zksync_utils::bytecode::{bytecode_len_in_bytes, hash_bytecode}; +use zksync_utils::time::millis_since_epoch; +use zksync_utils::{h256_to_u256, u256_to_h256}; +use zksync_web3_decl::error::Web3Error; + +#[derive(Debug, Error)] +pub enum SandboxExecutionError { + #[error("Account validation failed: {0}")] + AccountValidationFailed(String), + #[error("Failed to charge fee: {0}")] + FailedToChargeFee(String), + #[error("Paymaster validation failed: {0}")] + PaymasterValidationFailed(String), + #[error("Pre-paymaster preparation failed: {0}")] + PrePaymasterPreparationFailed(String), + #[error("From is not an account")] + FromIsNotAnAccount, + #[error("Bootloader failure: {0}")] + BootloaderFailure(String), + #[error("Revert: {0}")] + Revert(String), + #[error("Failed to pay for the transaction: {0}")] + FailedToPayForTransaction(String), + #[error("Bootloader-based tx failed")] 
+ InnerTxError, + #[error( + "Virtual machine entered unexpected state. Please contact developers and provide transaction details \ + that caused this error. Error description: {0}" + )] + UnexpectedVMBehavior(String), + #[error("Transaction is unexecutable. Reason: {0}")] + Unexecutable(String), +} + +pub fn execute_tx_eth_call( + connection_pool: &ConnectionPool, + mut tx: L2Tx, + block_id: api::BlockId, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, +) -> Result { + let mut storage = connection_pool.access_storage_blocking(); + let resolved_block_number = storage + .blocks_web3_dal() + .resolve_block_id(block_id) + .map_err(|err| internal_error("eth_call", err))??; + let block_timestamp_s = storage + .blocks_web3_dal() + .get_block_timestamp(resolved_block_number) + .unwrap(); + + // Protection against infinite-loop eth_calls and alike: + // limiting the amount of gas the call can use. + // We can't use BLOCK_ERGS_LIMIT here since the VM itself has some overhead. 
+ tx.common_data.fee.gas_limit = ETH_CALL_GAS_LIMIT.into(); + let vm_result = execute_tx_in_sandbox( + storage, + tx, + TxExecutionMode::EthCall, + AccountTreeId::default(), + block_id, + resolved_block_number, + block_timestamp_s, + None, + U256::zero(), + BootloaderJobType::TransactionExecution, + l1_gas_price, + fair_l2_gas_price, + enforced_base_fee, + ) + .1 + .map_err(|err| { + let submit_tx_error: SubmitTxError = err.into(); + Web3Error::SubmitTransactionError(submit_tx_error.to_string()) + })?; + Ok(vm_result) +} + +fn get_pending_state( + connection_pool: &ConnectionPool, +) -> (BlockId, StorageProcessor<'_>, MiniblockNumber) { + let block_id = api::BlockId::Number(api::BlockNumber::Pending); + let mut connection = connection_pool.access_storage_blocking(); + let resolved_block_number = connection + .blocks_web3_dal() + .resolve_block_id(block_id) + .unwrap() + .expect("Pending block should be present"); + + (block_id, connection, resolved_block_number) +} + +#[tracing::instrument(skip(connection_pool, tx, operator_account, enforced_nonce))] +#[allow(clippy::too_many_arguments)] +pub fn execute_tx_with_pending_state( + connection_pool: &ConnectionPool, + tx: L2Tx, + operator_account: AccountTreeId, + execution_mode: TxExecutionMode, + enforced_nonce: Option, + added_balance: U256, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, +) -> ( + TransactionExecutionMetrics, + Result, +) { + let (block_id, connection, resolved_block_number) = get_pending_state(connection_pool); + + // In order for execution to pass smoothlessly, we need to ensure that block's required gasPerPubdata will be + // <= to the one in the transaction itself. 
+ let l1_gas_price = adjust_l1_gas_price_for_tx( + l1_gas_price, + fair_l2_gas_price, + tx.common_data.fee.gas_per_pubdata_limit, + ); + + execute_tx_in_sandbox( + connection, + tx, + execution_mode, + operator_account, + block_id, + resolved_block_number, + None, + enforced_nonce, + added_balance, + BootloaderJobType::TransactionExecution, + l1_gas_price, + fair_l2_gas_price, + enforced_base_fee, + ) +} + +// Returns the number of the pubdata that the transaction will spend on factory deps +pub fn get_pubdata_for_factory_deps( + connection_pool: &ConnectionPool, + factory_deps: &Option>>, +) -> u32 { + let (_, connection, block_number) = get_pending_state(connection_pool); + let db_storage_provider = DbStorageProvider::new(connection, block_number, false); + let mut storage_view = StorageView::new(db_storage_provider); + + factory_deps + .as_ref() + .map(|deps| { + let mut total_published_length = 0; + + for dep in deps.iter() { + let bytecode_hash = hash_bytecode(dep); + let key = get_known_code_key(&bytecode_hash); + + // The bytecode needs to be published only if it is not known + let is_known = storage_view.get_value(&key); + if is_known == H256::zero() { + total_published_length += dep.len() as u32 + PUBLISH_BYTECODE_OVERHEAD; + } + } + + total_published_length + }) + .unwrap_or_default() +} + +#[allow(clippy::too_many_arguments)] +pub fn validate_tx_with_pending_state( + connection_pool: &ConnectionPool, + tx: L2Tx, + operator_account: AccountTreeId, + execution_mode: TxExecutionMode, + enforced_nonce: Option, + added_balance: U256, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, +) -> Result<(), ValidationError> { + let (block_id, connection, resolved_block_number) = get_pending_state(connection_pool); + + // In order for validation to pass smoothlessly, we need to ensure that block's required gasPerPubdata will be + // <= to the one in the transaction itself. 
+ let l1_gas_price = adjust_l1_gas_price_for_tx( + l1_gas_price, + fair_l2_gas_price, + tx.common_data.fee.gas_per_pubdata_limit, + ); + + validate_tx_in_sandbox( + connection, + tx, + execution_mode, + operator_account, + block_id, + resolved_block_number, + None, + enforced_nonce, + added_balance, + l1_gas_price, + fair_l2_gas_price, + enforced_base_fee, + ) +} + +fn adjust_l1_gas_price_for_tx( + l1_gas_price: u64, + fair_l2_gas_price: u64, + tx_gas_per_pubdata_limit: U256, +) -> u64 { + let current_pubdata_price = + derive_base_fee_and_gas_per_pubdata(l1_gas_price, fair_l2_gas_price).1; + if U256::from(current_pubdata_price) <= tx_gas_per_pubdata_limit { + // The current pubdata price is small enough + l1_gas_price + } else { + // gasPerPubdata = ceil(17 * l1gasprice / fair_l2_gas_price) + // gasPerPubdata <= 17 * l1gasprice / fair_l2_gas_price + 1 + // fair_l2_gas_price(gasPerPubdata - 1) / 17 <= l1gasprice + let l1_gas_price = U256::from(fair_l2_gas_price) + * (tx_gas_per_pubdata_limit - U256::from(1u32)) + / U256::from(17); + + l1_gas_price.as_u64() + } +} + +/// This method assumes that (block with number `resolved_block_number` is present in DB) +/// or (`block_id` is `pending` and block with number `resolved_block_number - 1` is present in DB) +#[tracing::instrument(skip(connection, tx, operator_account, block_timestamp_s))] +#[allow(clippy::too_many_arguments)] +fn execute_tx_in_sandbox( + connection: StorageProcessor<'_>, + tx: L2Tx, + execution_mode: TxExecutionMode, + operator_account: AccountTreeId, + block_id: api::BlockId, + resolved_block_number: zksync_types::MiniblockNumber, + block_timestamp_s: Option, + enforced_nonce: Option, + added_balance: U256, + job_type: BootloaderJobType, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, +) -> ( + TransactionExecutionMetrics, + Result, +) { + let stage_started_at = Instant::now(); + let span = span!(Level::DEBUG, "execute_in_sandbox").entered(); + + let total_factory_deps = tx 
+ .execute + .factory_deps + .as_ref() + .map_or(0, |deps| deps.len() as u16); + + let execution_result = apply_vm_in_sandbox( + connection, + tx, + execution_mode, + operator_account, + block_id, + resolved_block_number, + block_timestamp_s, + enforced_nonce, + added_balance, + l1_gas_price, + fair_l2_gas_price, + enforced_base_fee, + |vm, tx| { + let tx: Transaction = tx.into(); + push_transaction_to_bootloader_memory(vm, &tx, execution_mode); + let VmBlockResult { + full_result: result, + .. + } = vm.execute_till_block_end(job_type); + + metrics::histogram!("api.web3.sandbox", stage_started_at.elapsed(), "stage" => "execution"); + span.exit(); + + result + }, + ); + + let tx_execution_metrics = collect_tx_execution_metrics(total_factory_deps, &execution_result); + + ( + tx_execution_metrics, + match execution_result.revert_reason { + None => Ok(execution_result), + Some(revert) => Err(revert.revert_reason.into()), + }, + ) +} + +#[allow(clippy::too_many_arguments)] +fn apply_vm_in_sandbox( + mut connection: StorageProcessor<'_>, + tx: L2Tx, + execution_mode: TxExecutionMode, + operator_account: AccountTreeId, + block_id: api::BlockId, + resolved_block_number: zksync_types::MiniblockNumber, + block_timestamp_s: Option, + enforced_nonce: Option, + added_balance: U256, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, + apply: impl FnOnce(&mut Box>, L2Tx) -> T, +) -> T { + let stage_started_at = Instant::now(); + let span = span!(Level::DEBUG, "initialization").entered(); + + let (state_block_number, vm_block_number) = match block_id { + api::BlockId::Number(api::BlockNumber::Pending) => { + let sealed_l1_batch_number = connection + .blocks_web3_dal() + .get_sealed_l1_batch_number() + .unwrap(); + let sealed_miniblock_number = connection + .blocks_web3_dal() + .get_sealed_miniblock_number() + .unwrap(); + (sealed_miniblock_number, sealed_l1_batch_number + 1) + } + _ => { + let l1_batch_number = match connection + .blocks_web3_dal() + 
.get_l1_batch_number_of_miniblock(resolved_block_number) + .unwrap() + { + Some(l1_batch_number) => l1_batch_number, + None => { + connection + .blocks_web3_dal() + .get_sealed_l1_batch_number() + .unwrap() + + 1 + } + }; + (resolved_block_number, l1_batch_number) + } + }; + + let db_storage_provider = DbStorageProvider::new(connection, state_block_number, false); + + let mut storage_view = StorageView::new(db_storage_provider); + + let block_timestamp_ms = match block_id { + api::BlockId::Number(api::BlockNumber::Pending) => millis_since_epoch(), + _ => { + let block_timestamp_s = block_timestamp_s.unwrap_or_else(|| { + panic!( + "Block timestamp is `None`, `block_id`: {:?}, `resolved_block_number`: {}", + block_id, resolved_block_number.0 + ) + }); + (block_timestamp_s as u128) * 1000 + } + }; + + if let Some(nonce) = enforced_nonce { + let nonce_key = get_nonce_key(&tx.initiator_account()); + let full_nonce = storage_view.get_value(&nonce_key); + let (_, deployment_nonce) = decompose_full_nonce(h256_to_u256(full_nonce)); + + let enforced_full_nonce = nonces_to_full_nonce(U256::from(nonce.0), deployment_nonce); + + storage_view.set_value(&nonce_key, u256_to_h256(enforced_full_nonce)); + } + + { + let payer = tx.payer(); + let balance_key = storage_key_for_eth_balance(&payer); + + let current_balance = h256_to_u256(storage_view.get_value(&balance_key)); + storage_view.set_value(&balance_key, u256_to_h256(current_balance + added_balance)); + } + + let mut oracle_tools = vm::OracleTools::new(&mut storage_view as &mut dyn Storage); + let block_properties = default_block_properties(); + + let block_context = DerivedBlockContext { + context: BlockContext { + block_number: vm_block_number.0, + block_timestamp: (block_timestamp_ms / 1000) as u64, + l1_gas_price, + fair_l2_gas_price, + operator_address: *operator_account.address(), + }, + base_fee: enforced_base_fee.unwrap_or_else(|| { + derive_base_fee_and_gas_per_pubdata(l1_gas_price, fair_l2_gas_price).0 + }), + }; + + 
// Since this method assumes that the block vm_block_number-1 is present in the DB, it means that its hash + // has already been stored in the VM. + let block_context_properties = BlockContextMode::OverrideCurrent(block_context); + + let mut vm = init_vm( + &mut oracle_tools, + block_context_properties, + &block_properties, + execution_mode, + ); + + metrics::histogram!("api.web3.sandbox", stage_started_at.elapsed(), "stage" => "initialization"); + span.exit(); + + let result = apply(&mut vm, tx); + + metrics::histogram!("runtime_context.storage_interaction", storage_view.storage_invocations as f64, "interaction" => "set_value_storage_invocations"); + metrics::histogram!("runtime_context.storage_interaction", storage_view.new_storage_invocations as f64, "interaction" => "set_value_new_storage_invocations"); + metrics::histogram!("runtime_context.storage_interaction", storage_view.get_value_storage_invocations as f64, "interaction" => "set_value_get_value_storage_invocations"); + metrics::histogram!("runtime_context.storage_interaction", storage_view.set_value_storage_invocations as f64, "interaction" => "set_value_set_value_storage_invocations"); + metrics::histogram!("runtime_context.storage_interaction", storage_view.contract_load_invocations as f64, "interaction" => "set_value_contract_load_invocations"); + + const STORAGE_INVOCATIONS_DEBUG_THRESHOLD: usize = 1000; + + if storage_view.storage_invocations > STORAGE_INVOCATIONS_DEBUG_THRESHOLD { + vlog::info!( + "Tx resulted in {} storage_invocations, {} new_storage_invocations, {} get_value_storage_invocations, {} set_value_storage_invocations, {} contract_load_invocations", + storage_view.storage_invocations, + storage_view.new_storage_invocations, + storage_view.get_value_storage_invocations, + storage_view.set_value_storage_invocations, + storage_view.contract_load_invocations + ); + } + + result +} + +// Some slots can be marked as "trusted". 
That is needed for slots which can not be +// trusted to change between validation and execution in general case, but +// sometimes we can safely rely on them to not change often. +fn get_validation_params( + connection: &mut StorageProcessor<'_>, + tx: &L2Tx, +) -> ValidationTracerParams { + let user_address = tx.common_data.initiator_address; + let paymaster_address = tx.common_data.paymaster_params.paymaster; + + // This method assumes that the number of "well-known" tokens is relatively low. When it grows + // we may need to introduce some kind of caching. + let well_known_tokens: Vec<_> = connection + .tokens_dal() + .get_well_known_token_addresses() + .into_iter() + .map(|token| token.1) + .collect(); + + let trusted_slots: HashSet<_> = well_known_tokens + .clone() + .into_iter() + .flat_map(|token| { + TRUSTED_TOKEN_SLOTS + .clone() + .into_iter() + .map(move |slot| (token, slot)) + }) + .collect(); + + // We currently don't support any specific trusted addresses. + let trusted_addresses = HashSet::new(); + + // The slots the value of which will be added as allowed address on the fly. + // Required for working with transparent proxies. 
+ let trusted_address_slots: HashSet<_> = well_known_tokens + .into_iter() + .flat_map(|token| { + TRUSTED_ADDRESS_SLOTS + .clone() + .into_iter() + .map(move |slot| (token, slot)) + }) + .collect(); + + ValidationTracerParams { + user_address, + paymaster_address, + trusted_slots, + trusted_addresses, + trusted_address_slots, + } +} + +#[allow(clippy::too_many_arguments)] +fn validate_tx_in_sandbox( + mut connection: StorageProcessor<'_>, + tx: L2Tx, + execution_mode: TxExecutionMode, + operator_account: AccountTreeId, + block_id: api::BlockId, + resolved_block_number: zksync_types::MiniblockNumber, + block_timestamp_s: Option, + enforced_nonce: Option, + added_balance: U256, + l1_gas_price: u64, + fair_l2_gas_price: u64, + enforced_base_fee: Option, +) -> Result<(), ValidationError> { + let stage_started_at = Instant::now(); + let span = span!(Level::DEBUG, "validate_in_sandbox").entered(); + let validation_params = get_validation_params(&mut connection, &tx); + + let validation_result = apply_vm_in_sandbox( + connection, + tx, + execution_mode, + operator_account, + block_id, + resolved_block_number, + block_timestamp_s, + enforced_nonce, + added_balance, + l1_gas_price, + fair_l2_gas_price, + enforced_base_fee, + |vm, tx| { + let stage_started_at = Instant::now(); + let span = span!(Level::DEBUG, "validation").entered(); + + let tx: Transaction = tx.into(); + push_transaction_to_bootloader_memory(vm, &tx, execution_mode); + let result = vm.execute_validation(validation_params); + + metrics::histogram!("api.web3.sandbox", stage_started_at.elapsed(), "stage" => "validation"); + span.exit(); + + result + }, + ); + + metrics::histogram!("server.api.validation_sandbox", stage_started_at.elapsed(), "stage" => "validate_in_sandbox"); + span.exit(); + + validation_result +} + +fn collect_tx_execution_metrics( + contracts_deployed: u16, + result: &VmExecutionResult, +) -> TransactionExecutionMetrics { + let event_topics = result + .events + .iter() + .map(|event| 
event.indexed_topics.len() as u16) + .sum(); + + let l2_l1_long_messages = extract_long_l2_to_l1_messages(&result.events) + .iter() + .map(|event| event.len()) + .sum(); + + let published_bytecode_bytes = extract_published_bytecodes(&result.events) + .iter() + .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash)) + .sum(); + + let (initial_storage_writes, repeated_storage_writes) = + get_initial_and_repeated_storage_writes(result.storage_log_queries.as_slice()); + + TransactionExecutionMetrics { + initial_storage_writes: initial_storage_writes as usize, + repeated_storage_writes: repeated_storage_writes as usize, + gas_used: result.gas_used as usize, + event_topics, + l2_l1_long_messages, + published_bytecode_bytes, + contracts_used: result.contracts_used, + contracts_deployed, + l2_l1_logs: result.l2_to_l1_logs.len(), + vm_events: result.events.len(), + storage_logs: result.storage_log_queries.len(), + total_log_queries: result.total_log_queries, + cycles_used: result.cycles_used, + } +} + +impl From for SandboxExecutionError { + fn from(reason: TxRevertReason) -> Self { + match reason { + TxRevertReason::EthCall(reason) => SandboxExecutionError::Revert(reason.to_string()), + TxRevertReason::TxOutOfGas => { + SandboxExecutionError::Revert(TxRevertReason::TxOutOfGas.to_string()) + } + TxRevertReason::FailedToChargeFee(reason) => { + SandboxExecutionError::FailedToChargeFee(reason.to_string()) + } + TxRevertReason::FromIsNotAnAccount => SandboxExecutionError::FromIsNotAnAccount, + TxRevertReason::InnerTxError => SandboxExecutionError::InnerTxError, + TxRevertReason::Unknown(reason) => { + SandboxExecutionError::BootloaderFailure(reason.to_string()) + } + TxRevertReason::ValidationFailed(reason) => { + SandboxExecutionError::AccountValidationFailed(reason.to_string()) + } + TxRevertReason::PaymasterValidationFailed(reason) => { + SandboxExecutionError::PaymasterValidationFailed(reason.to_string()) + } + TxRevertReason::PrePaymasterPreparationFailed(reason) => { + 
SandboxExecutionError::PrePaymasterPreparationFailed(reason.to_string()) + } + TxRevertReason::UnexpectedVMBehavior(reason) => { + SandboxExecutionError::UnexpectedVMBehavior(reason) + } + TxRevertReason::BootloaderOutOfGas => { + SandboxExecutionError::UnexpectedVMBehavior("bootloader is out of gas".to_string()) + } + TxRevertReason::NotEnoughGasProvided => SandboxExecutionError::UnexpectedVMBehavior( + "The bootloader did not contain enough gas to execute the transaction".to_string(), + ), + revert_reason @ TxRevertReason::FailedToMarkFactoryDependencies(_) => { + SandboxExecutionError::Revert(revert_reason.to_string()) + } + TxRevertReason::PayForTxFailed(reason) => { + SandboxExecutionError::FailedToPayForTransaction(reason.to_string()) + } + TxRevertReason::TooBigGasLimit => { + SandboxExecutionError::Revert(TxRevertReason::TooBigGasLimit.to_string()) + } + } + } +} diff --git a/core/bin/zksync_core/src/api_server/explorer/api_decl.rs b/core/bin/zksync_core/src/api_server/explorer/api_decl.rs new file mode 100644 index 000000000000..9e0324d5f3db --- /dev/null +++ b/core/bin/zksync_core/src/api_server/explorer/api_decl.rs @@ -0,0 +1,80 @@ +use zksync_config::ZkSyncConfig; +use zksync_dal::connection::ConnectionPool; + +use actix_web::web; +use futures::channel::mpsc; +use tokio::sync::watch; + +use super::network_stats::SharedNetworkStats; + +#[derive(Debug, Clone)] +pub struct RestApi { + pub(super) master_connection_pool: ConnectionPool, + pub(super) replica_connection_pool: ConnectionPool, + pub(super) network_stats: SharedNetworkStats, + pub(super) config: ZkSyncConfig, +} + +impl RestApi { + pub fn new( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: ZkSyncConfig, + ) -> Self { + Self { + master_connection_pool, + replica_connection_pool, + network_stats: SharedNetworkStats::default(), + config, + } + } + + /// Creates an actix-web `Scope`, which can be mounted to the Http server. 
+ pub fn into_scope(self) -> actix_web::Scope { + web::scope("") + .app_data(web::Data::new(self)) + .route("/network_stats", web::get().to(Self::network_stats)) + .route("/blocks", web::get().to(Self::block_pagination)) + .route("/block/{number}", web::get().to(Self::block_details)) + .route("/transactions", web::get().to(Self::transaction_pagination)) + .route( + "/transaction/{hash}", + web::get().to(Self::transaction_details), + ) + .route("/account/{address}", web::get().to(Self::account_details)) + .route("/contract/{address}", web::get().to(Self::contract_details)) + .route("/address/{address}", web::get().to(Self::address_details)) + .route("/token/{address}", web::get().to(Self::token_details)) + .route("/events", web::get().to(Self::events_pagination)) + .route( + "/contract_verification", + web::post().to(Self::contract_verification), + ) + .route( + "/contract_verification/zksolc_versions", + web::get().to(Self::contract_verification_zksolc_versions), + ) + .route( + "/contract_verification/solc_versions", + web::get().to(Self::contract_verification_solc_versions), + ) + .route( + "/contract_verification/{id}", + web::get().to(Self::contract_verification_request_status), + ) + } + + // Spawns future updating SharedNetworkStats in the current `actix::System` + pub fn spawn_network_stats_updater( + &self, + panic_notify: mpsc::Sender, + stop_receiver: watch::Receiver, + ) { + self.network_stats.clone().start_updater_detached( + panic_notify, + self.replica_connection_pool.clone(), + self.config.api.explorer.network_stats_interval(), + stop_receiver, + ); + } +} diff --git a/core/bin/zksync_core/src/api_server/explorer/api_impl.rs b/core/bin/zksync_core/src/api_server/explorer/api_impl.rs new file mode 100644 index 000000000000..82cfcb529d53 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/explorer/api_impl.rs @@ -0,0 +1,488 @@ +use std::time::Instant; + +use actix_web::{ + web::{self, Json}, + HttpResponse, Result as ActixResult, +}; +use 
serde::Serialize; + +use zksync_types::{ + explorer_api::{ + AccountDetails, AccountType, AddressDetails, BlocksQuery, ContractDetails, EventsQuery, + PaginationQuery, TransactionsQuery, VerificationIncomingRequest, + }, + storage::L2_ETH_TOKEN_ADDRESS, + Address, MiniblockNumber, H256, +}; + +use super::api_decl::RestApi; + +fn ok_json(data: impl Serialize) -> ActixResult { + Ok(HttpResponse::Ok().json(data)) +} + +impl RestApi { + #[tracing::instrument(skip(self_))] + pub async fn network_stats(self_: web::Data) -> ActixResult { + let start = Instant::now(); + + let stats = self_.network_stats.read().await; + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "network_stats"); + ok_json(stats) + } + + #[tracing::instrument(skip(self_))] + pub async fn address_details( + self_: web::Data, + address: web::Path
, + ) -> ActixResult { + let start = Instant::now(); + + let account_type = if *address == Address::zero() { + AccountType::Contract + } else { + self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .accounts_dal() + .get_account_type(*address) + .unwrap() + }; + let response = match account_type { + AccountType::EOA => ok_json(AddressDetails::Account( + self_.account_details_inner(address).await, + )), + AccountType::Contract => { + // If account type is a contract, then `contract_details_inner` must return `Some`. + let contract_details = self_ + .contract_details_inner(address) + .await + .expect("Failed to get contract info"); + ok_json(AddressDetails::Contract(contract_details)) + } + }; + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "address_details"); + response + } + + async fn account_details_inner(&self, address: web::Path
) -> AccountDetails { + let mut storage = self.replica_connection_pool.access_storage().await; + + let balances = storage + .explorer() + .accounts_dal() + .get_balances_for_address(*address) + .unwrap(); + let (sealed_nonce, verified_nonce) = storage + .explorer() + .accounts_dal() + .get_account_nonces(*address) + .unwrap(); + + // Dirty fix for zero address. + let account_type = if *address == Address::zero() { + AccountType::Contract + } else { + storage + .explorer() + .accounts_dal() + .get_account_type(*address) + .unwrap() + }; + + AccountDetails { + address: *address, + balances, + sealed_nonce, + verified_nonce, + account_type, + } + } + + #[tracing::instrument(skip(self_))] + pub async fn account_details( + self_: web::Data, + address: web::Path
, + ) -> ActixResult { + let start = Instant::now(); + let account_details = self_.account_details_inner(address).await; + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "account_details"); + ok_json(account_details) + } + + async fn contract_details_inner(&self, address: web::Path
) -> Option { + // Dirty fix for zero address. + let contract_address = if *address == Address::zero() { + L2_ETH_TOKEN_ADDRESS + } else { + *address + }; + let mut storage = self.replica_connection_pool.access_storage().await; + let contract_info = storage + .explorer() + .misc_dal() + .get_contract_info(contract_address) + .unwrap(); + if let Some(mut contract_info) = contract_info { + contract_info.address = *address; + let contract_stats = storage + .explorer() + .misc_dal() + .get_contract_stats(*address) + .unwrap(); + let balances = storage + .explorer() + .accounts_dal() + .get_balances_for_address(*address) + .unwrap(); + Some(ContractDetails { + info: contract_info, + stats: contract_stats, + balances, + }) + } else { + None + } + } + + #[tracing::instrument(skip(self_))] + pub async fn contract_details( + self_: web::Data, + address: web::Path
, + ) -> ActixResult { + let start = Instant::now(); + + let response = match self_.contract_details_inner(address).await { + Some(contract_details) => ok_json(contract_details), + None => Ok(HttpResponse::NotFound().finish()), + }; + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "contract_details"); + response + } + + #[tracing::instrument] + fn validate_transactions_query(query: TransactionsQuery) -> Result<(), HttpResponse> { + if query.from_block_number.is_none() + && query.block_number.is_none() + && query.from_tx_index.is_some() + { + return Err(HttpResponse::BadRequest() + .body("Can't use `fromTxIndex` without `fromBlockNumber` or `blockNumber`")); + } + if query.account_address.is_some() && query.contract_address.is_some() { + return Err(HttpResponse::BadRequest() + .body("Can't use both `accountAddress` and `contractAddress`")); + } + + Ok(()) + } + + #[tracing::instrument(skip(self))] + fn validate_pagination_query(&self, pagination: PaginationQuery) -> Result<(), HttpResponse> { + if pagination.limit > self.config.api.explorer.req_entities_limit() { + return Err(HttpResponse::BadRequest().body(format!( + "Limit should not exceed {}", + self.config.api.explorer.req_entities_limit() + ))); + } + if pagination.offset + pagination.limit > self.config.api.explorer.offset_limit() { + return Err(HttpResponse::BadRequest().body(format!( + "(offset + limit) should not exceed {}", + self.config.api.explorer.offset_limit() + ))); + } + + Ok(()) + } + + #[tracing::instrument(skip(self_))] + pub async fn transaction_pagination( + self_: web::Data, + web::Query(mut query): web::Query, + ) -> ActixResult { + let start = Instant::now(); + if let Err(res) = Self::validate_transactions_query(query) { + return Ok(res); + } + if let Err(res) = self_.validate_pagination_query(query.pagination) { + return Ok(res); + } + + let mut storage = self_.replica_connection_pool.access_storage().await; + if let Some(address) = query.address { + match storage 
+ .explorer() + .accounts_dal() + .get_account_type(address) + .unwrap() + { + AccountType::EOA => query.account_address = Some(address), + AccountType::Contract => query.contract_address = Some(address), + } + } + + let response = if let Some(account_address) = query.account_address { + // If there is filter by account address + // we should query transactions from `events` table. + storage + .explorer() + .transactions_dal() + .get_account_transactions_page( + account_address, + query.tx_position(), + query.block_number, + query.pagination, + self_.config.api.explorer.offset_limit(), + self_.config.contracts.l2_erc20_bridge_addr, + ) + .unwrap() + } else { + // If there is no filter by account address + // we can query transactions directly from `transactions` table. + storage + .explorer() + .transactions_dal() + .get_transactions_page( + query.tx_position(), + query.block_number, + query.contract_address, + query.pagination, + self_.config.api.explorer.offset_limit(), + self_.config.contracts.l2_erc20_bridge_addr, + ) + .unwrap() + }; + + let query_type = if query.block_number.is_some() { + "block_txs" + } else if query.account_address.is_some() { + "account_txs" + } else if query.contract_address.is_some() { + "contract_txs" + } else { + "all_txs" + }; + let metric_endpoint_name = format!("transaction_pagination_{}", query_type); + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => metric_endpoint_name); + + ok_json(response) + } + + #[tracing::instrument(skip(self_))] + pub async fn transaction_details( + self_: web::Data, + hash: web::Path, + ) -> ActixResult { + let start = Instant::now(); + + let tx_details = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .transactions_dal() + .get_transaction_details(*hash, self_.config.contracts.l2_erc20_bridge_addr) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "transaction_details"); + match tx_details { + Some(tx_details) => 
ok_json(tx_details), + None => Ok(HttpResponse::NotFound().finish()), + } + } + + #[tracing::instrument(skip(self_))] + pub async fn block_pagination( + self_: web::Data, + web::Query(query): web::Query, + ) -> ActixResult { + let start = Instant::now(); + if let Err(res) = self_.validate_pagination_query(query.pagination) { + return Ok(res); + } + + let blocks = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .blocks_dal() + .get_blocks_page(query, self_.network_stats.read().await.last_verified) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "block_pagination"); + ok_json(blocks) + } + + #[tracing::instrument(skip(self_))] + pub async fn block_details( + self_: web::Data, + number: web::Path, + ) -> ActixResult { + let start = Instant::now(); + + let block_details = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .blocks_dal() + .get_block_details(MiniblockNumber(*number)) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "block_details"); + match block_details { + Some(block_details) => ok_json(block_details), + None => Ok(HttpResponse::NotFound().finish()), + } + } + + #[tracing::instrument(skip(self_))] + pub async fn token_details( + self_: web::Data, + address: web::Path
, + ) -> ActixResult { + let start = Instant::now(); + + let token_details = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .misc_dal() + .get_token_details(*address) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "token_details"); + match token_details { + Some(token_details) => ok_json(token_details), + None => Ok(HttpResponse::NotFound().finish()), + } + } + + /// Add a contract verification job to the queue if the requested contract wasn't previously verified. + #[tracing::instrument(skip(self_, request))] + pub async fn contract_verification( + self_: web::Data, + Json(request): Json, + ) -> ActixResult { + let start = Instant::now(); + + let mut storage = self_.master_connection_pool.access_storage().await; + + if !storage + .storage_logs_dal() + .is_contract_deployed_at_address(request.contract_address) + { + return Ok( + HttpResponse::BadRequest().body("There is no deployed contract on this address") + ); + } + if storage + .explorer() + .contract_verification_dal() + .is_contract_verified(request.contract_address) + { + return Ok(HttpResponse::BadRequest().body("This contract is already verified")); + } + + let request_id = storage + .explorer() + .contract_verification_dal() + .add_contract_verification_request(request) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "contract_verification"); + ok_json(request_id) + } + + #[tracing::instrument(skip(self_))] + pub async fn events_pagination( + self_: web::Data, + web::Query(query): web::Query, + ) -> ActixResult { + let start = Instant::now(); + if let Err(res) = self_.validate_pagination_query(query.pagination) { + return Ok(res); + } + + let events = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .events_dal() + .get_events_page(query, self_.config.api.explorer.offset_limit()) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => 
"events_pagination"); + + ok_json(events) + } + + #[tracing::instrument(skip(self_))] + pub async fn contract_verification_request_status( + self_: web::Data, + id: web::Path, + ) -> ActixResult { + let start = Instant::now(); + + let status = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .contract_verification_dal() + .get_verification_request_status(*id) + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "contract_verification_request_status"); + match status { + Some(status) => ok_json(status), + None => Ok(HttpResponse::NotFound().finish()), + } + } + + #[tracing::instrument(skip(self_))] + pub async fn contract_verification_zksolc_versions( + self_: web::Data, + ) -> ActixResult { + let start = Instant::now(); + + let versions = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .contract_verification_dal() + .get_zksolc_versions() + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "contract_verification_zksolc_versions"); + ok_json(versions) + } + + #[tracing::instrument(skip(self_))] + pub async fn contract_verification_solc_versions( + self_: web::Data, + ) -> ActixResult { + let start = Instant::now(); + + let versions = self_ + .replica_connection_pool + .access_storage() + .await + .explorer() + .contract_verification_dal() + .get_solc_versions() + .unwrap(); + + metrics::histogram!("api.explorer.call", start.elapsed(), "method" => "contract_verification_solc_versions"); + ok_json(versions) + } +} diff --git a/core/bin/zksync_core/src/api_server/explorer/mod.rs b/core/bin/zksync_core/src/api_server/explorer/mod.rs new file mode 100644 index 000000000000..27c96d7691e1 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/explorer/mod.rs @@ -0,0 +1,83 @@ +use std::net::SocketAddr; +use std::time::Duration; + +use zksync_config::ZkSyncConfig; +use zksync_dal::connection::ConnectionPool; +use 
zksync_utils::panic_notify::{spawn_panic_handler, ThreadPanicNotify}; + +use actix_cors::Cors; +use actix_web::dev::Server; +use actix_web::{web, App, HttpResponse, HttpServer}; +use tokio::sync::watch; +use tokio::task::JoinHandle; + +use api_decl::RestApi; + +pub mod api_decl; +pub mod api_impl; +pub mod network_stats; + +fn start_server(api: RestApi, bind_to: SocketAddr, threads: usize) -> Server { + HttpServer::new(move || { + let api = api.clone(); + App::new() + .wrap( + Cors::default() + .send_wildcard() + .max_age(3600) + .allow_any_origin() + .allow_any_header() + .allow_any_method(), + ) + .service(api.into_scope()) + // Endpoint needed for js isReachable + .route( + "/favicon.ico", + web::get().to(|| async { HttpResponse::Ok().finish() }), + ) + }) + .workers(threads) + .bind(bind_to) + .unwrap() + .shutdown_timeout(60) + .keep_alive(Duration::from_secs(10)) + .client_request_timeout(Duration::from_secs(60)) + .run() +} + +/// Start HTTP REST API +pub fn start_server_thread_detached( + config: &ZkSyncConfig, + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + mut stop_receiver: watch::Receiver, +) -> JoinHandle<()> { + let (handler, panic_sender) = spawn_panic_handler(); + let config = config.clone(); + + std::thread::Builder::new() + .name("explorer-api".to_string()) + .spawn(move || { + let _panic_sentinel = ThreadPanicNotify(panic_sender.clone()); + + actix_rt::System::new().block_on(async move { + let bind_address = config.api.explorer.bind_addr(); + let threads = config.api.explorer.threads_per_server as usize; + let api = RestApi::new(master_connection_pool, replica_connection_pool, config); + api.spawn_network_stats_updater(panic_sender, stop_receiver.clone()); + + let server = start_server(api, bind_address, threads); + let close_handle = server.handle(); + actix_rt::spawn(async move { + if stop_receiver.changed().await.is_ok() { + close_handle.stop(true).await; + vlog::info!("Stop signal received, explorer API 
is shutting down"); + } + }); + server.await.expect("Explorer API crashed"); + }); + }) + .expect("Failed to spawn thread for REST API"); + + handler +} diff --git a/core/bin/zksync_core/src/api_server/explorer/network_stats.rs b/core/bin/zksync_core/src/api_server/explorer/network_stats.rs new file mode 100644 index 000000000000..bd546b85d32b --- /dev/null +++ b/core/bin/zksync_core/src/api_server/explorer/network_stats.rs @@ -0,0 +1,84 @@ +use futures::channel::mpsc; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use std::time::Duration; +use tokio::sync::{watch, RwLock}; +use tokio::{runtime::Runtime, time}; +use zksync_dal::ConnectionPool; +use zksync_types::{api, MiniblockNumber}; +use zksync_utils::panic_notify::ThreadPanicNotify; + +#[derive(Default, Debug, Serialize, Deserialize, Clone)] +pub struct NetworkStats { + pub last_sealed: MiniblockNumber, + pub last_verified: MiniblockNumber, + pub total_transactions: usize, +} + +#[derive(Debug, Default, Clone)] +pub struct SharedNetworkStats(Arc>); + +impl SharedNetworkStats { + pub async fn read(&self) -> NetworkStats { + (*self.0.as_ref().read().await).clone() + } + + pub fn start_updater_detached( + self, + panic_notify: mpsc::Sender, + connection_pool: ConnectionPool, + polling_interval: Duration, + stop_receiver: watch::Receiver, + ) { + std::thread::Builder::new() + .name("explorer-stats-updater".to_string()) + .spawn(move || { + let _panic_sentinel = ThreadPanicNotify(panic_notify.clone()); + + let runtime = Runtime::new().expect("Failed to create tokio runtime"); + + let stats_update_task = async move { + let mut timer = time::interval(polling_interval); + loop { + if *stop_receiver.borrow() { + vlog::warn!( + "Stop signal received, explorer_stats_updater is shutting down" + ); + break; + } + + timer.tick().await; + + let mut storage = connection_pool.access_storage().await; + + let last_sealed = storage + .blocks_web3_dal() + .get_sealed_miniblock_number() + .unwrap(); + let last_verified 
= storage + .blocks_web3_dal() + .resolve_block_id(api::BlockId::Number(api::BlockNumber::Finalized)) + .unwrap() + .unwrap_or(MiniblockNumber(0)); + let prev_stats = self.read().await; + let new_transactions = storage + .explorer() + .transactions_dal() + .get_transactions_count_after(prev_stats.last_sealed) + .unwrap(); + + let stats = NetworkStats { + last_sealed, + last_verified, + total_transactions: prev_stats.total_transactions + new_transactions, + }; + + // save stats to state + *self.0.as_ref().write().await = stats; + } + }; + runtime.block_on(stats_update_task); + }) + .expect("Failed to start thread for network stats updating"); + } +} diff --git a/core/bin/zksync_core/src/api_server/mod.rs b/core/bin/zksync_core/src/api_server/mod.rs new file mode 100644 index 000000000000..dd50dab9f0ff --- /dev/null +++ b/core/bin/zksync_core/src/api_server/mod.rs @@ -0,0 +1,5 @@ +// Everywhere in this module the word "block" actually means "miniblock". +pub mod execution_sandbox; +pub mod explorer; +pub mod tx_sender; +pub mod web3; diff --git a/core/bin/zksync_core/src/api_server/tx_sender/error.rs b/core/bin/zksync_core/src/api_server/tx_sender/error.rs new file mode 100644 index 000000000000..c6c37ec9e87a --- /dev/null +++ b/core/bin/zksync_core/src/api_server/tx_sender/error.rs @@ -0,0 +1,127 @@ +use crate::api_server::execution_sandbox::SandboxExecutionError; +use thiserror::Error; +use vm::oracles::tracer::ValidationError; +use zksync_types::l2::error::TxCheckError; +use zksync_types::U256; + +#[derive(Debug, Error)] +pub enum SubmitTxError { + #[error("nonce too high. allowed nonce range: {0} - {1}, actual: {2}")] + NonceIsTooHigh(u32, u32, u32), + #[error("nonce too low. allowed nonce range: {0} - {1}, actual: {2}")] + NonceIsTooLow(u32, u32, u32), + #[error("{0}")] + IncorrectTx(#[from] TxCheckError), + #[error("insufficient funds for gas + value. 
balance: {0}, fee: {1}, value: {2}")] + NotEnoughBalanceForFeeValue(U256, U256, U256), + #[error("cannot estimate transaction: {0}.")] + CannotEstimateTransaction(String), + #[error("exceeds block gas limit")] + GasLimitIsTooBig, + #[error("{0}")] + Unexecutable(String), + #[error("too many transactions")] + RateLimitExceeded, + #[error("failed to include transaction in the system. reason: {0}")] + BootloaderFailure(String), + #[error("failed to validate the transaction. reason: {0}")] + ValidationFailed(String), + #[error("not enough balance to cover the fee. error message: {0}")] + FailedToChargeFee(String), + #[error("failed paymaster validation. error message: {0}")] + PaymasterValidationFailed(String), + #[error("failed pre-paymaster preparation. error message: {0}")] + PrePaymasterPreparationFailed(String), + #[error("invalid sender. can't start a transaction from a non-account")] + FromIsNotAnAccount, + #[error("max fee per gas less than block base fee")] + MaxFeePerGasTooLow, + #[error("max priority fee per gas higher than max fee per gas")] + MaxPriorityFeeGreaterThanMaxFee, + #[error( + "virtual machine entered unexpected state. please contact developers and provide transaction details \ + that caused this error. Error description: {0}" + )] + UnexpectedVMBehavior(String), + #[error("pubdata price limit is too low, ensure that the price limit is correct")] + UnrealisticPubdataPriceLimit, + #[error( + "too many factory dependencies in the transaction. 
{0} provided, while only {1} allowed" + )] + TooManyFactoryDependencies(usize, usize), + #[error("max fee per gas higher than 2^32")] + FeePerGasTooHigh, + #[error("max fee per pubdata byte higher than 2^32")] + FeePerPubdataByteTooHigh, +} +impl SubmitTxError { + pub fn grafana_error_code(&self) -> &'static str { + match self { + SubmitTxError::NonceIsTooHigh(_, _, _) => "nonce-is-too-high", + SubmitTxError::NonceIsTooLow(_, _, _) => "nonce-is-too-low", + SubmitTxError::IncorrectTx(_) => "incorrect-tx", + SubmitTxError::NotEnoughBalanceForFeeValue(_, _, _) => "not-enough-balance-for-fee", + SubmitTxError::CannotEstimateTransaction(_) => "cannot-estimate-transaction", + SubmitTxError::GasLimitIsTooBig => "gas-limit-is-too-big", + SubmitTxError::Unexecutable(_) => "unexecutable", + SubmitTxError::RateLimitExceeded => "rate-limit-exceeded", + SubmitTxError::BootloaderFailure(_) => "bootloader-failure", + SubmitTxError::ValidationFailed(_) => "validation-failed", + SubmitTxError::FailedToChargeFee(_) => "failed-too-charge-fee", + SubmitTxError::PaymasterValidationFailed(_) => "failed-paymaster-validation", + SubmitTxError::PrePaymasterPreparationFailed(_) => "failed-prepaymaster-preparation", + SubmitTxError::FromIsNotAnAccount => "from-is-not-an-account", + SubmitTxError::MaxFeePerGasTooLow => "max-fee-per-gas-too-low", + SubmitTxError::MaxPriorityFeeGreaterThanMaxFee => { + "max-priority-fee-greater-than-max-fee" + } + SubmitTxError::UnexpectedVMBehavior(_) => "unexpected-vm-behavior", + SubmitTxError::UnrealisticPubdataPriceLimit => "unrealistic-pubdata-price-limit", + SubmitTxError::TooManyFactoryDependencies(_, _) => "too-many-factory-dependencies", + SubmitTxError::FeePerGasTooHigh => "gas-price-limit-too-high", + SubmitTxError::FeePerPubdataByteTooHigh => "pubdata-price-limit-too-high", + } + } +} + +impl From for SubmitTxError { + fn from(err: SandboxExecutionError) -> SubmitTxError { + match err { + SandboxExecutionError::Revert(reason) => { + 
SubmitTxError::CannotEstimateTransaction(reason) + } + SandboxExecutionError::BootloaderFailure(reason) => { + SubmitTxError::BootloaderFailure(reason) + } + SandboxExecutionError::AccountValidationFailed(reason) => { + SubmitTxError::ValidationFailed(reason) + } + SandboxExecutionError::PaymasterValidationFailed(reason) => { + SubmitTxError::PaymasterValidationFailed(reason) + } + SandboxExecutionError::PrePaymasterPreparationFailed(reason) => { + SubmitTxError::PrePaymasterPreparationFailed(reason) + } + SandboxExecutionError::FailedToChargeFee(reason) => { + SubmitTxError::FailedToChargeFee(reason) + } + SandboxExecutionError::FromIsNotAnAccount => SubmitTxError::FromIsNotAnAccount, + SandboxExecutionError::InnerTxError => { + SubmitTxError::CannotEstimateTransaction("Bootloader-based tx failed".to_owned()) + } + SandboxExecutionError::UnexpectedVMBehavior(reason) => { + SubmitTxError::UnexpectedVMBehavior(reason) + } + SandboxExecutionError::FailedToPayForTransaction(reason) => { + SubmitTxError::FailedToChargeFee(reason) + } + SandboxExecutionError::Unexecutable(reason) => SubmitTxError::Unexecutable(reason), + } + } +} + +impl From for SubmitTxError { + fn from(err: ValidationError) -> Self { + Self::ValidationFailed(err.to_string()) + } +} diff --git a/core/bin/zksync_core/src/api_server/tx_sender/mod.rs b/core/bin/zksync_core/src/api_server/tx_sender/mod.rs new file mode 100644 index 000000000000..93a40eee3f13 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/tx_sender/mod.rs @@ -0,0 +1,571 @@ +//! Helper module to submit transactions into the zkSync Network. 
+ +// Built-in uses +use std::num::NonZeroU32; +use std::sync::Arc; +use std::time::Instant; + +// External uses +use bigdecimal::BigDecimal; +use governor::clock::MonotonicClock; +use governor::middleware::NoOpMiddleware; +use governor::state::{InMemoryState, NotKeyed}; +use governor::{Quota, RateLimiter}; + +use vm::vm_with_bootloader::{derive_base_fee_and_gas_per_pubdata, TxExecutionMode}; +use vm::zk_evm::zkevm_opcode_defs::system_params::MAX_PUBDATA_PER_BLOCK; +use zksync_config::configs::chain::StateKeeperConfig; +use zksync_dal::transactions_dal::L2TxSubmissionResult; +use zksync_eth_client::clients::http_client::EthereumClient; + +use zksync_types::fee::TransactionExecutionMetrics; +use zksync_types::utils::storage_key_for_eth_balance; +use zksync_types::{ + FAIR_L2_GAS_PRICE, MAX_GAS_PER_PUBDATA_BYTE, MAX_L2_TX_GAS_LIMIT, MAX_NEW_FACTORY_DEPS, +}; +// Workspace uses +use zksync_types::{ + api, fee::Fee, l2::error::TxCheckError::TxDuplication, AccountTreeId, Address, L2ChainId, U256, +}; +use zksync_utils::h256_to_u256; +pub mod error; +// Local uses +use crate::api_server::execution_sandbox::{ + execute_tx_with_pending_state, get_pubdata_for_factory_deps, validate_tx_with_pending_state, + SandboxExecutionError, +}; + +use crate::fee_ticker::{error::TickerError, FeeTicker, TokenPriceRequestType}; +use crate::gas_adjuster::GasAdjuster; +use crate::gas_tracker::gas_count_from_tx_and_metrics; +use crate::state_keeper::seal_criteria::{SealManager, SealResolution}; + +pub use error::SubmitTxError; +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_types::{l2::L2Tx, tx::ExecutionMetrics, Nonce}; + +pub struct TxSenderInner { + pub master_connection_pool: ConnectionPool, + pub replica_connection_pool: ConnectionPool, + pub fee_account_addr: Address, + pub chain_id: L2ChainId, + pub gas_price_scale_factor: f64, + pub max_nonce_ahead: u32, + pub max_single_tx_gas: u32, + pub rate_limiter: + Option>>, + // Used to keep track of gas prices 
for the fee ticker. + pub gas_adjuster: Arc>, + pub state_keeper_config: StateKeeperConfig, +} + +#[derive(Clone)] +pub struct TxSender(pub Arc); + +impl std::fmt::Debug for TxSender { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TxSender").finish() + } +} + +impl TxSender { + pub fn new( + config: &ZkSyncConfig, + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + gas_adjuster: Arc>, + ) -> Self { + let rate_limiter = config + .api + .web3_json_rpc + .transactions_per_sec_limit + .map(|value| { + RateLimiter::direct_with_clock( + Quota::per_second(NonZeroU32::new(value).unwrap()), + &MonotonicClock::default(), + ) + }); + + Self(Arc::new(TxSenderInner { + chain_id: L2ChainId(config.chain.eth.zksync_network_id), + master_connection_pool, + replica_connection_pool, + fee_account_addr: config.chain.state_keeper.fee_account_addr, + max_nonce_ahead: config.api.web3_json_rpc.max_nonce_ahead, + gas_price_scale_factor: config.api.web3_json_rpc.gas_price_scale_factor, + max_single_tx_gas: config.chain.state_keeper.max_single_tx_gas, + rate_limiter, + gas_adjuster, + state_keeper_config: config.chain.state_keeper.clone(), + })) + } + + #[tracing::instrument(skip(self, tx))] + pub fn submit_tx(&self, tx: L2Tx) -> Result { + if let Some(rate_limiter) = &self.0.rate_limiter { + if rate_limiter.check().is_err() { + return Err(SubmitTxError::RateLimitExceeded); + } + } + let mut stage_started_at = Instant::now(); + + if tx.common_data.fee.gas_limit > U256::from(u32::MAX) + || tx.common_data.fee.gas_per_pubdata_limit > U256::from(u32::MAX) + { + return Err(SubmitTxError::GasLimitIsTooBig); + } + + let _maximal_allowed_overhead = 0; + + if tx.common_data.fee.gas_limit + > U256::from(self.0.state_keeper_config.max_allowed_l2_tx_gas_limit) + { + vlog::info!( + "Submitted Tx is Unexecutable {:?} because of GasLimitIsTooBig {}", + tx.hash(), + tx.common_data.fee.gas_limit, + ); + return 
Err(SubmitTxError::GasLimitIsTooBig); + } + if tx.common_data.fee.max_fee_per_gas < FAIR_L2_GAS_PRICE.into() { + vlog::info!( + "Submitted Tx is Unexecutable {:?} because of MaxFeePerGasTooLow {}", + tx.hash(), + tx.common_data.fee.max_fee_per_gas + ); + return Err(SubmitTxError::MaxFeePerGasTooLow); + } + if tx.common_data.fee.max_fee_per_gas < tx.common_data.fee.max_priority_fee_per_gas { + vlog::info!( + "Submitted Tx is Unexecutable {:?} because of MaxPriorityFeeGreaterThanMaxFee {}", + tx.hash(), + tx.common_data.fee.max_fee_per_gas + ); + return Err(SubmitTxError::MaxPriorityFeeGreaterThanMaxFee); + } + if tx.execute.factory_deps_length() > MAX_NEW_FACTORY_DEPS { + return Err(SubmitTxError::TooManyFactoryDependencies( + tx.execute.factory_deps_length(), + MAX_NEW_FACTORY_DEPS, + )); + } + + // We still double-check the nonce manually + // to make sure that only the correct nonce is submitted and the transaction's hashes never repeat + self.validate_account_nonce(&tx)?; + + // Even though without enough balance the tx will not pass anyway + // we check the user for enough balance explicitly here for better DevEx. 
+ self.validate_enough_balance(&tx)?; + + metrics::histogram!("api.web3.submit_tx", stage_started_at.elapsed(), "stage" => "1_validate"); + stage_started_at = Instant::now(); + + let l1_gas_price = self.0.gas_adjuster.estimate_effective_gas_price(); + let fair_l2_gas_price = FAIR_L2_GAS_PRICE; + + let (tx_metrics, _) = execute_tx_with_pending_state( + &self.0.replica_connection_pool, + tx.clone(), + AccountTreeId::new(self.0.fee_account_addr), + TxExecutionMode::EthCall, + Some(tx.nonce()), + U256::zero(), + l1_gas_price, + FAIR_L2_GAS_PRICE, + Some(tx.common_data.fee.max_fee_per_gas.as_u64()), + ); + + vlog::info!( + "Submit tx {:?} with execution metrics {:?}", + tx.hash(), + tx_metrics + ); + metrics::histogram!("api.web3.submit_tx", stage_started_at.elapsed(), "stage" => "2_dry_run"); + stage_started_at = Instant::now(); + + let validation_result = validate_tx_with_pending_state( + &self.0.replica_connection_pool, + tx.clone(), + AccountTreeId::new(self.0.fee_account_addr), + TxExecutionMode::VerifyExecute, + Some(tx.nonce()), + U256::zero(), + l1_gas_price, + fair_l2_gas_price, + Some(tx.common_data.fee.max_fee_per_gas.as_u64()), + ); + + metrics::histogram!("api.web3.submit_tx", stage_started_at.elapsed(), "stage" => "3_verify_execute"); + stage_started_at = Instant::now(); + + if let Err(err) = validation_result { + return Err(err.into()); + } + + self.ensure_tx_executable(&tx, &tx_metrics, true)?; + + let nonce = tx.common_data.nonce.0; + let hash = tx.hash(); + let expected_nonce = self.get_expected_nonce(&tx); + let submission_res_handle = self + .0 + .master_connection_pool + .access_storage_blocking() + .transactions_dal() + .insert_transaction_l2(tx, tx_metrics); + + let status: String; + let submission_result = match submission_res_handle { + L2TxSubmissionResult::AlreadyExecuted => { + status = "already_executed".to_string(); + Err(SubmitTxError::NonceIsTooLow( + expected_nonce.0, + expected_nonce.0 + self.0.max_nonce_ahead, + nonce, + )) + } + 
L2TxSubmissionResult::Duplicate => { + status = "duplicated".to_string(); + Err(SubmitTxError::IncorrectTx(TxDuplication(hash))) + } + _ => { + metrics::histogram!("api.web3.submit_tx", stage_started_at.elapsed(), "stage" => "4_db_insert"); + status = format!( + "mempool_{}", + submission_res_handle.to_string().to_lowercase() + ); + Ok(submission_res_handle) + } + }; + + metrics::counter!( + "server.processed_txs", + 1, + "stage" => status + ); + + submission_result + } + + fn validate_account_nonce(&self, tx: &L2Tx) -> Result<(), SubmitTxError> { + let expected_nonce = self.get_expected_nonce(tx); + + if tx.common_data.nonce.0 < expected_nonce.0 { + Err(SubmitTxError::NonceIsTooLow( + expected_nonce.0, + expected_nonce.0 + self.0.max_nonce_ahead, + tx.nonce().0, + )) + } else if !(expected_nonce.0..=(expected_nonce.0 + self.0.max_nonce_ahead)) + .contains(&tx.common_data.nonce.0) + { + Err(SubmitTxError::NonceIsTooHigh( + expected_nonce.0, + expected_nonce.0 + self.0.max_nonce_ahead, + tx.nonce().0, + )) + } else { + Ok(()) + } + } + + fn get_expected_nonce(&self, tx: &L2Tx) -> Nonce { + self.0 + .replica_connection_pool + .access_storage_blocking() + .storage_web3_dal() + .get_address_historical_nonce( + tx.initiator_account(), + api::BlockId::Number(api::BlockNumber::Latest), + ) + .unwrap() + .map(|n| Nonce(n.as_u32())) + .unwrap() + } + + fn validate_enough_balance(&self, tx: &L2Tx) -> Result<(), SubmitTxError> { + let paymaster = tx.common_data.paymaster_params.paymaster; + + // The paymaster is expected to pay for the tx, + // whatever balance the user has, we don't care. 
+ if paymaster != Address::default() { + return Ok(()); + } + + let eth_balance_key = storage_key_for_eth_balance(&tx.common_data.initiator_address); + + let balance = self + .0 + .replica_connection_pool + .access_storage_blocking() + .storage_dal() + .get_by_key(ð_balance_key) + .unwrap_or_default(); + let balance = h256_to_u256(balance); + + // Estimate the minimum fee price user will agree to. + let gas_price = std::cmp::min( + tx.common_data.fee.max_fee_per_gas, + U256::from(FAIR_L2_GAS_PRICE) + tx.common_data.fee.max_priority_fee_per_gas, + ); + let max_fee = tx.common_data.fee.gas_limit * gas_price; + let max_fee_and_value = max_fee + tx.execute.value; + + if balance < max_fee_and_value { + Err(SubmitTxError::NotEnoughBalanceForFeeValue( + balance, + max_fee, + tx.execute.value, + )) + } else { + Ok(()) + } + } + + /// Given the gas per pubdata limit signed by the user, returns + /// the gas per pubdata byte that should be used in the block for simulation + pub fn validate_gas_per_pubdata_byte( + &self, + agreed_by_user: U256, + ) -> Result { + // The user has agreed an a higher gas price than it is even possible to have in block. + // While we could just let it go, it is better to ensure that users know what they are doing. + if agreed_by_user > U256::from(u32::MAX) { + return Err(SubmitTxError::FeePerPubdataByteTooHigh); + } + + // It is now safe to convert here + let agreed_by_user = agreed_by_user.as_u32(); + + // This check is needed to filter out unrealistic transactions that will reside in mempool forever. + // If transaction has such limit set, most likely it was done manually or there is some mistake + // in user's code. This check is only needed for better UX. + const MIN_GAS_PER_PUBDATA_LIMIT: u32 = 10; // At 0.1 gwei per l2 gas it gives us max 1 gwei of l1 gas price. 
+ if agreed_by_user < MIN_GAS_PER_PUBDATA_LIMIT { + return Err(SubmitTxError::UnrealisticPubdataPriceLimit); + } + + let l1_gas_price = self.0.gas_adjuster.estimate_effective_gas_price(); + let suggested_gas_price_per_pubdata = + derive_base_fee_and_gas_per_pubdata(l1_gas_price, FAIR_L2_GAS_PRICE).1 as u32; + + // If user provided gas per pubdata limit lower than currently suggested + // by the server, the users' transaction will not be included in the blocks right away + // but it will stay in mempool. We still have to simulate it somehow, so we'll use the user's + // provided pubdata price + let result = agreed_by_user.min(suggested_gas_price_per_pubdata); + + Ok(result) + } + + pub fn get_txs_fee_in_wei( + &self, + mut tx: L2Tx, + estimated_fee_scale_factor: f64, + acceptable_overestimation: u32, + ) -> Result { + let l1_gas_price = { + let effective_gas_price = self.0.gas_adjuster.estimate_effective_gas_price(); + ((effective_gas_price as f64) * self.0.gas_price_scale_factor) as u64 + }; + + let (base_fee, gas_per_pubdata_byte) = + derive_base_fee_and_gas_per_pubdata(l1_gas_price, FAIR_L2_GAS_PRICE); + + // If no signature has been provided, we will use the correctly-formatted EOA + // dummy signature + if tx.common_data.signature.is_empty() { + tx.common_data.signature = vec![0u8; 65]; + tx.common_data.signature[64] = 27; + } + + /// The calculated transaction length below does not include the signature and most likely + /// most of the paymasterInput. We will assume that those take no more than 64 slots (2048 bytes) + /// in total. If they do, the user should provide manually a higher gasLimit. + /// + /// In the future, a more advanced protocol for getting the fee for transactions from custom accounts + /// will be used. + const TX_LENGTH_OVERHEAD: usize = 64; + + let _tx_encoded_len = tx.abi_encoding_len() + TX_LENGTH_OVERHEAD; + + // We already know how many gas is needed to cover for the publishing of the bytecodes. 
+ let gas_for_bytecodes_pubdata = { + let pubdata_for_factory_deps = get_pubdata_for_factory_deps( + &self.0.replica_connection_pool, + &tx.execute.factory_deps, + ); + if pubdata_for_factory_deps > MAX_PUBDATA_PER_BLOCK { + return Err(SubmitTxError::Unexecutable( + "exceeds limit for published pubdata".to_string(), + )); + } + pubdata_for_factory_deps * (gas_per_pubdata_byte as u32) + }; + + // We are using binary search to find the minimal values of gas_limit under which + // the transaction succeedes + let mut lower_bound = 0; + let mut upper_bound = MAX_L2_TX_GAS_LIMIT as u32; + + tx.common_data.fee.gas_per_pubdata_limit = MAX_GAS_PER_PUBDATA_BYTE.into(); + + // Given the gas_limit to be used for the body of the transaction, + // returns the result for executing the transaction with such gas_limit + let mut execute = |tx_gas_limit: u32| { + let gas_limit_with_overhead = tx_gas_limit; + + tx.common_data.fee.gas_limit = gas_limit_with_overhead.into(); + let initial_paid_fee = + U256::from(gas_limit_with_overhead) * tx.common_data.fee.max_fee_per_gas; + + let (tx_metrics, exec_result) = execute_tx_with_pending_state( + &self.0.replica_connection_pool, + tx.clone(), + AccountTreeId::new(self.0.fee_account_addr), + TxExecutionMode::EstimateFee, + Some(tx.nonce()), + initial_paid_fee, + l1_gas_price, + FAIR_L2_GAS_PRICE, + Some(tx.common_data.fee.max_fee_per_gas.as_u64()), + ); + + self.ensure_tx_executable(&tx, &tx_metrics, false) + .map_err(|err| { + let err_message = match err { + SubmitTxError::Unexecutable(err_message) => err_message, + _ => unreachable!(), + }; + + SandboxExecutionError::Unexecutable(err_message) + })?; + + exec_result + }; + + let mut number_of_iterations = 0usize; + while lower_bound + acceptable_overestimation < upper_bound { + let mid = (lower_bound + upper_bound) / 2; + + // There is no way to distinct between errors due to out of gas + // or normal exeuction errors, so we just hope that increasing the + // gas limit will make the 
transaction successful + if execute(gas_for_bytecodes_pubdata + mid).is_err() { + lower_bound = mid + 1; + } else { + upper_bound = mid; + } + + number_of_iterations += 1; + } + metrics::histogram!( + "api.web3.estimate_gas_binary_search_iterations", + number_of_iterations as f64 + ); + + let tx_body_gas_limit = std::cmp::min( + MAX_L2_TX_GAS_LIMIT as u32, + ((upper_bound as f64) * estimated_fee_scale_factor) as u32, + ); + + match execute(tx_body_gas_limit + gas_for_bytecodes_pubdata) { + Err(err) => Err(err.into()), + Ok(_) => { + let overhead = 0; + + let full_gas_limit = + match tx_body_gas_limit.overflowing_add(gas_for_bytecodes_pubdata + overhead) { + (_, true) => { + return Err(SubmitTxError::CannotEstimateTransaction( + "exceeds block gas limit".to_string(), + )) + } + (x, _) => x, + }; + + Ok(Fee { + max_fee_per_gas: base_fee.into(), + max_priority_fee_per_gas: 0u32.into(), + gas_limit: full_gas_limit.into(), + gas_per_pubdata_limit: gas_per_pubdata_byte.into(), + }) + } + } + } + + pub fn token_price( + &self, + request_type: TokenPriceRequestType, + l2_token_address: Address, + ) -> Result { + let mut storage = self.0.replica_connection_pool.access_storage_blocking(); + let mut tokens_web3_dal = storage.tokens_web3_dal(); + FeeTicker::get_l2_token_price(&mut tokens_web3_dal, request_type, &l2_token_address) + } + + pub fn gas_price(&self) -> u64 { + let gas_price = self.0.gas_adjuster.estimate_effective_gas_price(); + + derive_base_fee_and_gas_per_pubdata( + (gas_price as f64 * self.0.gas_price_scale_factor).round() as u64, + FAIR_L2_GAS_PRICE, + ) + .0 + } + + fn ensure_tx_executable( + &self, + transaction: &L2Tx, + tx_metrics: &TransactionExecutionMetrics, + log_message: bool, + ) -> Result<(), SubmitTxError> { + let execution_metrics = ExecutionMetrics { + initial_storage_writes: tx_metrics.initial_storage_writes, + repeated_storage_writes: tx_metrics.repeated_storage_writes, + published_bytecode_bytes: tx_metrics.published_bytecode_bytes, + 
l2_l1_long_messages: tx_metrics.l2_l1_long_messages, + l2_l1_logs: tx_metrics.l2_l1_logs, + contracts_deployed: tx_metrics.contracts_deployed, + contracts_used: tx_metrics.contracts_used, + gas_used: tx_metrics.gas_used, + storage_logs: tx_metrics.storage_logs, + vm_events: tx_metrics.vm_events, + total_log_queries: tx_metrics.total_log_queries, + cycles_used: tx_metrics.cycles_used, + }; + + // In api server it's ok to expect that all writes are initial it's safer + let tx_gas_count = + gas_count_from_tx_and_metrics(&transaction.clone().into(), &execution_metrics); + + for sealer in &SealManager::get_default_sealers() { + let seal_resolution = sealer.should_seal( + &self.0.state_keeper_config, + 0u128, + 1, + execution_metrics, + execution_metrics, + tx_gas_count, + tx_gas_count, + ); + if matches!(seal_resolution, SealResolution::Unexecutable(_)) { + let message = format!( + "Tx is Unexecutable because of {} with execution values {:?} and gas {:?}", + sealer.prom_criterion_name(), + execution_metrics, + tx_gas_count + ); + + if log_message { + vlog::info!("{:#?} {}", transaction.hash(), message); + } + + return Err(SubmitTxError::Unexecutable(message)); + } + } + Ok(()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/error.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/error.rs new file mode 100644 index 000000000000..d7cc9edb2cd0 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/error.rs @@ -0,0 +1,45 @@ +use jsonrpc_core::{Error, ErrorCode}; +use serde_json::json; +use zksync_web3_decl::error::Web3Error; + +pub fn into_jsrpc_error(err: Web3Error) -> Error { + Error { + code: match err { + Web3Error::InternalError | Web3Error::NotImplemented => ErrorCode::InternalError, + Web3Error::NoBlock + | Web3Error::NoSuchFunction + | Web3Error::RLPError(_) + | Web3Error::InvalidTransactionData(_) + | Web3Error::TooManyTopics + | Web3Error::FilterNotFound + | Web3Error::InvalidFeeParams(_) + | 
Web3Error::LogsLimitExceeded(_, _, _) => ErrorCode::InvalidParams, + Web3Error::SubmitTransactionError(_) | Web3Error::SerializationError(_) => 3.into(), + Web3Error::PubSubTimeout => 4.into(), + Web3Error::RequestTimeout => 5.into(), + }, + message: match err { + Web3Error::SubmitTransactionError(_) => err.to_string(), + _ => err.to_string(), + }, + data: match err { + Web3Error::SubmitTransactionError(err) => json! ({ + "code": 104, + "message": err + }) + .into(), + _ => None, + }, + } +} + +pub fn internal_error(method_name: &str, error: impl ToString) -> Web3Error { + vlog::error!( + "Internal error in method {}: {}", + method_name, + error.to_string(), + ); + metrics::counter!("api.web3.internal_errors", 1, "method" => method_name.to_string()); + + Web3Error::InternalError +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/mod.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/mod.rs new file mode 100644 index 000000000000..ea957a5df6fb --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/mod.rs @@ -0,0 +1,3 @@ +pub mod error; +pub mod namespaces; +pub mod pub_sub; diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/eth.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/eth.rs new file mode 100644 index 000000000000..b9d094ee2190 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/eth.rs @@ -0,0 +1,374 @@ +// Built-in uses + +// External uses +use jsonrpc_core::Result; +use jsonrpc_derive::rpc; + +// Workspace uses +use zksync_types::{ + api::{ + BlockId, BlockIdVariant, BlockNumber, Transaction, TransactionId, TransactionReceipt, + TransactionVariant, + }, + transaction_request::CallRequest, + web3::types::{Index, SyncState}, + Address, Bytes, H256, U256, U64, +}; +use zksync_web3_decl::error::Web3Error; +use zksync_web3_decl::types::{Block, Filter, FilterChanges, Log}; + +// Local uses +use 
crate::web3::backend_jsonrpc::error::into_jsrpc_error; +use crate::web3::namespaces::EthNamespace; + +#[rpc] +pub trait EthNamespaceT { + #[rpc(name = "eth_blockNumber", returns = "U64")] + fn get_block_number(&self) -> Result; + + #[rpc(name = "eth_chainId", returns = "U64")] + fn chain_id(&self) -> Result; + + #[rpc(name = "eth_call", returns = "Bytes")] + fn call(&self, req: CallRequest, block: Option) -> Result; + + #[rpc(name = "eth_estimateGas", returns = "U256")] + fn estimate_gas(&self, req: CallRequest, _block: Option) -> Result; + + #[rpc(name = "eth_gasPrice", returns = "U256")] + fn gas_price(&self) -> Result; + + #[rpc(name = "eth_newFilter", returns = "U256")] + fn new_filter(&self, filter: Filter) -> Result; + + #[rpc(name = "eth_newBlockFilter", returns = "U256")] + fn new_block_filter(&self) -> Result; + + #[rpc(name = "eth_uninstallFilter", returns = "U256")] + fn uninstall_filter(&self, idx: U256) -> Result; + + #[rpc(name = "eth_newPendingTransactionFilter", returns = "U256")] + fn new_pending_transaction_filter(&self) -> Result; + + #[rpc(name = "eth_getLogs", returns = "Vec")] + fn get_logs(&self, filter: Filter) -> Result>; + + #[rpc(name = "eth_getFilterLogs", returns = "FilterChanges")] + fn get_filter_logs(&self, filter_index: U256) -> Result; + + #[rpc(name = "eth_getFilterChanges", returns = "FilterChanges")] + fn get_filter_changes(&self, filter_index: U256) -> Result; + + #[rpc(name = "eth_getBalance", returns = "U256")] + fn get_balance(&self, address: Address, block: Option) -> Result; + + #[rpc( + name = "eth_getBlockByNumber", + returns = "Option>" + )] + fn get_block_by_number( + &self, + block_number: BlockNumber, + full_transactions: bool, + ) -> Result>>; + + #[rpc( + name = "eth_getBlockByHash", + returns = "Option>" + )] + fn get_block_by_hash( + &self, + hash: H256, + full_transactions: bool, + ) -> Result>>; + + #[rpc( + name = "eth_getBlockTransactionCountByNumber", + returns = "Option" + )] + fn 
get_block_transaction_count_by_number( + &self, + block_number: BlockNumber, + ) -> Result>; + + #[rpc(name = "eth_getBlockTransactionCountByHash", returns = "Option")] + fn get_block_transaction_count_by_hash(&self, block_hash: H256) -> Result>; + + #[rpc(name = "eth_getCode", returns = "Bytes")] + fn get_code(&self, address: Address, block: Option) -> Result; + + #[rpc(name = "eth_getStorageAt", returns = "H256")] + fn get_storage( + &self, + address: Address, + idx: U256, + block: Option, + ) -> Result; + + #[rpc(name = "eth_getTransactionCount", returns = "U256")] + fn get_transaction_count( + &self, + address: Address, + block: Option, + ) -> Result; + + #[rpc(name = "eth_getTransactionByHash", returns = "Option")] + fn get_transaction_by_hash(&self, hash: H256) -> Result>; + + #[rpc( + name = "eth_getTransactionByBlockHashAndIndex", + returns = "Option" + )] + fn get_transaction_by_block_hash_and_index( + &self, + block_hash: H256, + index: Index, + ) -> Result>; + + #[rpc( + name = "eth_getTransactionByBlockNumberAndIndex", + returns = "Option" + )] + fn get_transaction_by_block_number_and_index( + &self, + block_number: BlockNumber, + index: Index, + ) -> Result>; + + #[rpc( + name = "eth_getTransactionReceipt", + returns = "Option" + )] + fn get_transaction_receipt(&self, hash: H256) -> Result>; + + #[rpc(name = "eth_protocolVersion", returns = "String")] + fn protocol_version(&self) -> Result; + + #[rpc(name = "eth_sendRawTransaction", returns = "H256")] + fn send_raw_transaction(&self, tx_bytes: Bytes) -> Result; + + #[rpc(name = "eth_syncing", returns = "SyncState")] + fn syncing(&self) -> Result; + + #[rpc(name = "eth_accounts", returns = "Vec
")] + fn accounts(&self) -> Result>; + + #[rpc(name = "eth_coinbase", returns = "Address")] + fn coinbase(&self) -> Result
; + + #[rpc(name = "eth_getCompilers", returns = "Vec")] + fn compilers(&self) -> Result>; + + #[rpc(name = "eth_hashrate", returns = "U256")] + fn hashrate(&self) -> Result; + + #[rpc(name = "eth_getUncleCountByBlockHash", returns = "Option")] + fn get_uncle_count_by_block_hash(&self, hash: H256) -> Result>; + + #[rpc(name = "eth_getUncleCountByBlockNumber", returns = "Option")] + fn get_uncle_count_by_block_number(&self, number: BlockNumber) -> Result>; + + #[rpc(name = "eth_mining", returns = "bool")] + fn mining(&self) -> Result; + + #[rpc(name = "eth_sendTransaction", returns = "H256")] + fn send_transaction( + &self, + transaction_request: zksync_types::web3::types::TransactionRequest, + ) -> Result; +} + +impl EthNamespaceT for EthNamespace { + fn get_block_number(&self) -> Result { + self.get_block_number_impl().map_err(into_jsrpc_error) + } + + fn chain_id(&self) -> Result { + Ok(self.chain_id_impl()) + } + + fn call(&self, req: CallRequest, block: Option) -> Result { + self.call_impl(req, block.map(Into::into)) + .map_err(into_jsrpc_error) + } + + fn estimate_gas(&self, req: CallRequest, block: Option) -> Result { + self.estimate_gas_impl(req, block).map_err(into_jsrpc_error) + } + + fn gas_price(&self) -> Result { + self.gas_price_impl().map_err(into_jsrpc_error) + } + + fn new_filter(&self, filter: Filter) -> Result { + self.new_filter_impl(filter).map_err(into_jsrpc_error) + } + + fn new_block_filter(&self) -> Result { + self.new_block_filter_impl().map_err(into_jsrpc_error) + } + + fn uninstall_filter(&self, idx: U256) -> Result { + Ok(self.uninstall_filter_impl(idx)) + } + + fn new_pending_transaction_filter(&self) -> Result { + Ok(self.new_pending_transaction_filter_impl()) + } + + fn get_logs(&self, filter: Filter) -> Result> { + self.get_logs_impl(filter).map_err(into_jsrpc_error) + } + + fn get_filter_logs(&self, filter_index: U256) -> Result { + self.get_filter_logs_impl(filter_index) + .map_err(into_jsrpc_error) + } + + fn 
get_filter_changes(&self, filter_index: U256) -> Result { + self.get_filter_changes_impl(filter_index) + .map_err(into_jsrpc_error) + } + + fn get_balance(&self, address: Address, block: Option) -> Result { + self.get_balance_impl(address, block.map(Into::into)) + .map_err(into_jsrpc_error) + } + + fn get_block_by_number( + &self, + block_number: BlockNumber, + full_transactions: bool, + ) -> Result>> { + self.get_block_impl(BlockId::Number(block_number), full_transactions) + .map_err(into_jsrpc_error) + } + + fn get_block_by_hash( + &self, + hash: H256, + full_transactions: bool, + ) -> Result>> { + self.get_block_impl(BlockId::Hash(hash), full_transactions) + .map_err(into_jsrpc_error) + } + + fn get_block_transaction_count_by_number( + &self, + block_number: BlockNumber, + ) -> Result> { + self.get_block_transaction_count_impl(BlockId::Number(block_number)) + .map_err(into_jsrpc_error) + } + + fn get_block_transaction_count_by_hash(&self, block_hash: H256) -> Result> { + self.get_block_transaction_count_impl(BlockId::Hash(block_hash)) + .map_err(into_jsrpc_error) + } + + fn get_code(&self, address: Address, block: Option) -> Result { + self.get_code_impl(address, block.map(Into::into)) + .map_err(into_jsrpc_error) + } + + fn get_storage( + &self, + address: Address, + idx: U256, + block: Option, + ) -> Result { + self.get_storage_at_impl(address, idx, block.map(Into::into)) + .map_err(into_jsrpc_error) + } + + fn get_transaction_count( + &self, + address: Address, + block: Option, + ) -> Result { + self.get_transaction_count_impl(address, block.map(Into::into)) + .map_err(into_jsrpc_error) + } + + fn get_transaction_by_hash(&self, hash: H256) -> Result> { + self.get_transaction_impl(TransactionId::Hash(hash)) + .map_err(into_jsrpc_error) + } + + fn get_transaction_by_block_hash_and_index( + &self, + block_hash: H256, + index: Index, + ) -> Result> { + self.get_transaction_impl(TransactionId::Block(BlockId::Hash(block_hash), index)) + .map_err(into_jsrpc_error) + 
} + + fn get_transaction_by_block_number_and_index( + &self, + block_number: BlockNumber, + index: Index, + ) -> Result> { + self.get_transaction_impl(TransactionId::Block(BlockId::Number(block_number), index)) + .map_err(into_jsrpc_error) + } + + fn get_transaction_receipt(&self, hash: H256) -> Result> { + self.get_transaction_receipt_impl(hash) + .map_err(into_jsrpc_error) + } + + fn protocol_version(&self) -> Result { + Ok(self.protocol_version()) + } + + fn send_raw_transaction(&self, tx_bytes: Bytes) -> Result { + self.send_raw_transaction_impl(tx_bytes) + .map_err(into_jsrpc_error) + } + + fn syncing(&self) -> Result { + Ok(self.syncing_impl()) + } + + fn accounts(&self) -> Result> { + Ok(self.accounts_impl()) + } + + fn coinbase(&self) -> Result
{ + Ok(self.coinbase_impl()) + } + + fn compilers(&self) -> Result> { + Ok(self.compilers_impl()) + } + + fn hashrate(&self) -> Result { + Ok(self.hashrate_impl()) + } + + fn get_uncle_count_by_block_hash(&self, hash: H256) -> Result> { + Ok(self.uncle_count_impl(BlockId::Hash(hash))) + } + + fn get_uncle_count_by_block_number(&self, number: BlockNumber) -> Result> { + Ok(self.uncle_count_impl(BlockId::Number(number))) + } + + fn mining(&self) -> Result { + Ok(self.mining_impl()) + } + + fn send_transaction( + &self, + _transaction_request: zksync_types::web3::types::TransactionRequest, + ) -> Result { + #[cfg(feature = "openzeppelin_tests")] + return self + .send_transaction_impl(_transaction_request) + .map_err(into_jsrpc_error); + + #[cfg(not(feature = "openzeppelin_tests"))] + Err(into_jsrpc_error(Web3Error::NotImplemented)) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/mod.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/mod.rs new file mode 100644 index 000000000000..01baf794abf8 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/mod.rs @@ -0,0 +1,4 @@ +pub mod eth; +pub mod net; +pub mod web3; +pub mod zks; diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/net.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/net.rs new file mode 100644 index 000000000000..89abd3177c84 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/net.rs @@ -0,0 +1,37 @@ +// Built-in uses + +// External uses +use jsonrpc_core::Result; +use jsonrpc_derive::rpc; + +// Workspace uses +use zksync_types::U256; + +// Local uses +use crate::web3::namespaces::NetNamespace; + +#[rpc] +pub trait NetNamespaceT { + #[rpc(name = "net_version", returns = "String")] + fn net_version(&self) -> Result; + + #[rpc(name = "net_peerCount", returns = "U256")] + fn net_peer_count(&self) -> Result; + + #[rpc(name = 
"net_listening", returns = "bool")] + fn net_listening(&self) -> Result; +} + +impl NetNamespaceT for NetNamespace { + fn net_version(&self) -> Result { + Ok(self.version_impl()) + } + + fn net_peer_count(&self) -> Result { + Ok(self.peer_count_impl()) + } + + fn net_listening(&self) -> Result { + Ok(self.is_listening_impl()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/web3.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/web3.rs new file mode 100644 index 000000000000..1df21812e748 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/web3.rs @@ -0,0 +1,22 @@ +// Built-in uses + +// External uses +use jsonrpc_core::Result; +use jsonrpc_derive::rpc; + +// Workspace uses + +// Local uses +use crate::web3::namespaces::Web3Namespace; + +#[rpc] +pub trait Web3NamespaceT { + #[rpc(name = "web3_clientVersion", returns = "String")] + fn client_version(&self) -> Result; +} + +impl Web3NamespaceT for Web3Namespace { + fn client_version(&self) -> Result { + Ok(self.client_version_impl()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/zks.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/zks.rs new file mode 100644 index 000000000000..e5c198af08a6 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/namespaces/zks.rs @@ -0,0 +1,200 @@ +// Built-in uses +use std::collections::HashMap; + +// External uses +use bigdecimal::BigDecimal; +use jsonrpc_core::Result; +use jsonrpc_derive::rpc; + +// Workspace uses +use zksync_types::{ + api::{BridgeAddresses, L2ToL1LogProof, TransactionDetails}, + explorer_api::BlockDetails, + fee::Fee, + transaction_request::CallRequest, + vm_trace::{ContractSourceDebugInfo, VmDebugTrace}, + Address, Bytes, L1BatchNumber, MiniblockNumber, H256, U256, U64, +}; +use zksync_web3_decl::error::Web3Error; +use zksync_web3_decl::types::Token; + +// Local uses +use 
crate::web3::backend_jsonrpc::error::into_jsrpc_error; +use crate::web3::namespaces::ZksNamespace; + +#[rpc] +pub trait ZksNamespaceT { + #[rpc(name = "zks_estimateFee", returns = "Fee")] + fn estimate_fee(&self, req: CallRequest) -> Result; + + #[rpc(name = "zks_getMainContract", returns = "Address")] + fn get_main_contract(&self) -> Result
; + + #[rpc(name = "zks_getTestnetPaymaster", returns = "Option
")] + fn get_testnet_paymaster(&self) -> Result>; + + #[rpc(name = "zks_getBridgeContracts", returns = "BridgeAddresses")] + fn get_bridge_contracts(&self) -> Result; + + #[rpc(name = "zks_L1ChainId", returns = "U64")] + fn l1_chain_id(&self) -> Result; + + #[rpc(name = "zks_getConfirmedTokens", returns = "Vec")] + fn get_confirmed_tokens(&self, from: u32, limit: u8) -> Result>; + + #[rpc(name = "zks_getTokenPrice", returns = "BigDecimal")] + fn get_token_price(&self, token_address: Address) -> Result; + + #[rpc(name = "zks_setContractDebugInfo", returns = "bool")] + fn set_contract_debug_info( + &self, + contract_address: Address, + info: ContractSourceDebugInfo, + ) -> Result; + + #[rpc(name = "zks_getContractDebugInfo", returns = "ContractSourceDebugInfo")] + fn get_contract_debug_info( + &self, + contract_address: Address, + ) -> Result>; + + #[rpc(name = "zks_getTransactionTrace", returns = "Option")] + fn get_transaction_trace(&self, hash: H256) -> Result>; + + #[rpc(name = "zks_getAllAccountBalances", returns = "HashMap")] + fn get_all_account_balances(&self, address: Address) -> Result>; + + #[rpc(name = "zks_getL2ToL1MsgProof", returns = "Option>")] + fn get_l2_to_l1_msg_proof( + &self, + block: MiniblockNumber, + sender: Address, + msg: H256, + l2_log_position: Option, + ) -> Result>; + + #[rpc(name = "zks_getL2ToL1LogProof", returns = "Option>")] + fn get_l2_to_l1_log_proof( + &self, + tx_hash: H256, + index: Option, + ) -> Result>; + + #[rpc(name = "zks_L1BatchNumber", returns = "U64")] + fn get_l1_batch_number(&self) -> Result; + + #[rpc(name = "zks_getBlockDetails", returns = "Option")] + fn get_block_details(&self, block_number: MiniblockNumber) -> Result>; + + #[rpc(name = "zks_getL1BatchBlockRange", returns = "Option<(U64, U64)>")] + fn get_miniblock_range(&self, batch: L1BatchNumber) -> Result>; + + #[rpc(name = "zks_setKnownBytecode", returns = "bool")] + fn set_known_bytecode(&self, bytecode: Bytes) -> Result; + + #[rpc( + name = 
"zks_getTransactionDetails", + returns = "Option" + )] + fn get_transaction_details(&self, hash: H256) -> Result>; +} + +impl ZksNamespaceT for ZksNamespace { + fn estimate_fee(&self, req: CallRequest) -> Result { + self.estimate_fee_impl(req).map_err(into_jsrpc_error) + } + + fn get_main_contract(&self) -> Result
{ + Ok(self.get_main_contract_impl()) + } + + fn get_miniblock_range(&self, batch: L1BatchNumber) -> Result> { + self.get_miniblock_range_impl(batch) + .map_err(into_jsrpc_error) + } + + fn get_testnet_paymaster(&self) -> Result> { + Ok(self.get_testnet_paymaster_impl()) + } + + fn get_bridge_contracts(&self) -> Result { + Ok(self.get_bridge_contracts_impl()) + } + + fn l1_chain_id(&self) -> Result { + Ok(self.l1_chain_id_impl()) + } + + fn get_confirmed_tokens(&self, from: u32, limit: u8) -> Result> { + self.get_confirmed_tokens_impl(from, limit) + .map_err(into_jsrpc_error) + } + + fn get_token_price(&self, token_address: Address) -> Result { + self.get_token_price_impl(token_address) + .map_err(into_jsrpc_error) + } + + fn set_contract_debug_info( + &self, + address: Address, + info: ContractSourceDebugInfo, + ) -> Result { + Ok(self.set_contract_debug_info_impl(address, info)) + } + + fn get_contract_debug_info(&self, address: Address) -> Result> { + Ok(self.get_contract_debug_info_impl(address)) + } + + fn get_transaction_trace(&self, hash: H256) -> Result> { + Ok(self.get_transaction_trace_impl(hash)) + } + + fn get_all_account_balances(&self, address: Address) -> Result> { + self.get_all_account_balances_impl(address) + .map_err(into_jsrpc_error) + } + + fn get_l2_to_l1_msg_proof( + &self, + block: MiniblockNumber, + sender: Address, + msg: H256, + l2_log_position: Option, + ) -> Result> { + self.get_l2_to_l1_msg_proof_impl(block, sender, msg, l2_log_position) + .map_err(into_jsrpc_error) + } + + fn get_l2_to_l1_log_proof( + &self, + tx_hash: H256, + index: Option, + ) -> Result> { + self.get_l2_to_l1_log_proof_impl(tx_hash, index) + .map_err(into_jsrpc_error) + } + + fn get_l1_batch_number(&self) -> Result { + self.get_l1_batch_number_impl().map_err(into_jsrpc_error) + } + + fn get_block_details(&self, block_number: MiniblockNumber) -> Result> { + self.get_block_details_impl(block_number) + .map_err(into_jsrpc_error) + } + + fn 
get_transaction_details(&self, hash: H256) -> Result> { + self.get_transaction_details_impl(hash) + .map_err(into_jsrpc_error) + } + + fn set_known_bytecode(&self, _bytecode: Bytes) -> Result { + #[cfg(feature = "openzeppelin_tests")] + return Ok(self.set_known_bytecode_impl(_bytecode)); + + #[cfg(not(feature = "openzeppelin_tests"))] + Err(into_jsrpc_error(Web3Error::NotImplemented)) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/pub_sub.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/pub_sub.rs new file mode 100644 index 000000000000..24161a48fee0 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpc/pub_sub.rs @@ -0,0 +1,53 @@ +use std::sync::Arc; + +use jsonrpc_core::Result; +use jsonrpc_derive::rpc; +use jsonrpc_pubsub::typed; +use jsonrpc_pubsub::{Session, SubscriptionId}; + +use zksync_web3_decl::types::PubSubResult; + +use super::super::namespaces::EthSubscribe; + +#[rpc] +pub trait Web3PubSub { + type Metadata; + + #[pubsub(subscription = "eth_subscription", subscribe, name = "eth_subscribe")] + fn subscribe( + &self, + meta: Self::Metadata, + subscriber: typed::Subscriber, + sub_type: String, + params: Option, + ); + + #[pubsub( + subscription = "eth_subscription", + unsubscribe, + name = "eth_unsubscribe" + )] + fn unsubscribe( + &self, + meta: Option, + subscription: SubscriptionId, + ) -> Result; +} + +impl Web3PubSub for EthSubscribe { + type Metadata = Arc; + + fn subscribe( + &self, + _meta: Self::Metadata, + subscriber: typed::Subscriber, + sub_type: String, + params: Option, + ) { + self.sub(subscriber, sub_type, params); + } + + fn unsubscribe(&self, _meta: Option, id: SubscriptionId) -> Result { + self.unsub(id) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/mod.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/mod.rs new file mode 100644 index 000000000000..dcdba9bfdb52 --- /dev/null +++ 
b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/mod.rs @@ -0,0 +1,5 @@ +//! Backend "glue" which ties the actual Web3 API implementation to the `jsonrpsee` JSON RPC backend. +//! Consists mostly of boilerplate code implementing the `jsonrpsee` server traits for the corresponding +//! namespace structures defined in `zksync_core`. + +pub mod namespaces; diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth.rs new file mode 100644 index 000000000000..2c1b9bc85277 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth.rs @@ -0,0 +1,205 @@ +use crate::api_server::web3::namespaces::eth::EthNamespace; + +use zksync_types::{ + api::{ + Block, BlockId, BlockIdVariant, BlockNumber, Log, Transaction, TransactionId, + TransactionReceipt, TransactionVariant, + }, + transaction_request::CallRequest, + web3::types::{Index, SyncState}, + Address, Bytes, H256, U256, U64, +}; + +use zksync_web3_decl::{ + jsonrpsee::{core::RpcResult, types::error::CallError}, + namespaces::eth::EthNamespaceServer, + types::{Filter, FilterChanges}, +}; + +impl EthNamespaceServer for EthNamespace { + fn get_block_number(&self) -> RpcResult { + self.get_block_number_impl() + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn chain_id(&self) -> RpcResult { + Ok(self.chain_id_impl()) + } + + fn call(&self, req: CallRequest, block: Option) -> RpcResult { + self.call_impl(req, block.map(Into::into)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn estimate_gas(&self, req: CallRequest, block: Option) -> RpcResult { + self.estimate_gas_impl(req, block) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn gas_price(&self) -> RpcResult { + self.gas_price_impl() + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn new_filter(&self, filter: Filter) -> RpcResult { + 
self.new_filter_impl(filter) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn new_block_filter(&self) -> RpcResult { + self.new_block_filter_impl() + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn uninstall_filter(&self, idx: U256) -> RpcResult { + Ok(self.uninstall_filter_impl(idx)) + } + + fn new_pending_transaction_filter(&self) -> RpcResult { + Ok(self.new_pending_transaction_filter_impl()) + } + + fn get_logs(&self, filter: Filter) -> RpcResult> { + self.get_logs_impl(filter) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_filter_logs(&self, filter_index: U256) -> RpcResult { + self.get_filter_logs_impl(filter_index) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_filter_changes(&self, filter_index: U256) -> RpcResult { + self.get_filter_changes_impl(filter_index) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_balance(&self, address: Address, block: Option) -> RpcResult { + self.get_balance_impl(address, block.map(Into::into)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_block_by_number( + &self, + block_number: BlockNumber, + full_transactions: bool, + ) -> RpcResult>> { + self.get_block_impl(BlockId::Number(block_number), full_transactions) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_block_by_hash( + &self, + hash: H256, + full_transactions: bool, + ) -> RpcResult>> { + self.get_block_impl(BlockId::Hash(hash), full_transactions) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_block_transaction_count_by_number( + &self, + block_number: BlockNumber, + ) -> RpcResult> { + self.get_block_transaction_count_impl(BlockId::Number(block_number)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_block_transaction_count_by_hash(&self, block_hash: H256) -> RpcResult> { + self.get_block_transaction_count_impl(BlockId::Hash(block_hash)) + .map_err(|err| 
CallError::from_std_error(err).into()) + } + + fn get_code(&self, address: Address, block: Option) -> RpcResult { + self.get_code_impl(address, block.map(Into::into)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_storage_at( + &self, + address: Address, + idx: U256, + block: Option, + ) -> RpcResult { + self.get_storage_at_impl(address, idx, block.map(Into::into)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_count( + &self, + address: Address, + block: Option, + ) -> RpcResult { + self.get_transaction_count_impl(address, block.map(Into::into)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_by_hash(&self, hash: H256) -> RpcResult> { + self.get_transaction_impl(TransactionId::Hash(hash)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_by_block_hash_and_index( + &self, + block_hash: H256, + index: Index, + ) -> RpcResult> { + self.get_transaction_impl(TransactionId::Block(BlockId::Hash(block_hash), index)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_by_block_number_and_index( + &self, + block_number: BlockNumber, + index: Index, + ) -> RpcResult> { + self.get_transaction_impl(TransactionId::Block(BlockId::Number(block_number), index)) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_receipt(&self, hash: H256) -> RpcResult> { + self.get_transaction_receipt_impl(hash) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn protocol_version(&self) -> RpcResult { + Ok(self.protocol_version()) + } + + fn send_raw_transaction(&self, tx_bytes: Bytes) -> RpcResult { + self.send_raw_transaction_impl(tx_bytes) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn syncing(&self) -> RpcResult { + Ok(self.syncing_impl()) + } + + fn accounts(&self) -> RpcResult> { + Ok(self.accounts_impl()) + } + + fn coinbase(&self) -> RpcResult
{ + Ok(self.coinbase_impl()) + } + + fn compilers(&self) -> RpcResult> { + Ok(self.compilers_impl()) + } + + fn hashrate(&self) -> RpcResult { + Ok(self.hashrate_impl()) + } + + fn get_uncle_count_by_block_hash(&self, hash: H256) -> RpcResult> { + Ok(self.uncle_count_impl(BlockId::Hash(hash))) + } + + fn get_uncle_count_by_block_number(&self, number: BlockNumber) -> RpcResult> { + Ok(self.uncle_count_impl(BlockId::Number(number))) + } + + fn mining(&self) -> RpcResult { + Ok(self.mining_impl()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth_subscribe.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth_subscribe.rs new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/eth_subscribe.rs @@ -0,0 +1 @@ + diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/mod.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/mod.rs new file mode 100644 index 000000000000..0e9ffad42953 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/mod.rs @@ -0,0 +1,5 @@ +pub mod eth; +pub mod eth_subscribe; +pub mod net; +pub mod web3; +pub mod zks; diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/net.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/net.rs new file mode 100644 index 000000000000..a949b41e9403 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/net.rs @@ -0,0 +1,17 @@ +use crate::api_server::web3::namespaces::net::NetNamespace; +use zksync_types::U256; +use zksync_web3_decl::{jsonrpsee::core::RpcResult, namespaces::net::NetNamespaceServer}; + +impl NetNamespaceServer for NetNamespace { + fn version(&self) -> RpcResult { + Ok(self.version_impl()) + } + + fn peer_count(&self) -> RpcResult { + Ok(self.peer_count_impl()) + } + + fn 
is_listening(&self) -> RpcResult { + Ok(self.is_listening_impl()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/web3.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/web3.rs new file mode 100644 index 000000000000..0d45cf47c61d --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/web3.rs @@ -0,0 +1,8 @@ +use crate::api_server::web3::namespaces::web3::Web3Namespace; +use zksync_web3_decl::{jsonrpsee::core::RpcResult, namespaces::web3::Web3NamespaceServer}; + +impl Web3NamespaceServer for Web3Namespace { + fn client_version(&self) -> RpcResult { + Ok(self.client_version_impl()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs new file mode 100644 index 000000000000..69a6508cd77f --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/backend_jsonrpsee/namespaces/zks.rs @@ -0,0 +1,113 @@ +use crate::api_server::web3::namespaces::zks::ZksNamespace; +use bigdecimal::BigDecimal; +use std::collections::HashMap; +use zksync_types::{ + api::{BridgeAddresses, L2ToL1LogProof, TransactionDetails, U64}, + explorer_api::BlockDetails, + fee::Fee, + transaction_request::CallRequest, + vm_trace::{ContractSourceDebugInfo, VmDebugTrace}, + Address, L1BatchNumber, MiniblockNumber, H256, U256, +}; +use zksync_web3_decl::{ + jsonrpsee::{core::RpcResult, types::error::CallError}, + namespaces::zks::ZksNamespaceServer, + types::Token, +}; + +impl ZksNamespaceServer for ZksNamespace { + fn estimate_fee(&self, req: CallRequest) -> RpcResult { + self.estimate_fee_impl(req) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_main_contract(&self) -> RpcResult
{ + Ok(self.get_main_contract_impl()) + } + + fn get_testnet_paymaster(&self) -> RpcResult> { + Ok(self.get_testnet_paymaster_impl()) + } + + fn get_bridge_contracts(&self) -> RpcResult { + Ok(self.get_bridge_contracts_impl()) + } + + fn l1_chain_id(&self) -> RpcResult { + Ok(self.l1_chain_id_impl()) + } + + fn get_confirmed_tokens(&self, from: u32, limit: u8) -> RpcResult> { + self.get_confirmed_tokens_impl(from, limit) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_token_price(&self, token_address: Address) -> RpcResult { + self.get_token_price_impl(token_address) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn set_contract_debug_info( + &self, + address: Address, + info: ContractSourceDebugInfo, + ) -> RpcResult { + Ok(self.set_contract_debug_info_impl(address, info)) + } + + fn get_contract_debug_info( + &self, + address: Address, + ) -> RpcResult> { + Ok(self.get_contract_debug_info_impl(address)) + } + + fn get_transaction_trace(&self, hash: H256) -> RpcResult> { + Ok(self.get_transaction_trace_impl(hash)) + } + + fn get_all_account_balances(&self, address: Address) -> RpcResult> { + self.get_all_account_balances_impl(address) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_l2_to_l1_msg_proof( + &self, + block: MiniblockNumber, + sender: Address, + msg: H256, + l2_log_position: Option, + ) -> RpcResult> { + self.get_l2_to_l1_msg_proof_impl(block, sender, msg, l2_log_position) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_l2_to_l1_log_proof( + &self, + tx_hash: H256, + index: Option, + ) -> RpcResult> { + self.get_l2_to_l1_log_proof_impl(tx_hash, index) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_l1_batch_number(&self) -> RpcResult { + self.get_l1_batch_number_impl() + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_miniblock_range(&self, batch: L1BatchNumber) -> RpcResult> { + self.get_miniblock_range_impl(batch) + 
.map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_block_details(&self, block_number: MiniblockNumber) -> RpcResult> { + self.get_block_details_impl(block_number) + .map_err(|err| CallError::from_std_error(err).into()) + } + + fn get_transaction_details(&self, hash: H256) -> RpcResult> { + self.get_transaction_details_impl(hash) + .map_err(|err| CallError::from_std_error(err).into()) + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/mod.rs b/core/bin/zksync_core/src/api_server/web3/mod.rs new file mode 100644 index 000000000000..e06c3705ac0a --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/mod.rs @@ -0,0 +1,393 @@ +// Built-in uses +use std::sync::{Arc, RwLock}; +use std::time::Duration; +// External uses +use futures::channel::oneshot; +use futures::FutureExt; +use jsonrpc_core::IoHandler; +use jsonrpc_pubsub::PubSubHandler; +use once_cell::{self, sync::Lazy}; +use tokio::sync::watch; +use zksync_dal::ConnectionPool; + +// Workspace uses +use zksync_config::ZkSyncConfig; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_eth_signer::{EthereumSigner, PrivateKeySigner}; +use zksync_types::H256; +use zksync_web3_decl::{ + jsonrpsee::{server::ServerBuilder, RpcModule}, + namespaces::{EthNamespaceServer, NetNamespaceServer, Web3NamespaceServer, ZksNamespaceServer}, +}; + +use crate::gas_adjuster::GasAdjuster; + +// Local uses +use super::tx_sender::TxSender; +use backend_jsonrpc::{ + namespaces::{ + eth::EthNamespaceT, net::NetNamespaceT, web3::Web3NamespaceT, zks::ZksNamespaceT, + }, + pub_sub::Web3PubSub, +}; +use namespaces::{EthNamespace, EthSubscribe, NetNamespace, Web3Namespace, ZksNamespace}; +use pubsub_notifier::{notify_blocks, notify_logs, notify_txs}; +use state::{Filters, RpcState}; + +pub mod backend_jsonrpc; +pub mod backend_jsonrpsee; +pub mod namespaces; +mod pubsub_notifier; +pub mod state; + +pub fn get_config() -> &'static ZkSyncConfig { + static ZKSYNC_CONFIG: Lazy = 
Lazy::new(ZkSyncConfig::from_env); + + &ZKSYNC_CONFIG +} + +impl RpcState { + pub fn init( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + req_entities_limit: usize, + filters_limit: usize, + account_pks: Vec, + gas_adjuster: Arc>, + ) -> Self { + let config = get_config(); + let tx_sender = TxSender::new( + config, + master_connection_pool, + replica_connection_pool.clone(), + gas_adjuster, + ); + + let accounts = if cfg!(feature = "openzeppelin_tests") { + account_pks + .into_iter() + .map(|pk| { + let signer = PrivateKeySigner::new(pk); + let address = futures::executor::block_on(signer.get_address()) + .expect("Failed to get address of a signer"); + (address, signer) + }) + .collect() + } else { + Default::default() + }; + + RpcState { + installed_filters: Arc::new(RwLock::new(Filters::new(filters_limit))), + connection_pool: replica_connection_pool, + tx_sender, + req_entities_limit, + accounts, + config, + #[cfg(feature = "openzeppelin_tests")] + known_bytecodes: Arc::new(RwLock::new(Default::default())), + } + } +} + +pub fn start_http_rpc_server_old( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + _stop_receiver: watch::Receiver, + gas_adjuster: Arc>, +) -> tokio::task::JoinHandle<()> { + let io_handler = build_http_io_handler( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + let addr = config.api.web3_json_rpc.http_bind_addr(); + let threads_per_server = config.api.web3_json_rpc.threads_per_server as usize; + + let (sender, recv) = oneshot::channel::<()>(); + std::thread::spawn(move || { + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(threads_per_server) + .build() + .unwrap(); + + let server = jsonrpc_http_server::ServerBuilder::new(io_handler) + .threads(1) + .event_loop_executor(runtime.handle().clone()) + .start_http(&addr) + .unwrap(); + + server.wait(); + let _ = 
sender; + }); + + tokio::spawn(recv.map(drop)) +} + +fn start_notifying_active_subs( + pub_sub: EthSubscribe, + connection_pool: ConnectionPool, + polling_interval: Duration, + stop_receiver: watch::Receiver, +) -> Vec> { + vec![ + tokio::spawn(notify_blocks( + pub_sub.active_block_subs, + connection_pool.clone(), + polling_interval, + stop_receiver.clone(), + )), + tokio::spawn(notify_txs( + pub_sub.active_tx_subs, + connection_pool.clone(), + polling_interval, + stop_receiver.clone(), + )), + tokio::spawn(notify_logs( + pub_sub.active_log_subs, + connection_pool, + polling_interval, + stop_receiver, + )), + ] +} + +pub fn start_ws_rpc_server_old( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + stop_receiver: watch::Receiver, + gas_adjuster: Arc>, +) -> Vec> { + let pub_sub = EthSubscribe::default(); + let mut notify_handles = start_notifying_active_subs( + pub_sub.clone(), + replica_connection_pool.clone(), + config.api.web3_json_rpc.pubsub_interval(), + stop_receiver.clone(), + ); + + let addr = config.api.web3_json_rpc.ws_bind_addr(); + let (sender, recv) = oneshot::channel::<()>(); + let io = build_pubsub_io_handler( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster.clone(), + pub_sub, + ); + + let server = jsonrpc_ws_server::ServerBuilder::with_meta_extractor( + io, + |context: &jsonrpc_ws_server::RequestContext| { + Arc::new(jsonrpc_pubsub::Session::new(context.sender())) + }, + ) + .max_connections(config.api.web3_json_rpc.subscriptions_limit()) + .start(&addr) + .unwrap(); + let close_handler = server.close_handle(); + + std::thread::spawn(move || { + server.wait().unwrap(); + let _ = sender; + }); + let mut thread_stop_receiver = stop_receiver.clone(); + std::thread::spawn(move || { + let stop_signal = futures::executor::block_on(thread_stop_receiver.changed()); + if stop_signal.is_ok() { + close_handler.close(); + vlog::info!("Stop signal received, WS JSON RPC API 
is shutting down"); + } + }); + + notify_handles.push(tokio::spawn(gas_adjuster.run(stop_receiver))); + notify_handles.push(tokio::spawn(recv.map(drop))); + notify_handles +} + +pub fn start_http_rpc_server( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, +) -> tokio::task::JoinHandle<()> { + let rpc = build_rpc_module( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + let addr = config.api.web3_json_rpc.http_bind_addr(); + let threads_per_server = config.api.web3_json_rpc.threads_per_server as usize; + + // Start the server in a separate tokio runtime from a dedicated thread. + let (sender, recv) = oneshot::channel::<()>(); + std::thread::spawn(move || { + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(threads_per_server) + .build() + .unwrap(); + + runtime.block_on(async move { + let server = ServerBuilder::default() + .http_only() + .max_connections(5000) + .build(addr) + .await + .expect("Can't start the HTTP JSON RPC server"); + + let server_handle = server + .start(rpc) + .expect("Failed to start HTTP JSON RPC application"); + server_handle.stopped().await + }); + + sender.send(()).unwrap(); + }); + + // Notifier for the rest of application about the end of the task. + tokio::spawn(recv.map(drop)) +} + +pub fn start_ws_rpc_server( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, +) -> tokio::task::JoinHandle<()> { + let rpc = build_rpc_module( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + let addr = config.api.web3_json_rpc.ws_bind_addr(); + let threads_per_server = config.api.web3_json_rpc.threads_per_server as usize; + + // Start the server in a separate tokio runtime from a dedicated thread. 
+ let (sender, recv) = oneshot::channel::<()>(); + std::thread::spawn(move || { + let runtime = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .worker_threads(threads_per_server) + .build() + .unwrap(); + + runtime.block_on(async move { + let server = ServerBuilder::default() + .ws_only() + .build(addr) + .await + .expect("Can't start the WS JSON RPC server"); + + let server_handle = server + .start(rpc) + .expect("Failed to start WS JSON RPC application"); + server_handle.stopped().await + }); + + sender.send(()).unwrap(); + }); + + // Notifier for the rest of application about the end of the task. + tokio::spawn(recv.map(drop)) +} + +fn build_rpc_state( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, +) -> RpcState { + let req_entities_limit = config.api.web3_json_rpc.req_entities_limit(); + let filters_limit = config.api.web3_json_rpc.filters_limit(); + let account_pks = config.api.web3_json_rpc.account_pks(); + + RpcState::init( + master_connection_pool, + replica_connection_pool, + req_entities_limit, + filters_limit, + account_pks, + gas_adjuster, + ) +} + +fn build_http_io_handler( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, +) -> IoHandler { + let rpc_state = build_rpc_state( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + let mut io = IoHandler::new(); + io.extend_with(EthNamespace::new(rpc_state.clone()).to_delegate()); + io.extend_with(ZksNamespace::new(rpc_state).to_delegate()); + io.extend_with(Web3Namespace.to_delegate()); + io.extend_with(NetNamespace.to_delegate()); + + io +} + +fn build_pubsub_io_handler( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, + pub_sub: EthSubscribe, +) -> PubSubHandler> { + let rpc_state = build_rpc_state( + 
master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + let mut io = PubSubHandler::default(); + io.extend_with(pub_sub.to_delegate()); + io.extend_with(EthNamespace::new(rpc_state.clone()).to_delegate()); + io.extend_with(ZksNamespace::new(rpc_state).to_delegate()); + io.extend_with(Web3Namespace.to_delegate()); + io.extend_with(NetNamespace.to_delegate()); + + io +} + +fn build_rpc_module( + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + config: &ZkSyncConfig, + gas_adjuster: Arc>, +) -> RpcModule { + let rpc_app = build_rpc_state( + master_connection_pool, + replica_connection_pool, + config, + gas_adjuster, + ); + + // Declare namespaces we have. + let eth = EthNamespace::new(rpc_app.clone()); + let net = NetNamespace; + let web3 = Web3Namespace; + let zks = ZksNamespace::new(rpc_app); + + // Collect all the methods into a single RPC module. + let mut rpc: RpcModule<_> = eth.into_rpc(); + rpc.merge(net.into_rpc()) + .expect("Can't merge net namespace"); + rpc.merge(web3.into_rpc()) + .expect("Can't merge web3 namespace"); + rpc.merge(zks.into_rpc()) + .expect("Can't merge zks namespace"); + rpc +} diff --git a/core/bin/zksync_core/src/api_server/web3/namespaces/eth.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/eth.rs new file mode 100644 index 000000000000..4db3339f8ce9 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/eth.rs @@ -0,0 +1,852 @@ +use std::convert::TryInto; +use std::time::Instant; + +use itertools::Itertools; + +use zksync_types::{ + api::{ + BlockId, BlockNumber, GetLogsFilter, Transaction, TransactionId, TransactionReceipt, + TransactionVariant, + }, + l2::{L2Tx, TransactionType}, + transaction_request::CallRequest, + utils::decompose_full_nonce, + web3::types::SyncState, + AccountTreeId, Bytes, L2ChainId, MiniblockNumber, StorageKey, FAIR_L2_GAS_PRICE, H256, + L2_ETH_TOKEN_ADDRESS, MAX_GAS_PER_PUBDATA_BYTE, U256, +}; + +use zksync_web3_decl::{ + 
error::Web3Error, + types::{Address, Block, Filter, FilterChanges, Log, TypedFilter, U64}, +}; + +use crate::api_server::execution_sandbox::execute_tx_eth_call; +use crate::api_server::web3::backend_jsonrpc::error::internal_error; +use crate::api_server::web3::state::RpcState; + +use zksync_utils::u256_to_h256; + +#[cfg(feature = "openzeppelin_tests")] +use zksync_utils::bytecode::hash_bytecode; +#[cfg(feature = "openzeppelin_tests")] +use { + zksync_eth_signer::EthereumSigner, + zksync_types::{ + api::TransactionRequest, storage::CONTRACT_DEPLOYER_ADDRESS, + transaction_request::Eip712Meta, web3::contract::tokens::Tokenizable, Eip712Domain, + EIP_712_TX_TYPE, + }, +}; + +pub const EVENT_TOPIC_NUMBER_LIMIT: usize = 4; +pub const PROTOCOL_VERSION: &str = "zks/1"; + +#[derive(Debug, Clone)] +pub struct EthNamespace { + pub state: RpcState, +} + +impl EthNamespace { + pub fn new(state: RpcState) -> Self { + Self { state } + } + + #[tracing::instrument(skip(self))] + pub fn get_block_number_impl(&self) -> Result { + let start = Instant::now(); + let endpoint_name = "get_block_number"; + + let block_number = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_sealed_miniblock_number() + .map(|n| U64::from(n.0)) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + block_number + } + + #[tracing::instrument(skip(self, request, block))] + pub fn call_impl( + &self, + request: CallRequest, + block: Option, + ) -> Result { + let start = Instant::now(); + + let block = block.unwrap_or(BlockId::Number(BlockNumber::Pending)); + #[cfg(not(feature = "openzeppelin_tests"))] + let tx: L2Tx = request.try_into().map_err(Web3Error::SerializationError)?; + + #[cfg(feature = "openzeppelin_tests")] + let tx: L2Tx = self + .convert_evm_like_deploy_requests(request.into())? 
+ .try_into() + .map_err(Web3Error::SerializationError)?; + + let enforced_base_fee = Some(tx.common_data.fee.max_fee_per_gas.as_u64()); + let result = execute_tx_eth_call( + &self.state.connection_pool, + tx, + block, + self.state + .tx_sender + .0 + .gas_adjuster + .estimate_effective_gas_price(), + FAIR_L2_GAS_PRICE, + enforced_base_fee, + )?; + + let mut res_bytes = match result.revert_reason { + Some(result) => result.original_data, + None => result + .return_data + .into_iter() + .flat_map(|val| { + let bytes: [u8; 32] = val.into(); + bytes.to_vec() + }) + .collect::>(), + }; + + if cfg!(feature = "openzeppelin_tests") + && res_bytes.len() >= 100 + && hex::encode(&res_bytes[96..100]).as_str() == "08c379a0" + { + res_bytes = res_bytes[96..].to_vec(); + } + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "call"); + Ok(res_bytes.into()) + } + + #[tracing::instrument(skip(self, request, _block))] + pub fn estimate_gas_impl( + &self, + request: CallRequest, + _block: Option, + ) -> Result { + let start = Instant::now(); + + let is_eip712 = request.eip712_meta.is_some(); + + #[cfg(not(feature = "openzeppelin_tests"))] + let mut tx: L2Tx = request.try_into().map_err(Web3Error::SerializationError)?; + + #[cfg(feature = "openzeppelin_tests")] + let mut tx: L2Tx = self + .convert_evm_like_deploy_requests(request.into())? + .try_into() + .map_err(Web3Error::SerializationError)?; + + // The user may not include the proper transaction type during the estimation of + // the gas fee. However, it is needed for the bootloader checks to pass properly. + if is_eip712 { + tx.common_data.transaction_type = TransactionType::EIP712Transaction; + } + + // When we're estimating fee, we are trying to deduce values related to fee, so we should + // not consider provided ones. 
+ + tx.common_data.fee.max_fee_per_gas = self.state.tx_sender.gas_price().into(); + tx.common_data.fee.max_priority_fee_per_gas = tx.common_data.fee.max_fee_per_gas; + tx.common_data.fee.gas_per_pubdata_limit = MAX_GAS_PER_PUBDATA_BYTE.into(); + + // Modify the l1 gas price with the scale factor + let scale_factor = self + .state + .config + .api + .web3_json_rpc + .estimate_gas_scale_factor; + let acceptable_overestimation = self + .state + .config + .api + .web3_json_rpc + .estimate_gas_acceptable_overestimation; + + let fee = self + .state + .tx_sender + .get_txs_fee_in_wei(tx, scale_factor, acceptable_overestimation) + .map_err(|err| Web3Error::SubmitTransactionError(err.to_string()))?; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "estimate_gas"); + Ok(fee.gas_limit) + } + + #[tracing::instrument(skip(self))] + pub fn gas_price_impl(&self) -> Result { + let start = Instant::now(); + let endpoint_name = "gas_price"; + + let price = self.state.tx_sender.gas_price(); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(price.into()) + } + + #[tracing::instrument(skip(self))] + pub fn get_balance_impl( + &self, + address: Address, + block: Option, + ) -> Result { + let start = Instant::now(); + let endpoint_name = "get_balance"; + + let block = block.unwrap_or(BlockId::Number(BlockNumber::Pending)); + let balance = self + .state + .connection_pool + .access_storage_blocking() + .storage_web3_dal() + .standard_token_historical_balance( + AccountTreeId::new(L2_ETH_TOKEN_ADDRESS), + AccountTreeId::new(address), + block, + ) + .map_err(|err| internal_error(endpoint_name, err))?; + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + balance + } + + #[tracing::instrument(skip(self, filter))] + pub fn get_logs_impl(&self, mut filter: Filter) -> Result, Web3Error> { + let start = Instant::now(); + + let (from_block, to_block) = self.state.resolve_filter_block_range(&filter)?; + + 
filter.to_block = Some(BlockNumber::Number(to_block.0.into())); + let changes = self + .filter_changes(TypedFilter::Events(filter, from_block))? + .0; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "get_logs"); + Ok(match changes { + FilterChanges::Logs(list) => list, + _ => unreachable!("Unexpected `FilterChanges` type, expected `Logs`"), + }) + } + + #[tracing::instrument(skip(self))] + pub fn get_filter_logs_impl(&self, idx: U256) -> Result { + let start = Instant::now(); + + let filter = match self + .state + .installed_filters + .read() + .unwrap() + .get(idx) + .cloned() + { + Some(TypedFilter::Events(filter, _)) => { + let from_block = self.state.resolve_filter_block_number(filter.from_block)?; + TypedFilter::Events(filter, from_block) + } + _ => return Err(Web3Error::FilterNotFound), + }; + + let logs = self.filter_changes(filter)?.0; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "get_filter_logs"); + Ok(logs) + } + + #[tracing::instrument(skip(self))] + pub fn get_block_impl( + &self, + block: BlockId, + full_transactions: bool, + ) -> Result>, Web3Error> { + let start = Instant::now(); + let endpoint_name = if full_transactions { + "get_block_with_txs" + } else { + "get_block" + }; + + let block = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_block_by_web3_block_id( + block, + full_transactions, + L2ChainId(self.state.config.chain.eth.zksync_network_id), + ) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + + block + } + + #[tracing::instrument(skip(self))] + pub fn get_block_transaction_count_impl( + &self, + block: BlockId, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_block_transaction_count"; + + let tx_count = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_block_tx_count(block) + 
.map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + tx_count + } + + #[tracing::instrument(skip(self))] + pub fn get_code_impl( + &self, + address: Address, + block: Option, + ) -> Result { + let start = Instant::now(); + let endpoint_name = "get_code"; + + let block = block.unwrap_or(BlockId::Number(BlockNumber::Pending)); + + let contract_code = self + .state + .connection_pool + .access_storage_blocking() + .storage_web3_dal() + .get_contract_code(address, block) + .map_err(|err| internal_error(endpoint_name, err))?; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + contract_code.map(|code| code.unwrap_or_default().into()) + } + + #[tracing::instrument(skip(self))] + pub fn chain_id_impl(&self) -> U64 { + self.state.config.chain.eth.zksync_network_id.into() + } + + #[tracing::instrument(skip(self))] + pub fn get_storage_at_impl( + &self, + address: Address, + idx: U256, + block: Option, + ) -> Result { + let start = Instant::now(); + let endpoint_name = "get_storage_at"; + + let block = block.unwrap_or(BlockId::Number(BlockNumber::Pending)); + let value = self + .state + .connection_pool + .access_storage_blocking() + .storage_web3_dal() + .get_historical_value( + &StorageKey::new(AccountTreeId::new(address), u256_to_h256(idx)), + block, + ) + .map_err(|err| internal_error(endpoint_name, err))?; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + value + } + + /// Account nonce. 
+ #[tracing::instrument(skip(self))] + pub fn get_transaction_count_impl( + &self, + address: Address, + block: Option, + ) -> Result { + let start = Instant::now(); + let block = block.unwrap_or(BlockId::Number(BlockNumber::Pending)); + + let method_name = match block { + BlockId::Number(BlockNumber::Pending) => "get_pending_transaction_count", + _ => "get_historical_transaction_count", + }; + + let full_nonce = match block { + BlockId::Number(BlockNumber::Pending) => self + .state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .next_nonce_by_initiator_account(address) + .map_err(|err| internal_error(method_name, err)), + _ => self + .state + .connection_pool + .access_storage_blocking() + .storage_web3_dal() + .get_address_historical_nonce(address, block) + .map_err(|err| internal_error(method_name, err))?, + }; + + let account_nonce = full_nonce.map(|nonce| decompose_full_nonce(nonce).0); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => method_name); + account_nonce + } + + #[tracing::instrument(skip(self))] + pub fn get_transaction_impl( + &self, + id: TransactionId, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_transaction"; + + let transaction = self + .state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .get_transaction(id, L2ChainId(self.state.config.chain.eth.zksync_network_id)) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + transaction + } + + #[tracing::instrument(skip(self))] + pub fn get_transaction_receipt_impl( + &self, + hash: H256, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_transaction_receipt"; + + let res = self + .state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .get_transaction_receipt(hash) + .map_err(|err| internal_error(endpoint_name, err)); + + 
metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + res + } + + #[tracing::instrument(skip(self))] + pub fn new_block_filter_impl(&self) -> Result { + let start = Instant::now(); + let endpoint_name = "new_block_filter"; + + let last_block_number = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_sealed_miniblock_number() + .map_err(|err| internal_error(endpoint_name, err))?; + + let idx = self + .state + .installed_filters + .write() + .unwrap() + .add(TypedFilter::Blocks(last_block_number)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(idx) + } + + #[tracing::instrument(skip(self, filter))] + pub fn new_filter_impl(&self, filter: Filter) -> Result { + let start = Instant::now(); + + if let Some(topics) = filter.topics.as_ref() { + if topics.len() > EVENT_TOPIC_NUMBER_LIMIT { + return Err(Web3Error::TooManyTopics); + } + } + let from_block = self.state.get_filter_from_block(&filter)?; + let idx = self + .state + .installed_filters + .write() + .unwrap() + .add(TypedFilter::Events(filter, from_block)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "new_filter"); + Ok(idx) + } + + #[tracing::instrument(skip(self))] + pub fn new_pending_transaction_filter_impl(&self) -> U256 { + let start = Instant::now(); + + let idx = + self.state + .installed_filters + .write() + .unwrap() + .add(TypedFilter::PendingTransactions( + chrono::Utc::now().naive_utc(), + )); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "new_pending_transaction_filter"); + idx + } + + #[tracing::instrument(skip(self))] + pub fn get_filter_changes_impl(&self, idx: U256) -> Result { + let start = Instant::now(); + + let filter = match self + .state + .installed_filters + .read() + .unwrap() + .get(idx) + .cloned() + { + Some(filter) => filter, + None => return Err(Web3Error::FilterNotFound), + }; + + let result = match 
self.filter_changes(filter) { + Ok((changes, updated_filter)) => { + self.state + .installed_filters + .write() + .unwrap() + .update(idx, updated_filter); + Ok(changes) + } + Err(Web3Error::LogsLimitExceeded(_, _, _)) => { + // The filter was not being polled for a long time, so we remove it. + self.state.installed_filters.write().unwrap().remove(idx); + Err(Web3Error::FilterNotFound) + } + Err(err) => Err(err), + }; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "get_filter_changes"); + result + } + + #[tracing::instrument(skip(self))] + pub fn uninstall_filter_impl(&self, idx: U256) -> bool { + let start = Instant::now(); + + let removed = self.state.installed_filters.write().unwrap().remove(idx); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "uninstall_filter"); + removed + } + + #[tracing::instrument(skip(self))] + pub fn protocol_version(&self) -> String { + PROTOCOL_VERSION.to_string() + } + + #[tracing::instrument(skip(self, tx_bytes))] + pub fn send_raw_transaction_impl(&self, tx_bytes: Bytes) -> Result { + let start = Instant::now(); + let (mut tx, hash) = self.state.parse_transaction_bytes(&tx_bytes.0)?; + tx.set_input(tx_bytes.0, hash); + + let submit_res = match self.state.tx_sender.submit_tx(tx) { + Err(err) => { + vlog::debug!("Send raw transaction error {}", err); + metrics::counter!( + "api.submit_tx_error", + 1, + "reason" => err.grafana_error_code() + ); + Err(Web3Error::SubmitTransactionError(err.to_string())) + } + Ok(_) => Ok(hash), + }; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "send_raw_transaction"); + submit_res + } + + #[tracing::instrument(skip(self))] + pub fn accounts_impl(&self) -> Vec
{ + self.state.accounts.keys().cloned().sorted().collect() + } + + #[tracing::instrument(skip(self))] + pub fn syncing_impl(&self) -> SyncState { + SyncState::NotSyncing + } + + #[tracing::instrument(skip(self, typed_filter))] + fn filter_changes( + &self, + typed_filter: TypedFilter, + ) -> Result<(FilterChanges, TypedFilter), Web3Error> { + let method_name = "filter_changes"; + + let res = match typed_filter { + TypedFilter::Blocks(from_block) => { + let (block_hashes, last_block_number) = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_block_hashes_after(from_block, self.state.req_entities_limit) + .map_err(|err| internal_error(method_name, err))?; + ( + FilterChanges::Hashes(block_hashes), + TypedFilter::Blocks(last_block_number.unwrap_or(from_block)), + ) + } + TypedFilter::PendingTransactions(from_timestamp) => { + let (tx_hashes, last_timestamp) = self + .state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .get_pending_txs_hashes_after( + from_timestamp, + Some(self.state.req_entities_limit), + ) + .map_err(|err| internal_error(method_name, err))?; + ( + FilterChanges::Hashes(tx_hashes), + TypedFilter::PendingTransactions(last_timestamp.unwrap_or(from_timestamp)), + ) + } + TypedFilter::Events(filter, from_block) => { + let addresses: Vec<_> = filter + .address + .clone() + .into_iter() + .flat_map(|v| v.0) + .collect(); + if let Some(topics) = filter.topics.as_ref() { + if topics.len() > EVENT_TOPIC_NUMBER_LIMIT { + return Err(Web3Error::TooManyTopics); + } + } + let topics: Vec<_> = filter + .topics + .clone() + .into_iter() + .flatten() + .enumerate() + .filter_map(|(idx, topics)| topics.map(|topics| (idx as u32 + 1, topics.0))) + .collect(); + let get_logs_filter = GetLogsFilter { + from_block, + to_block: filter.to_block, + addresses, + topics, + }; + + let mut storage = self.state.connection_pool.access_storage_blocking(); + + // Check if there are more than `req_entities_limit` 
logs that satisfies filter. + // In this case we should return error and suggest requesting logs with smaller block range. + if let Some(miniblock_number) = storage + .events_web3_dal() + .get_log_block_number(get_logs_filter.clone(), self.state.req_entities_limit) + .map_err(|err| internal_error(method_name, err))? + { + return Err(Web3Error::LogsLimitExceeded( + self.state.req_entities_limit, + from_block.0, + miniblock_number.0 - 1, + )); + } + + let logs = storage + .events_web3_dal() + .get_logs(get_logs_filter, self.state.req_entities_limit) + .map_err(|err| internal_error(method_name, err))?; + let new_from_block = logs + .last() + .map(|log| MiniblockNumber(log.block_number.unwrap().as_u32())) + .unwrap_or(from_block); + ( + FilterChanges::Logs(logs), + TypedFilter::Events(filter, new_from_block), + ) + } + }; + + Ok(res) + } + + #[cfg(feature = "openzeppelin_tests")] + pub fn send_transaction_impl( + &self, + transaction_request: zksync_types::web3::types::TransactionRequest, + ) -> Result { + let nonce = if let Some(nonce) = transaction_request.nonce { + nonce + } else { + self.state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .next_nonce_by_initiator_account(transaction_request.from) + .map_err(|err| internal_error("send_transaction", err))? 
+ }; + let mut eip712_meta = Eip712Meta::default(); + eip712_meta.gas_per_pubdata = U256::from(MAX_GAS_PER_PUBDATA_BYTE); + let transaction_request = TransactionRequest { + nonce, + from: Some(transaction_request.from), + to: transaction_request.to, + value: transaction_request.value.unwrap_or(U256::from(0)), + gas_price: U256::from(FAIR_L2_GAS_PRICE), + gas: transaction_request.gas.unwrap(), + max_priority_fee_per_gas: Some(U256::from(FAIR_L2_GAS_PRICE)), + input: transaction_request.data.unwrap_or_default(), + v: None, + r: None, + s: None, + raw: None, + transaction_type: Some(EIP_712_TX_TYPE.into()), + access_list: None, + eip712_meta: Some(eip712_meta), + chain_id: None, + }; + let transaction_request = self.convert_evm_like_deploy_requests(transaction_request)?; + + let bytes = if let Some(signer) = transaction_request + .from + .and_then(|from| self.state.accounts.get(&from).cloned()) + { + let chain_id = self.state.config.chain.eth.zksync_network_id.into(); + let domain = Eip712Domain::new(chain_id); + let signature = async_std::task::block_on(async { + signer + .sign_typed_data(&domain, &transaction_request) + .await + .map_err(|err| internal_error("send_transaction", err)) + })?; + + let encoded_tx = transaction_request.get_signed_bytes(&signature, chain_id); + Bytes(encoded_tx) + } else { + return Err(internal_error("send_transaction", "Account not found")); + }; + + self.send_raw_transaction_impl(bytes) + } + + #[cfg(feature = "openzeppelin_tests")] + /// Converts EVM-like transaction requests of deploying contracts to zkEVM format. + /// These feature is needed to run openzeppelin tests + /// because they use `truffle` which uses `web3.js` to generate transaction requests. + /// Note, that we can remove this method when ZkSync support + /// will be added for `truffle`. 
+ fn convert_evm_like_deploy_requests( + &self, + mut transaction_request: TransactionRequest, + ) -> Result { + if transaction_request.to.unwrap_or(Address::zero()) == Address::zero() { + transaction_request.to = Some(CONTRACT_DEPLOYER_ADDRESS); + transaction_request.transaction_type = Some(EIP_712_TX_TYPE.into()); + + const BYTECODE_CHUNK_LEN: usize = 32; + + let data = transaction_request.input.0; + let (bytecode, constructor_calldata) = + data.split_at(data.len() / BYTECODE_CHUNK_LEN * BYTECODE_CHUNK_LEN); + let mut bytecode = bytecode.to_vec(); + let mut constructor_calldata = constructor_calldata.to_vec(); + let lock = self.state.known_bytecodes.read().unwrap(); + while !lock.contains(&bytecode) { + if bytecode.len() < BYTECODE_CHUNK_LEN { + return Err(internal_error( + "convert_evm_like_deploy_requests", + "Bytecode not found", + )); + } + let (new_bytecode, new_constructor_part) = + bytecode.split_at(bytecode.len() - BYTECODE_CHUNK_LEN); + constructor_calldata = new_constructor_part + .iter() + .chain(constructor_calldata.iter()) + .cloned() + .collect(); + bytecode = new_bytecode.to_vec(); + } + drop(lock); + + let mut eip712_meta = Eip712Meta::default(); + eip712_meta.gas_per_pubdata = U256::from(MAX_GAS_PER_PUBDATA_BYTE); + eip712_meta.factory_deps = Some(vec![bytecode.clone()]); + transaction_request.eip712_meta = Some(eip712_meta); + + let salt = H256::zero(); + let bytecode_hash = hash_bytecode(&bytecode); + + let deployer = zksync_contracts::deployer_contract(); + transaction_request.input = Bytes( + deployer + .function("create") + .unwrap() + .encode_input(&[ + salt.into_token(), + bytecode_hash.into_token(), + constructor_calldata.into_token(), + ]) + .unwrap(), + ); + } + Ok(transaction_request) + } +} + +// Bogus methods. +// They are moved into a separate `impl` block so they don't make the actual implementation noisy. 
+// This `impl` block contains methods that we *have* to implement for compliance, but don't really +// make sense in terms in L2. +impl EthNamespace { + pub fn coinbase_impl(&self) -> Address { + // There is no coinbase account. + Address::default() + } + + pub fn compilers_impl(&self) -> Vec { + // This node doesn't support compilation. + Vec::new() + } + + pub fn uncle_count_impl(&self, _block: BlockId) -> Option { + // We don't have uncles in zkSync. + Some(0.into()) + } + + pub fn hashrate_impl(&self) -> U256 { + // zkSync is not a PoW chain. + U256::zero() + } + + pub fn mining_impl(&self) -> bool { + // zkSync is not a PoW chain. + false + } + + // List of methods that are not supported at all: + // + // - `sign`. + // - `submit_hashrate`. + // - `submit_work`. + // - `compile_lll`. + // - `compile_solidity`. + // - `compile_serpent`. +} diff --git a/core/bin/zksync_core/src/api_server/web3/namespaces/eth_subscribe.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/eth_subscribe.rs new file mode 100644 index 000000000000..9601cd889254 --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/eth_subscribe.rs @@ -0,0 +1,118 @@ +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; +use zksync_types::web3::types::H128; +use zksync_web3_decl::types::{PubSubFilter, PubSubResult}; + +use jsonrpc_core::error::{Error, ErrorCode}; +use jsonrpc_pubsub::typed; +use jsonrpc_pubsub::SubscriptionId; + +use super::eth::EVENT_TOPIC_NUMBER_LIMIT; + +pub type SubscriptionMap = Arc>>; + +#[derive(Debug, Clone, Default)] +pub struct EthSubscribe { + pub active_block_subs: SubscriptionMap>, + pub active_tx_subs: SubscriptionMap>, + pub active_log_subs: SubscriptionMap<(typed::Sink, PubSubFilter)>, +} + +impl EthSubscribe { + fn assign_id( + subscriber: typed::Subscriber, + ) -> (typed::Sink, SubscriptionId) { + let id = H128::random(); + let sub_id = SubscriptionId::String(format!("0x{}", hex::encode(id.0))); + let sink = 
subscriber.assign_id(sub_id.clone()).unwrap(); + (sink, sub_id) + } + + fn reject(subscriber: typed::Subscriber) { + subscriber + .reject(Error { + code: ErrorCode::InvalidParams, + message: "Rejecting subscription - invalid parameters provided.".into(), + data: None, + }) + .unwrap(); + } + + #[tracing::instrument(skip(self, subscriber, params))] + pub fn sub( + &self, + subscriber: typed::Subscriber, + sub_type: String, + params: Option, + ) { + let mut block_subs = self.active_block_subs.write().unwrap(); + let mut tx_subs = self.active_tx_subs.write().unwrap(); + let mut log_subs = self.active_log_subs.write().unwrap(); + match sub_type.as_str() { + "newHeads" => { + let (sink, id) = Self::assign_id(subscriber); + block_subs.insert(id, sink); + } + "newPendingTransactions" => { + let (sink, id) = Self::assign_id(subscriber); + tx_subs.insert(id, sink); + } + "logs" => { + let filter = params.map(serde_json::from_value).transpose(); + match filter { + Ok(filter) => { + let filter: PubSubFilter = filter.unwrap_or_default(); + if filter + .topics + .as_ref() + .map(|topics| topics.len()) + .unwrap_or(0) + > EVENT_TOPIC_NUMBER_LIMIT + { + Self::reject(subscriber); + } else { + let (sink, id) = Self::assign_id(subscriber); + log_subs.insert(id, (sink, filter)); + } + } + Err(_) => Self::reject(subscriber), + } + } + "syncing" => { + let (sink, _) = Self::assign_id(subscriber); + let _ = sink.notify(Ok(PubSubResult::Syncing(false))); + } + _ => Self::reject(subscriber), + }; + + metrics::gauge!("api.web3.pubsub.active_subscribers", block_subs.len() as f64, "subscription_type" => "blocks"); + metrics::gauge!("api.web3.pubsub.active_subscribers", tx_subs.len() as f64, "subscription_type" => "txs"); + metrics::gauge!("api.web3.pubsub.active_subscribers", log_subs.len() as f64, "subscription_type" => "logs"); + } + + #[tracing::instrument(skip(self))] + pub fn unsub(&self, id: SubscriptionId) -> Result { + let removed = self + .active_block_subs + .write() + .unwrap() + 
.remove(&id) + .or_else(|| self.active_tx_subs.write().unwrap().remove(&id)) + .or_else(|| { + self.active_log_subs + .write() + .unwrap() + .remove(&id) + .map(|(sink, _)| sink) + }); + if removed.is_some() { + Ok(true) + } else { + Err(Error { + code: ErrorCode::InvalidParams, + message: "Invalid subscription.".into(), + data: None, + }) + } + } +} diff --git a/core/bin/zksync_core/src/api_server/web3/namespaces/mod.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/mod.rs new file mode 100644 index 000000000000..b008c8181b2b --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/mod.rs @@ -0,0 +1,24 @@ +//! Actual implementation of Web3 API namespaces logic, not tied to the backend +//! used to create a JSON RPC server. + +pub mod eth; +pub mod eth_subscribe; +pub mod net; +pub mod web3; +pub mod zks; + +use num::{rational::Ratio, BigUint}; +use zksync_types::U256; +use zksync_utils::{biguint_to_u256, u256_to_biguint}; + +pub use self::{ + eth::EthNamespace, eth_subscribe::EthSubscribe, net::NetNamespace, web3::Web3Namespace, + zks::ZksNamespace, +}; + +pub fn scale_u256(val: U256, scale_factor: &Ratio) -> U256 { + let val_as_ratio = &Ratio::from_integer(u256_to_biguint(val)); + let result = (val_as_ratio * scale_factor).ceil(); + + biguint_to_u256(result.to_integer()) +} diff --git a/core/bin/zksync_core/src/api_server/web3/namespaces/net.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/net.rs new file mode 100644 index 000000000000..8616eac62a3f --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/net.rs @@ -0,0 +1,20 @@ +use crate::api_server::web3::get_config; + +use zksync_types::U256; + +#[derive(Debug, Clone)] +pub struct NetNamespace; + +impl NetNamespace { + pub fn version_impl(&self) -> String { + get_config().chain.eth.zksync_network_id.to_string() + } + + pub fn peer_count_impl(&self) -> U256 { + 0.into() + } + + pub fn is_listening_impl(&self) -> bool { + false + } +} diff --git 
a/core/bin/zksync_core/src/api_server/web3/namespaces/web3.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/web3.rs new file mode 100644 index 000000000000..10fa6faf778b --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/web3.rs @@ -0,0 +1,12 @@ +#[derive(Debug, Clone)] +pub struct Web3Namespace; + +impl Web3Namespace { + #[tracing::instrument(skip(self))] + pub fn client_version_impl(&self) -> String { + "zkSync/v2.0".to_string() + } +} + +// `sha3` method is intentionally not implemented for the main server implementation: +// it can easily be implemented on the user side. diff --git a/core/bin/zksync_core/src/api_server/web3/namespaces/zks.rs b/core/bin/zksync_core/src/api_server/web3/namespaces/zks.rs new file mode 100644 index 000000000000..24dc0bbb122f --- /dev/null +++ b/core/bin/zksync_core/src/api_server/web3/namespaces/zks.rs @@ -0,0 +1,486 @@ +use bigdecimal::{BigDecimal, Zero}; +use core::convert::TryInto; +use std::collections::HashMap; +use std::time::Instant; + +use zksync_mini_merkle_tree::mini_merkle_tree_proof; +use zksync_types::{ + api::{BridgeAddresses, GetLogsFilter, L2ToL1LogProof, TransactionDetails, U64}, + commitment::CommitmentSerializable, + explorer_api::BlockDetails, + fee::Fee, + l2::L2Tx, + l2_to_l1_log::L2ToL1Log, + tokens::ETHEREUM_ADDRESS, + transaction_request::CallRequest, + vm_trace::{ContractSourceDebugInfo, VmDebugTrace}, + L1BatchNumber, MiniblockNumber, FAIR_L2_GAS_PRICE, L1_MESSENGER_ADDRESS, L2_ETH_TOKEN_ADDRESS, + MAX_GAS_PER_PUBDATA_BYTE, U256, +}; +use zksync_utils::address_to_h256; +use zksync_web3_decl::{ + error::Web3Error, + types::{Address, Token, H256}, +}; + +use crate::api_server::web3::{backend_jsonrpc::error::internal_error, RpcState}; +use crate::fee_ticker::{error::TickerError, TokenPriceRequestType}; + +#[cfg(feature = "openzeppelin_tests")] +use zksync_types::Bytes; + +#[derive(Debug, Clone)] +pub struct ZksNamespace { + pub state: RpcState, +} + +impl ZksNamespace { + pub fn 
new(state: RpcState) -> Self { + Self { state } + } + + #[tracing::instrument(skip(self, request))] + pub fn estimate_fee_impl(&self, request: CallRequest) -> Result { + let start = Instant::now(); + + let mut tx: L2Tx = request.try_into().map_err(Web3Error::SerializationError)?; + + // When we're estimating fee, we are trying to deduce values related to fee, so we should + // not consider provided ones. + tx.common_data.fee.max_fee_per_gas = FAIR_L2_GAS_PRICE.into(); + tx.common_data.fee.max_priority_fee_per_gas = FAIR_L2_GAS_PRICE.into(); + tx.common_data.fee.gas_per_pubdata_limit = MAX_GAS_PER_PUBDATA_BYTE.into(); + + let scale_factor = self + .state + .config + .api + .web3_json_rpc + .estimate_gas_scale_factor; + let acceptable_overestimation = self + .state + .config + .api + .web3_json_rpc + .estimate_gas_acceptable_overestimation; + + let fee = self + .state + .tx_sender + .get_txs_fee_in_wei(tx, scale_factor, acceptable_overestimation) + .map_err(|err| Web3Error::SubmitTransactionError(err.to_string()))?; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "estimate_fee"); + Ok(fee) + } + + #[tracing::instrument(skip(self))] + pub fn get_main_contract_impl(&self) -> Address { + self.state.config.contracts.diamond_proxy_addr + } + + #[tracing::instrument(skip(self))] + pub fn get_testnet_paymaster_impl(&self) -> Option
{ + self.state.config.contracts.l2_testnet_paymaster_addr + } + + #[tracing::instrument(skip(self))] + pub fn get_bridge_contracts_impl(&self) -> BridgeAddresses { + BridgeAddresses { + l1_erc20_default_bridge: self.state.config.contracts.l1_erc20_bridge_proxy_addr, + l2_erc20_default_bridge: self.state.config.contracts.l2_erc20_bridge_addr, + } + } + + #[tracing::instrument(skip(self))] + pub fn l1_chain_id_impl(&self) -> U64 { + U64::from(*self.state.config.chain.eth.network.chain_id()) + } + + #[tracing::instrument(skip(self))] + pub fn get_confirmed_tokens_impl(&self, from: u32, limit: u8) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_confirmed_tokens"; + + let tokens = self + .state + .connection_pool + .access_storage_blocking() + .tokens_web3_dal() + .get_well_known_tokens() + .map_err(|err| internal_error(endpoint_name, err))? + .into_iter() + .skip(from as usize) + .take(limit.into()) + .map(|token_info| Token { + l1_address: token_info.l1_address, + l2_address: token_info.l2_address, + name: token_info.metadata.name, + symbol: token_info.metadata.symbol, + decimals: token_info.metadata.decimals, + }) + .collect(); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(tokens) + } + + #[tracing::instrument(skip(self))] + pub fn get_token_price_impl(&self, l2_token: Address) -> Result { + let start = Instant::now(); + let endpoint_name = "get_token_price"; + + let result = match self + .state + .tx_sender + .token_price(TokenPriceRequestType::USDForOneToken, l2_token) + { + Ok(price) => Ok(price), + Err(TickerError::PriceNotTracked(_)) => Ok(BigDecimal::zero()), + Err(err) => Err(internal_error(endpoint_name, err)), + }; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + result + } + + // This method is currently to be used for internal debug purposes only. 
+ // It should be reworked for being public (validate contract info and maybe store it elsewhere). + #[tracing::instrument(skip(self, info))] + pub fn set_contract_debug_info_impl( + &self, + address: Address, + info: ContractSourceDebugInfo, + ) -> bool { + let start = Instant::now(); + + self.state + .connection_pool + .access_storage_blocking() + .storage_dal() + .set_contract_source(address, info); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "set_contract_debug_info"); + true + } + + #[tracing::instrument(skip(self))] + pub fn get_contract_debug_info_impl( + &self, + address: Address, + ) -> Option { + let start = Instant::now(); + + let info = self + .state + .connection_pool + .access_storage_blocking() + .storage_dal() + .get_contract_source(address); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "get_contract_debug_info"); + info + } + + #[tracing::instrument(skip(self))] + pub fn get_transaction_trace_impl(&self, hash: H256) -> Option { + let start = Instant::now(); + let mut storage = self.state.connection_pool.access_storage_blocking(); + let trace = storage.transactions_dal().get_trace(hash); + let result = trace.map(|trace| { + let mut storage_dal = storage.storage_dal(); + let mut sources = HashMap::new(); + for address in trace.contracts { + let source = storage_dal.get_contract_source(address); + sources.insert(address, source); + } + VmDebugTrace { + steps: trace.steps, + sources, + } + }); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => "get_transaction_trace"); + result + } + + #[tracing::instrument(skip(self))] + pub fn get_all_account_balances_impl( + &self, + address: Address, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_all_balances"; + + let balances = self + .state + .connection_pool + .access_storage_blocking() + .explorer() + .accounts_dal() + .get_balances_for_address(address) + .map_err(|err| internal_error(endpoint_name, 
err))? + .into_iter() + .map(|(address, balance_item)| { + if address == L2_ETH_TOKEN_ADDRESS { + (ETHEREUM_ADDRESS, balance_item.balance) + } else { + (address, balance_item.balance) + } + }) + .collect(); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(balances) + } + + #[tracing::instrument(skip(self))] + pub fn get_l2_to_l1_msg_proof_impl( + &self, + block_number: MiniblockNumber, + sender: Address, + msg: H256, + l2_log_position: Option, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_l2_to_l1_msg_proof"; + + let mut storage = self.state.connection_pool.access_storage_blocking(); + let l1_batch_number = match storage + .blocks_web3_dal() + .get_l1_batch_number_of_miniblock(block_number) + .map_err(|err| internal_error(endpoint_name, err))? + { + Some(number) => number, + None => return Ok(None), + }; + let (first_miniblock_of_l1_batch, _) = storage + .blocks_web3_dal() + .get_miniblock_range_of_l1_batch(l1_batch_number) + .map_err(|err| internal_error(endpoint_name, err))? + .expect("L1 batch should contain at least one miniblock"); + + let all_l1_logs_in_block = storage + .blocks_web3_dal() + .get_l2_to_l1_logs(l1_batch_number) + .map_err(|err| internal_error(endpoint_name, err))?; + + // Position of l1 log in block relative to logs with identical data + let l1_log_relative_position = if let Some(l2_log_position) = l2_log_position { + let pos = storage + .events_web3_dal() + .get_logs( + GetLogsFilter { + from_block: first_miniblock_of_l1_batch, + to_block: Some(block_number.0.into()), + addresses: vec![L1_MESSENGER_ADDRESS], + topics: vec![(2, vec![address_to_h256(&sender)]), (3, vec![msg])], + }, + self.state.req_entities_limit, + ) + .map_err(|err| internal_error(endpoint_name, err))? 
+ .iter() + .position(|event| { + event.block_number == Some(block_number.0.into()) + && event.log_index == Some(l2_log_position.into()) + }); + match pos { + Some(pos) => pos, + None => { + return Ok(None); + } + } + } else { + 0 + }; + + let l1_log_index = match all_l1_logs_in_block + .iter() + .enumerate() + .filter(|(_, log)| { + log.sender == L1_MESSENGER_ADDRESS + && log.key == address_to_h256(&sender) + && log.value == msg + }) + .nth(l1_log_relative_position) + { + Some(nth_elem) => nth_elem.0, + None => { + return Ok(None); + } + }; + let values: Vec> = all_l1_logs_in_block + .into_iter() + .map(|a| a.to_bytes()) + .collect(); + let mut proof: Vec = mini_merkle_tree_proof( + values, + l1_log_index, + L2ToL1Log::SERIALIZED_SIZE, + L2ToL1Log::limit_per_block(), + ) + .into_iter() + .map(|elem| H256::from_slice(&elem)) + .collect(); + let root = proof.pop().unwrap(); + let msg_proof = L2ToL1LogProof { + proof, + root, + id: l1_log_index as u32, + }; + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(Some(msg_proof)) + } + + #[tracing::instrument(skip(self))] + pub fn get_l2_to_l1_log_proof_impl( + &self, + tx_hash: H256, + index: Option, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_l2_to_l1_msg_proof"; + + let mut storage = self.state.connection_pool.access_storage_blocking(); + let (l1_batch_number, l1_batch_tx_index) = match storage + .blocks_web3_dal() + .get_l1_batch_info_for_tx(tx_hash) + .map_err(|err| internal_error(endpoint_name, err))? 
+ { + Some(x) => x, + None => return Ok(None), + }; + + let all_l1_logs_in_block = storage + .blocks_web3_dal() + .get_l2_to_l1_logs(l1_batch_number) + .map_err(|err| internal_error(endpoint_name, err))?; + + let l1_log_index = match all_l1_logs_in_block + .iter() + .enumerate() + .filter(|(_, log)| log.tx_number_in_block == l1_batch_tx_index) + .nth(index.unwrap_or(0)) + { + Some(nth_elem) => nth_elem.0, + None => { + return Ok(None); + } + }; + + let values: Vec> = all_l1_logs_in_block + .into_iter() + .map(|a| a.to_bytes()) + .collect(); + let mut proof: Vec = mini_merkle_tree_proof( + values, + l1_log_index, + L2ToL1Log::SERIALIZED_SIZE, + L2ToL1Log::limit_per_block(), + ) + .into_iter() + .map(|elem| H256::from_slice(&elem)) + .collect(); + let root = proof.pop().unwrap(); + + let msg_proof = L2ToL1LogProof { + proof, + root, + id: l1_log_index as u32, + }; + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + Ok(Some(msg_proof)) + } + + #[tracing::instrument(skip(self))] + pub fn get_l1_batch_number_impl(&self) -> Result { + let start = Instant::now(); + let endpoint_name = "get_l1_batch_number"; + + let l1_batch_number = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_sealed_l1_batch_number() + .map(|n| U64::from(n.0)) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "endpoint" => endpoint_name); + l1_batch_number + } + + #[tracing::instrument(skip(self))] + pub fn get_miniblock_range_impl( + &self, + batch: L1BatchNumber, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_miniblock_range"; + + let minmax = self + .state + .connection_pool + .access_storage_blocking() + .blocks_web3_dal() + .get_miniblock_range_of_l1_batch(batch) + .map(|minmax| minmax.map(|(min, max)| (U64::from(min.0), U64::from(max.0)))) + .map_err(|err| internal_error(endpoint_name, err)); + + 
metrics::histogram!("api.web3.call", start.elapsed(), "endpoint" => endpoint_name); + minmax + } + + #[tracing::instrument(skip(self))] + pub fn get_block_details_impl( + &self, + block_number: MiniblockNumber, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_block_details"; + + let block_details = self + .state + .connection_pool + .access_storage_blocking() + .explorer() + .blocks_dal() + .get_block_details(block_number) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + + block_details + } + + #[tracing::instrument(skip(self))] + pub fn get_transaction_details_impl( + &self, + hash: H256, + ) -> Result, Web3Error> { + let start = Instant::now(); + let endpoint_name = "get_transaction_details"; + + let tx_details = self + .state + .connection_pool + .access_storage_blocking() + .transactions_web3_dal() + .get_transaction_details(hash) + .map_err(|err| internal_error(endpoint_name, err)); + + metrics::histogram!("api.web3.call", start.elapsed(), "method" => endpoint_name); + + tx_details + } + + #[cfg(feature = "openzeppelin_tests")] + /// Saves contract bytecode to memory. 
+    pub fn set_known_bytecode_impl(&self, bytecode: Bytes) -> bool {
+        let mut lock = self.state.known_bytecodes.write().unwrap();
+        // `bytecode` is owned, so its inner bytes can be moved into the set
+        // directly; the previous `.clone()` here was redundant.
+        lock.insert(bytecode.0);
+
+        true
+    }
+}
diff --git a/core/bin/zksync_core/src/api_server/web3/pubsub_notifier.rs b/core/bin/zksync_core/src/api_server/web3/pubsub_notifier.rs
new file mode 100644
index 000000000000..51d2600d519c
--- /dev/null
+++ b/core/bin/zksync_core/src/api_server/web3/pubsub_notifier.rs
@@ -0,0 +1,137 @@
+use jsonrpc_pubsub::typed;
+use tokio::sync::watch;
+use tokio::time::{interval, Duration, Instant};
+
+use zksync_dal::ConnectionPool;
+use zksync_types::MiniblockNumber;
+use zksync_web3_decl::types::{PubSubFilter, PubSubResult};
+
+use super::namespaces::eth_subscribe::SubscriptionMap;
+
+/// Polls the DB every `polling_interval` for newly sealed miniblocks and pushes
+/// their headers to every `newHeads` subscriber. Runs until `stop_receiver`
+/// flips to `true`.
+pub async fn notify_blocks(
+    subscribers: SubscriptionMap<typed::Sink<PubSubResult>>,
+    connection_pool: ConnectionPool,
+    polling_interval: Duration,
+    stop_receiver: watch::Receiver<bool>,
+) {
+    let mut last_block_number = connection_pool
+        .access_storage()
+        .await
+        .blocks_web3_dal()
+        .get_sealed_miniblock_number()
+        .unwrap();
+    let mut timer = interval(polling_interval);
+    loop {
+        if *stop_receiver.borrow() {
+            vlog::info!("Stop signal received, pubsub_block_notifier is shutting down");
+            break;
+        }
+
+        timer.tick().await;
+
+        let start = Instant::now();
+        let new_blocks = connection_pool
+            .access_storage()
+            .await
+            .blocks_web3_dal()
+            .get_block_headers_after(last_block_number)
+            .unwrap();
+        metrics::histogram!("api.web3.pubsub.db_poll_latency", start.elapsed(), "subscription_type" => "blocks");
+        if !new_blocks.is_empty() {
+            last_block_number =
+                MiniblockNumber(new_blocks.last().unwrap().number.unwrap().as_u32());
+            let start = Instant::now();
+            for sink in subscribers.read().unwrap().values() {
+                // Clone each header only when notifying, instead of cloning the
+                // whole `new_blocks` vector once per subscriber.
+                for block in &new_blocks {
+                    let _ = sink.notify(Ok(PubSubResult::Header(block.clone())));
+                    metrics::counter!("api.web3.pubsub.notify", 1, "subscription_type" => "blocks");
+                }
+            }
+            metrics::histogram!("api.web3.pubsub.notify_subscribers_latency", start.elapsed(), "subscription_type" => "blocks");
+        }
+    }
+}
+
+/// Polls the DB for transactions that entered the mempool after the previous
+/// poll and pushes their hashes to every `newPendingTransactions` subscriber.
+pub async fn notify_txs(
+    subscribers: SubscriptionMap<typed::Sink<PubSubResult>>,
+    connection_pool: ConnectionPool,
+    polling_interval: Duration,
+    stop_receiver: watch::Receiver<bool>,
+) {
+    let mut last_time = chrono::Utc::now().naive_utc();
+    let mut timer = interval(polling_interval);
+    loop {
+        if *stop_receiver.borrow() {
+            vlog::info!("Stop signal received, pubsub_tx_notifier is shutting down");
+            break;
+        }
+
+        timer.tick().await;
+
+        let start = Instant::now();
+        let (new_txs, new_last_time) = connection_pool
+            .access_storage()
+            .await
+            .transactions_web3_dal()
+            .get_pending_txs_hashes_after(last_time, None)
+            .unwrap();
+        metrics::histogram!("api.web3.pubsub.db_poll_latency", start.elapsed(), "subscription_type" => "txs");
+        if let Some(new_last_time) = new_last_time {
+            last_time = new_last_time;
+            let start = Instant::now();
+            for sink in subscribers.read().unwrap().values() {
+                // Hashes are `Copy`; iterate by reference instead of cloning the
+                // whole vector for every subscriber.
+                for tx_hash in new_txs.iter().copied() {
+                    let _ = sink.notify(Ok(PubSubResult::TxHash(tx_hash)));
+                    metrics::counter!("api.web3.pubsub.notify", 1, "subscription_type" => "txs");
+                }
+            }
+            metrics::histogram!("api.web3.pubsub.notify_subscribers_latency", start.elapsed(), "subscription_type" => "txs");
+        }
+    }
+}
+
+/// Polls the DB for new events and pushes every log matching a subscriber's
+/// filter to that subscriber.
+pub async fn notify_logs(
+    subscribers: SubscriptionMap<(typed::Sink<PubSubResult>, PubSubFilter)>,
+    connection_pool: ConnectionPool,
+    polling_interval: Duration,
+    stop_receiver: watch::Receiver<bool>,
+) {
+    let mut last_block_number = connection_pool
+        .access_storage()
+        .await
+        .blocks_web3_dal()
+        .get_sealed_miniblock_number()
+        .unwrap();
+    let mut timer = interval(polling_interval);
+    loop {
+        if *stop_receiver.borrow() {
+            vlog::info!("Stop signal received, pubsub_logs_notifier is shutting down");
+            break;
+        }
+
+        timer.tick().await;
+
+        let start = Instant::now();
+        let new_logs = connection_pool
+            .access_storage()
+            .await
+            .events_web3_dal()
+            .get_all_logs(last_block_number)
+            .unwrap();
+        metrics::histogram!("api.web3.pubsub.db_poll_latency", start.elapsed(), "subscription_type" => "logs");
+        if !new_logs.is_empty() {
+            last_block_number =
+                MiniblockNumber(new_logs.last().unwrap().block_number.unwrap().as_u32());
+            let start = Instant::now();
+            for (sink, filter) in subscribers.read().unwrap().values() {
+                // Clone only the logs that actually match this subscriber's filter,
+                // instead of cloning the whole `new_logs` vector per subscriber.
+                for log in &new_logs {
+                    if filter.matches(log) {
+                        let _ = sink.notify(Ok(PubSubResult::Log(log.clone())));
+                        metrics::counter!("api.web3.pubsub.notify", 1, "subscription_type" => "logs");
+                    }
+                }
+            }
+            metrics::histogram!("api.web3.pubsub.notify_subscribers_latency", start.elapsed(), "subscription_type" => "logs");
+        }
+    }
+}
diff --git a/core/bin/zksync_core/src/api_server/web3/state.rs b/core/bin/zksync_core/src/api_server/web3/state.rs
new file mode 100644
index 000000000000..58a1c73d655d
--- /dev/null
+++ b/core/bin/zksync_core/src/api_server/web3/state.rs
@@ -0,0 +1,162 @@
+use std::collections::HashMap;
+#[cfg(feature = "openzeppelin_tests")]
+use std::collections::HashSet;
+use std::convert::TryInto;
+use std::sync::Arc;
+
+use std::sync::RwLock;
+
+use crate::api_server::tx_sender::TxSender;
+use crate::api_server::web3::backend_jsonrpc::error::internal_error;
+
+use zksync_config::ZkSyncConfig;
+use zksync_dal::ConnectionPool;
+use zksync_eth_signer::PrivateKeySigner;
+use zksync_types::api::{self, TransactionRequest};
+use zksync_types::{l2::L2Tx, Address, MiniblockNumber, H256, U256, U64};
+use zksync_web3_decl::{
+    error::Web3Error,
+    types::{Filter, TypedFilter},
+};
+
+/// Holder for the data required for the API to be functional.
+#[derive(Debug, Clone)]
+pub struct RpcState {
+    // Filters installed via the `eth_newFilter` family, keyed by random `U256` ids.
+    pub installed_filters: Arc<RwLock<Filters>>,
+    pub connection_pool: ConnectionPool,
+    pub tx_sender: TxSender,
+    // Upper bound on the number of entities returned by list endpoints.
+    pub req_entities_limit: usize,
+    pub config: &'static ZkSyncConfig,
+    pub accounts: HashMap<Address, PrivateKeySigner>,
+    #[cfg(feature = "openzeppelin_tests")]
+    pub known_bytecodes: Arc<RwLock<HashSet<Vec<u8>>>>,
+}
+
+impl RpcState {
+    /// Decodes raw transaction bytes into an `L2Tx` plus its hash, using the
+    /// zkSync chain id from the config.
+    pub fn parse_transaction_bytes(&self, bytes: &[u8]) -> Result<(L2Tx, H256), Web3Error> {
+        let chain_id = self.config.chain.eth.zksync_network_id;
+        let (tx_request, hash) = TransactionRequest::from_bytes(bytes, chain_id)?;
+
+        Ok((tx_request.try_into()?, hash))
+    }
+
+    /// Converts a `U64` block number into a `MiniblockNumber`, saturating at
+    /// `u32::MAX` for values that do not fit into `u32`.
+    pub fn u64_to_block_number(n: U64) -> MiniblockNumber {
+        if n.as_u64() > u32::MAX as u64 {
+            MiniblockNumber(u32::MAX)
+        } else {
+            MiniblockNumber(n.as_u32())
+        }
+    }
+
+    /// Resolves an optional symbolic block number to a concrete miniblock
+    /// number; `None` is treated as `Latest`. Explicit numeric values are
+    /// converted locally without a storage round trip.
+    pub fn resolve_filter_block_number(
+        &self,
+        block_number: Option<api::BlockNumber>,
+    ) -> Result<MiniblockNumber, Web3Error> {
+        let method_name = "resolve_filter_block_number";
+        // Fast path: numeric ids need no DB access. The remaining symbolic ids
+        // share a single resolution path (the original duplicated the whole
+        // storage-access chain for the `None` and `Some(symbolic)` arms).
+        let block_id = match block_number {
+            Some(api::BlockNumber::Number(number)) => {
+                return Ok(Self::u64_to_block_number(number));
+            }
+            Some(block_number) => api::BlockId::Number(block_number),
+            None => api::BlockId::Number(api::BlockNumber::Latest),
+        };
+        let block_number = self
+            .connection_pool
+            .access_storage_blocking()
+            .blocks_web3_dal()
+            .resolve_block_id(block_id)
+            .map_err(|err| internal_error(method_name, err))?
+            // Symbolic ids (`Latest`, `Pending`, ...) always resolve to some block.
+            .unwrap();
+        Ok(block_number)
+    }
+
+    /// Resolves the `(from_block, to_block)` bounds of `filter` to concrete
+    /// miniblock numbers.
+    pub fn resolve_filter_block_range(
+        &self,
+        filter: &Filter,
+    ) -> Result<(MiniblockNumber, MiniblockNumber), Web3Error> {
+        let from_block = self.resolve_filter_block_number(filter.from_block)?;
+        let to_block = self.resolve_filter_block_number(filter.to_block)?;
+        Ok((from_block, to_block))
+    }
+
+    /// Returns initial `from_block` for filter.
+    /// It is equal to max(filter.from_block, PENDING_BLOCK).
+    pub fn get_filter_from_block(&self, filter: &Filter) -> Result<MiniblockNumber, Web3Error> {
+        let method_name = "get_filter_from_block";
+        let pending_block = self
+            .connection_pool
+            .access_storage_blocking()
+            .blocks_web3_dal()
+            .resolve_block_id(api::BlockId::Number(api::BlockNumber::Pending))
+            .map_err(|err| internal_error(method_name, err))?
+            .expect("Pending block number shouldn't be None");
+        let block_number = match filter.from_block {
+            Some(api::BlockNumber::Number(number)) => {
+                let block_number = Self::u64_to_block_number(number);
+                block_number.max(pending_block)
+            }
+            _ => pending_block,
+        };
+        Ok(block_number)
+    }
+}
+
+/// Contains mapping from index to `Filter` with optional location.
+#[derive(Default, Debug, Clone)]
+pub struct Filters {
+    // Installed filters, keyed by the random id handed out by `add`.
+    state: HashMap<U256, TypedFilter>,
+    // Maximum number of filters kept; exceeding it evicts an arbitrary entry.
+    max_cap: usize,
+}
+
+impl Filters {
+    /// Instantiates `Filters` with given max capacity.
+    pub fn new(max_cap: usize) -> Self {
+        Self {
+            state: Default::default(),
+            max_cap,
+        }
+    }
+
+    /// Adds filter to the state and returns its key.
+    ///
+    /// When the capacity limit is exceeded, an arbitrary *other* entry is
+    /// evicted so that the returned key stays usable. (The previous
+    /// implementation removed `keys().next()` — an arbitrary `HashMap` key that
+    /// could be the entry just inserted, handing back a dangling key.)
+    pub fn add(&mut self, filter: TypedFilter) -> U256 {
+        // Draw random ids until an unused one is found.
+        let idx = loop {
+            let val = H256::random().to_fixed_bytes().into();
+            if !self.state.contains_key(&val) {
+                break val;
+            }
+        };
+        self.state.insert(idx, filter);
+
+        // Check if we reached max capacity.
+        if self.state.len() > self.max_cap {
+            let victim = self
+                .state
+                .keys()
+                .find(|&&key| key != idx)
+                .copied()
+                // Degenerate `max_cap == 0` case: the fresh entry is the only one.
+                .unwrap_or(idx);
+            self.remove(victim);
+        }
+
+        idx
+    }
+
+    /// Retrieves filter from the state.
+    pub fn get(&self, index: U256) -> Option<&TypedFilter> {
+        self.state.get(&index)
+    }
+
+    /// Updates filter in the state; returns `false` if `index` is not installed.
+    pub fn update(&mut self, index: U256, new_filter: TypedFilter) -> bool {
+        if let Some(typed_filter) = self.state.get_mut(&index) {
+            *typed_filter = new_filter;
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Removes filter from the map.
+ pub fn remove(&mut self, index: U256) -> bool { + self.state.remove(&index).is_some() + } +} diff --git a/core/bin/zksync_core/src/bin/block_reverter.rs b/core/bin/zksync_core/src/bin/block_reverter.rs new file mode 100644 index 000000000000..6cc55db8c720 --- /dev/null +++ b/core/bin/zksync_core/src/bin/block_reverter.rs @@ -0,0 +1,475 @@ +use std::path::Path; +use std::thread::sleep; +use std::time::Duration; +use structopt::StructOpt; + +use zksync_config::ZkSyncConfig; +use zksync_contracts::zksync_contract; +use zksync_dal::ConnectionPool; +use zksync_eth_client::clients::http_client::{EthInterface, EthereumClient}; +use zksync_merkle_tree::ZkSyncTree; +use zksync_state::secondary_storage::SecondaryStateStorage; +use zksync_storage::db::Database; +use zksync_storage::RocksDB; +use zksync_types::aggregated_operations::AggregatedActionType; +use zksync_types::ethabi::Token; +use zksync_types::web3::contract::Options; +use zksync_types::{L1BatchNumber, H256, U256}; + +struct BlockReverter { + config: ZkSyncConfig, + connection_pool: ConnectionPool, +} + +impl BlockReverter { + /// rollback db(postgres + rocksdb) to previous state + async fn rollback_db( + &mut self, + last_l1_batch_to_keep: L1BatchNumber, + rollback_postgres: bool, + rollback_tree: bool, + rollback_sk_cache: bool, + ) { + let last_executed_l1_batch = self + .get_l1_batch_number_from_contract(AggregatedActionType::ExecuteBlocks) + .await; + assert!( + last_l1_batch_to_keep >= last_executed_l1_batch, + "Attempt to revert already executed blocks" + ); + + if !rollback_tree && rollback_postgres { + println!("You want to rollback Postgres DB without rolling back tree."); + println!("If tree is not yet rolled back to this block then the only way to make it synced with Postgres will be to completely rebuild it."); + println!("Are you sure? 
Print y/n"); + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + if input.trim() != "y" { + std::process::exit(0); + } + } + + // tree needs to be reverted first to keep state recoverable + self.rollback_rocks_dbs(last_l1_batch_to_keep, rollback_tree, rollback_sk_cache) + .await; + + if rollback_postgres { + self.rollback_postgres(last_l1_batch_to_keep).await; + } + } + + async fn rollback_rocks_dbs( + &mut self, + last_l1_batch_to_keep: L1BatchNumber, + rollback_tree: bool, + rollback_sk_cache: bool, + ) { + println!("getting logs that should be applied to rollback state..."); + let logs = self + .connection_pool + .access_storage() + .await + .storage_logs_dedup_dal() + .get_storage_logs_for_revert(last_l1_batch_to_keep); + + if rollback_tree { + // Rolling back both full tree and lightweight tree + if Path::new(self.config.db.path()).exists() { + println!("Rolling back full tree..."); + self.rollback_tree( + last_l1_batch_to_keep, + logs.clone(), + self.config.db.path.clone(), + ) + .await; + } else { + println!("Full Tree not found; skipping"); + } + + if Path::new(self.config.db.merkle_tree_fast_ssd_path()).exists() { + println!("Rolling back lightweight tree..."); + self.rollback_tree( + last_l1_batch_to_keep, + logs.clone(), + self.config.db.merkle_tree_fast_ssd_path.clone(), + ) + .await; + } else { + println!("Lightweight Tree not found; skipping"); + } + } + + if rollback_sk_cache { + assert!( + Path::new(self.config.db.state_keeper_db_path()).exists(), + "Path with state keeper cache DB doesn't exist" + ); + self.rollback_state_keeper_cache(last_l1_batch_to_keep, logs) + .await; + } + } + + /// reverts blocks in merkle tree + async fn rollback_tree( + &mut self, + last_l1_batch_to_keep: L1BatchNumber, + logs: Vec<(H256, Option)>, + path: impl AsRef, + ) { + let db = RocksDB::new(Database::MerkleTree, path, true); + let mut tree = ZkSyncTree::new(db); + + if tree.block_number() <= last_l1_batch_to_keep.0 { + 
println!("Tree is behind the block to revert to; skipping"); + return; + } + + // Convert H256 -> U256, note that tree keys are encoded using little endianness. + let logs: Vec<_> = logs + .into_iter() + .map(|(key, value)| (U256::from_little_endian(&key.to_fixed_bytes()), value)) + .collect(); + tree.revert_logs(last_l1_batch_to_keep, logs); + + println!("checking match of the tree root hash and root hash from Postgres..."); + let storage_root_hash = self + .connection_pool + .access_storage() + .await + .blocks_dal() + .get_merkle_state_root(last_l1_batch_to_keep) + .expect("failed to fetch root hash for target block"); + let tree_root_hash = tree.root_hash(); + assert_eq!(&tree_root_hash, storage_root_hash.as_bytes()); + + println!("saving tree changes to disk..."); + tree.save().expect("Unable to update tree state"); + } + + /// reverts blocks in state keeper cache + async fn rollback_state_keeper_cache( + &mut self, + last_l1_batch_to_keep: L1BatchNumber, + logs: Vec<(H256, Option)>, + ) { + println!("opening DB with state keeper cache..."); + let db = RocksDB::new( + Database::StateKeeper, + self.config.db.state_keeper_db_path(), + true, + ); + let mut storage = SecondaryStateStorage::new(db); + + if storage.get_l1_batch_number() > last_l1_batch_to_keep + 1 { + println!("getting contracts and factory deps that should be removed..."); + let (_, last_miniblock_to_keep) = self + .connection_pool + .access_storage() + .await + .blocks_dal() + .get_miniblock_range_of_l1_batch(last_l1_batch_to_keep) + .expect("L1 batch should contain at least one miniblock"); + let contracts = self + .connection_pool + .access_storage() + .await + .storage_dal() + .get_contracts_for_revert(last_miniblock_to_keep); + let factory_deps = self + .connection_pool + .access_storage() + .await + .storage_dal() + .get_factory_deps_for_revert(last_miniblock_to_keep); + + println!("rolling back state keeper cache..."); + storage.rollback(logs, contracts, factory_deps, last_l1_batch_to_keep); 
+ } else { + println!("nothing to revert in state keeper cache"); + } + } + + /// reverts data in postgres database + async fn rollback_postgres(&mut self, last_l1_batch_to_keep: L1BatchNumber) { + let (_, last_miniblock_to_keep) = self + .connection_pool + .access_storage() + .await + .blocks_dal() + .get_miniblock_range_of_l1_batch(last_l1_batch_to_keep) + .expect("L1 batch should contain at least one miniblock"); + + println!("rolling back postgres data..."); + let mut storage = self.connection_pool.access_storage().await; + let mut transaction = storage.start_transaction().await; + + println!("rolling back transactions state..."); + transaction + .transactions_dal() + .reset_transactions_state(last_miniblock_to_keep); + println!("rolling back events..."); + transaction + .events_dal() + .rollback_events(last_miniblock_to_keep); + println!("rolling back l2 to l1 logs..."); + transaction + .events_dal() + .rollback_l2_to_l1_logs(last_miniblock_to_keep); + println!("rolling back created tokens..."); + transaction + .tokens_dal() + .rollback_tokens(last_miniblock_to_keep); + println!("rolling back factory deps...."); + transaction + .storage_dal() + .rollback_factory_deps(last_miniblock_to_keep); + println!("rolling back storage..."); + transaction + .storage_logs_dal() + .rollback_storage(last_miniblock_to_keep); + println!("rolling back storage logs..."); + transaction + .storage_logs_dal() + .rollback_storage_logs(last_miniblock_to_keep); + println!("rolling back dedup storage logs..."); + transaction + .storage_logs_dedup_dal() + .rollback_storage_logs(last_l1_batch_to_keep); + println!("rolling back l1 batches..."); + transaction + .blocks_dal() + .delete_l1_batches(last_l1_batch_to_keep); + println!("rolling back miniblocks..."); + transaction + .blocks_dal() + .delete_miniblocks(last_miniblock_to_keep); + + transaction.commit().await; + } + + /// sends revert transaction to L1 + async fn send_ethereum_revert_transaction( + &mut self, + last_l1_batch_to_keep: 
L1BatchNumber, + priority_fee_per_gas: U256, + nonce: u64, + ) { + let eth_gateway = EthereumClient::from_config(&self.config); + let revert_blocks = zksync_contract() + .functions + .get("revertBlocks") + .cloned() + .expect("revertBlocks function not found") + .pop() + .expect("revertBlocks function entry not found"); + let args = vec![Token::Uint(U256::from(last_l1_batch_to_keep.0))]; + let raw_tx = revert_blocks + .encode_input(&args) + .expect("Failed to encode transaction data.") + .to_vec(); + let signed_tx = eth_gateway + .sign_prepared_tx_for_addr( + raw_tx.clone(), + self.config.contracts.diamond_proxy_addr, + Options::with(|opt| { + opt.gas = Some(5_000_000.into()); + opt.max_priority_fee_per_gas = Some(priority_fee_per_gas); + opt.nonce = Some(nonce.into()); + }), + "block-reverter", + ) + .await + .expect("Failed to sign transaction"); + let tx_hash = eth_gateway + .send_raw_tx(signed_tx.raw_tx) + .await + .expect("failed to send revert transaction to L1"); + + loop { + match eth_gateway + .get_tx_status(tx_hash, "block reverter") + .await + .expect("Failed to get tx status from eth node") + { + Some(status) => { + assert!(status.success); + println!("revert transaction has completed"); + return; + } + None => { + println!("waiting for L1 transaction confirmation..."); + sleep(Duration::from_secs(5)); + } + } + } + } + + async fn get_l1_batch_number_from_contract(&self, op: AggregatedActionType) -> L1BatchNumber { + let function_name = match op { + AggregatedActionType::CommitBlocks => "getTotalBlocksCommitted", + AggregatedActionType::PublishProofBlocksOnchain => "getTotalBlocksVerified", + AggregatedActionType::ExecuteBlocks => "getTotalBlocksExecuted", + }; + let eth_gateway = EthereumClient::from_config(&self.config); + let block_number: U256 = eth_gateway + .call_main_contract_function(function_name, (), None, Options::default(), None) + .await + .unwrap(); + L1BatchNumber(block_number.as_u32()) + } + + /// displays suggested values for rollback + 
async fn print_suggested_values(&mut self) { + let eth_gateway = EthereumClient::from_config(&self.config); + let last_committed_l1_batch_number = self + .get_l1_batch_number_from_contract(AggregatedActionType::CommitBlocks) + .await; + let last_verified_l1_batch_number = self + .get_l1_batch_number_from_contract(AggregatedActionType::PublishProofBlocksOnchain) + .await; + let last_executed_l1_batch_number = self + .get_l1_batch_number_from_contract(AggregatedActionType::ExecuteBlocks) + .await; + println!( + "Last L1 batch numbers on contract: committed {}, verified {}, executed {}", + last_committed_l1_batch_number, + last_verified_l1_batch_number, + last_executed_l1_batch_number + ); + + let nonce = eth_gateway + .pending_nonce("reverter") + .await + .unwrap() + .as_u64(); + println!("Suggested values for rollback:"); + println!(" l1 batch number: {}", last_executed_l1_batch_number.0); + println!(" nonce: {}", nonce); + println!( + " priority fee: {:?}", + self.config + .eth_sender + .gas_adjuster + .default_priority_fee_per_gas + ); + } + + /// Clears failed L1 transactions + async fn clear_failed_l1_transactions(&mut self) { + println!("clearing failed L1 transactions..."); + self.connection_pool + .access_storage() + .await + .eth_sender_dal() + .clear_failed_transactions(); + } +} + +#[derive(StructOpt, Debug)] +#[structopt(name = "block revert utility")] +enum Opt { + #[structopt( + name = "print-suggested-values", + about = "Displays suggested values to use" + )] + Display, + + #[structopt( + name = "send-eth-transaction", + about = "Sends revert transaction to L1" + )] + SendEthTransaction { + /// L1 batch number used to rollback to + #[structopt(long)] + l1_batch_number: u32, + + /// Priority fee used for rollback ethereum transaction + // We operate only by priority fee because we want to use base fee from ethereum + // and send transaction as soon as possible without any resend logic + #[structopt(long)] + priority_fee_per_gas: Option, + + /// Nonce 
used for rollback ethereum transaction + #[structopt(long)] + nonce: u64, + }, + + #[structopt( + name = "rollback-db", + about = "Reverts internal database state to previous block" + )] + RollbackDB { + /// L1 batch number used to rollback to + #[structopt(long)] + l1_batch_number: u32, + /// Flag that specifies if Postgres DB should be rolled back. + #[structopt(long)] + rollback_postgres: bool, + /// Flag that specifies if RocksDB with tree should be rolled back. + #[structopt(long)] + rollback_tree: bool, + /// Flag that specifies if RocksDB with state keeper cache should be rolled back. + #[structopt(long)] + rollback_sk_cache: bool, + }, + + #[structopt( + name = "clear-failed-transactions", + about = "Clears failed L1 transactions" + )] + ClearFailedL1Transactions, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + vlog::init(); + let config = ZkSyncConfig::from_env(); + let connection_pool = ConnectionPool::new(None, true); + let mut block_reverter = BlockReverter { + config: config.clone(), + connection_pool: connection_pool.clone(), + }; + + match Opt::from_args() { + Opt::Display => block_reverter.print_suggested_values().await, + Opt::SendEthTransaction { + l1_batch_number, + priority_fee_per_gas, + nonce, + } => { + let priority_fee_per_gas = priority_fee_per_gas.map(U256::from).unwrap_or_else(|| { + U256::from( + block_reverter + .config + .eth_sender + .gas_adjuster + .default_priority_fee_per_gas, + ) + }); + block_reverter + .send_ethereum_revert_transaction( + L1BatchNumber(l1_batch_number), + priority_fee_per_gas, + nonce, + ) + .await + } + Opt::RollbackDB { + l1_batch_number, + rollback_postgres, + rollback_tree, + rollback_sk_cache, + } => { + block_reverter + .rollback_db( + L1BatchNumber(l1_batch_number), + rollback_postgres, + rollback_tree, + rollback_sk_cache, + ) + .await + } + Opt::ClearFailedL1Transactions => block_reverter.clear_failed_l1_transactions().await, + } + Ok(()) +} diff --git 
a/core/bin/zksync_core/src/bin/merkle_tree_consistency_checker.rs b/core/bin/zksync_core/src/bin/merkle_tree_consistency_checker.rs new file mode 100644 index 000000000000..ff6c1708bc58 --- /dev/null +++ b/core/bin/zksync_core/src/bin/merkle_tree_consistency_checker.rs @@ -0,0 +1,12 @@ +use zksync_config::ZkSyncConfig; +use zksync_merkle_tree::ZkSyncTree; +use zksync_storage::db::Database; +use zksync_storage::RocksDB; + +fn main() { + vlog::init(); + let config = ZkSyncConfig::from_env(); + let db = RocksDB::new(Database::MerkleTree, config.db.path(), true); + let tree = ZkSyncTree::new(db); + tree.verify_consistency(); +} diff --git a/core/bin/zksync_core/src/bin/rocksdb_util.rs b/core/bin/zksync_core/src/bin/rocksdb_util.rs new file mode 100644 index 000000000000..41e247703cd5 --- /dev/null +++ b/core/bin/zksync_core/src/bin/rocksdb_util.rs @@ -0,0 +1,73 @@ +use structopt::StructOpt; +use zksync_config::DBConfig; +use zksync_storage::rocksdb::backup::{BackupEngine, BackupEngineOptions, RestoreOptions}; +use zksync_storage::rocksdb::{Error, Options, DB}; + +#[derive(StructOpt, Debug)] +#[structopt(name = "rocksdb management utility")] +enum Opt { + #[structopt( + name = "backup", + about = "Creates new backup of running rocksdb instance" + )] + Backup, + + #[structopt(name = "restore-from-backup", about = "Restores rocksdb from backup")] + Restore, +} + +fn create_backup(config: &DBConfig) -> Result<(), Error> { + let mut engine = BackupEngine::open( + &BackupEngineOptions::default(), + config.merkle_tree_backup_path(), + )?; + let db = DB::open_for_read_only(&Options::default(), config.path(), false)?; + engine.create_new_backup(&db)?; + engine.purge_old_backups(config.backup_count()) +} + +fn restore_from_latest_backup(config: &DBConfig) -> Result<(), Error> { + let mut engine = BackupEngine::open( + &BackupEngineOptions::default(), + config.merkle_tree_backup_path(), + )?; + engine.restore_from_latest_backup(config.path(), config.path(), 
&RestoreOptions::default()) +} + +fn main() { + let config = DBConfig::from_env(); + match Opt::from_args() { + Opt::Backup => create_backup(&config).unwrap(), + Opt::Restore => restore_from_latest_backup(&config).unwrap(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn backup_restore_workflow() { + let backup_dir = TempDir::new().expect("failed to get temporary directory for RocksDB"); + let temp_dir = TempDir::new().expect("failed to get temporary directory for RocksDB"); + let config = DBConfig { + path: temp_dir.path().to_str().unwrap().to_string(), + merkle_tree_backup_path: backup_dir.path().to_str().unwrap().to_string(), + ..Default::default() + }; + + let mut options = Options::default(); + options.create_if_missing(true); + let db = DB::open(&options, temp_dir.as_ref()).unwrap(); + db.put(b"key", b"value").expect("failed to write to db"); + + create_backup(&config).expect("failed to create backup"); + // drop original db + drop((db, temp_dir)); + + restore_from_latest_backup(&config).expect("failed to restore from backup"); + let db = DB::open(&Options::default(), config.path()).unwrap(); + assert_eq!(db.get(b"key").unwrap().unwrap(), b"value"); + } +} diff --git a/core/bin/zksync_core/src/bin/zksync_server.rs b/core/bin/zksync_core/src/bin/zksync_server.rs new file mode 100644 index 000000000000..87e6410eb520 --- /dev/null +++ b/core/bin/zksync_core/src/bin/zksync_server.rs @@ -0,0 +1,130 @@ +use std::cell::RefCell; + +use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt}; +use structopt::StructOpt; + +use zksync_config::ZkSyncConfig; +use zksync_core::{genesis_init, initialize_components, wait_for_tasks, Component, Components}; +use zksync_storage::RocksDB; + +#[derive(Debug, Clone, Copy)] +pub enum ServerCommand { + Genesis, + Launch, +} + +#[derive(StructOpt)] +#[structopt(name = "zkSync operator node", author = "Matter Labs")] +struct Opt { + /// Generate genesis block for the first 
contract deployment using temporary db + #[structopt(long)] + genesis: bool, + + /// Rebuild tree + #[structopt(long)] + rebuild_tree: bool, + + /// comma-separated list of components to launch + #[structopt( + long, + default_value = "api,tree,tree_lightweight,eth,data_fetcher,state_keeper,witness_generator" + )] + components: ComponentsToRun, +} + +struct ComponentsToRun(Vec); + +impl std::str::FromStr for ComponentsToRun { + type Err = String; + + fn from_str(s: &str) -> Result { + let components = s + .split(',') + .map(|x| Components::from_str(x.trim())) + .collect::, String>>()?; + let components = components + .into_iter() + .flat_map(|c| c.0) + .collect::>(); + Ok(ComponentsToRun(components)) + } +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let opt = Opt::from_args(); + let mut config = ZkSyncConfig::from_env(); + let sentry_guard = vlog::init(); + + if opt.genesis { + genesis_init(config).await; + return Ok(()); + } + + match sentry_guard { + Some(_) => vlog::info!( + "Starting Sentry url: {}, l1_network: {}, l2_network {}", + std::env::var("MISC_SENTRY_URL").unwrap(), + std::env::var("CHAIN_ETH_NETWORK").unwrap(), + std::env::var("CHAIN_ETH_ZKSYNC_NETWORK").unwrap(), + ), + None => vlog::info!("No sentry url configured"), + } + + let components = if opt.rebuild_tree { + vec![Component::Tree] + } else { + opt.components.0 + }; + + if cfg!(feature = "openzeppelin_tests") { + // Set very small block timeout for tests to work faster. + config.chain.state_keeper.block_commit_deadline_ms = 1; + } + + genesis_init(config.clone()).await; + + // OneShotWitnessGenerator is the only component that is not expected to run indefinitely + // if this value is `false`, we expect all components to run indefinitely: we panic if any component returns. + let is_only_an_oneshotwitness_generator_task = components.len() == 1 + && components + .iter() + .all(|c| matches!(c, Component::WitnessGenerator(Some(_)))); + + // Run core actors. 
+ let (core_task_handles, stop_sender, cb_receiver) = initialize_components( + &config, + components, + is_only_an_oneshotwitness_generator_task, + ) + .await + .expect("Unable to start Core actors"); + + vlog::info!("Running {} core task handlers", core_task_handles.len()); + let (stop_signal_sender, mut stop_signal_receiver) = mpsc::channel(256); + { + let stop_signal_sender = RefCell::new(stop_signal_sender.clone()); + ctrlc::set_handler(move || { + let mut sender = stop_signal_sender.borrow_mut(); + block_on(sender.send(true)).expect("Ctrl+C signal send"); + }) + .expect("Error setting Ctrl+C handler"); + } + + tokio::select! { + _ = async { wait_for_tasks(core_task_handles, is_only_an_oneshotwitness_generator_task).await } => {}, + _ = async { stop_signal_receiver.next().await } => { + vlog::info!("Stop signal received, shutting down"); + }, + error = async { cb_receiver.await } => { + if let Ok(error_msg) = error { + vlog::warn!("Circuit breaker received, shutting down. Reason: {}", error_msg); + } + }, + }; + let _ = stop_sender.send(true); + RocksDB::await_rocksdb_termination(); + // Sleep for some time to let some components gracefully stop. + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; + Ok(()) +} diff --git a/core/bin/zksync_core/src/data_fetchers/error.rs b/core/bin/zksync_core/src/data_fetchers/error.rs new file mode 100644 index 000000000000..13febd297d3f --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/error.rs @@ -0,0 +1,83 @@ +use std::time::Duration; + +use thiserror::Error; + +#[derive(Debug, Clone, Error)] +pub enum ApiFetchError { + #[error("Requests to the remote API were rate limited. Should wait for {} seconds", .0.as_secs())] + RateLimit(Duration), + #[error("Remote API is unavailable. Used URL: {0}")] + ApiUnavailable(String), + #[error("Unexpected JSON format. 
Error: {0}")] + UnexpectedJsonFormat(String), + #[error("Unable to receive data due to request timeout")] + RequestTimeout, + #[error("Unspecified error: {0}")] + Other(String), +} + +#[derive(Debug, Clone)] +pub struct ErrorAnalyzer { + fetcher: String, + min_errors_to_report: u64, + error_counter: u64, + requested_delay: Option, +} + +impl ErrorAnalyzer { + pub fn new(fetcher: &str) -> Self { + const MIN_ERRORS_FOR_REPORT: u64 = 20; + + Self { + fetcher: fetcher.to_string(), + min_errors_to_report: MIN_ERRORS_FOR_REPORT, + error_counter: 0, + requested_delay: None, + } + } + + pub fn reset(&mut self) { + self.error_counter = 0; + } + + pub async fn update(&mut self) { + if self.error_counter >= self.min_errors_to_report { + vlog::error!( + "[{}] A lot of requests to the remote API failed in a row. Current error count: {}", + &self.fetcher, + self.error_counter + ); + } + + if let Some(time) = self.requested_delay.take() { + tokio::time::sleep(time).await; + } + } + + pub fn process_error(&mut self, error: ApiFetchError) { + let fetcher = &self.fetcher; + self.error_counter += 1; + match error { + ApiFetchError::RateLimit(time) => { + vlog::warn!( + "[{}] Remote API notified us about rate limiting. 
Going to wait {} seconds before next loop iteration", + fetcher, + time.as_secs() + ); + self.requested_delay = Some(time); + } + ApiFetchError::UnexpectedJsonFormat(err) => { + vlog::warn!("[{}] Parse data error: {}", fetcher, err); + } + ApiFetchError::ApiUnavailable(err) => { + vlog::warn!("[{}] Remote API is unavailable: {}", fetcher, err); + } + ApiFetchError::RequestTimeout => { + vlog::warn!("[{}] Request for data timed out", fetcher); + } + ApiFetchError::Other(err) => { + vlog::warn!("[{}] Unspecified API error: {}", fetcher, err); + } + } + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/mod.rs b/core/bin/zksync_core/src/data_fetchers/mod.rs new file mode 100644 index 000000000000..ab91728ca818 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/mod.rs @@ -0,0 +1,36 @@ +//! This module provides several third-party API data fetchers. +//! Examples of fetchers we use: +//! +//! - Token price fetcher, which updates prices for all the tokens we use in zkSync. +//! Data of this fetcher is used to calculate fees. +//! - Token trading volume fetcher, which updates trading volumes for tokens. +//! Data of this fetcher is used to decide whether we are going to accept fees in this token. +//! +//! Every data fetcher is represented by an autonomic routine, which spend most of the time sleeping; +//! once in the configurable interval it fetches the data from an API and store it into the database. 
+
+use tokio::sync::watch;
+use tokio::task::JoinHandle;
+use zksync_config::ZkSyncConfig;
+use zksync_dal::ConnectionPool;
+
+pub mod error;
+pub mod token_list;
+pub mod token_price;
+pub mod token_trading_volume;
+
+pub fn run_data_fetchers(
+    config: &ZkSyncConfig,
+    pool: ConnectionPool,
+    stop_receiver: watch::Receiver<bool>,
+) -> Vec<JoinHandle<()>> {
+    let list_fetcher = token_list::TokenListFetcher::new(config.clone());
+    let price_fetcher = token_price::TokenPriceFetcher::new(config.clone());
+    let volume_fetcher = token_trading_volume::TradingVolumeFetcher::new(config.clone());
+
+    vec![
+        tokio::spawn(list_fetcher.run(pool.clone(), stop_receiver.clone())),
+        tokio::spawn(price_fetcher.run(pool.clone(), stop_receiver.clone())),
+        tokio::spawn(volume_fetcher.run(pool, stop_receiver)),
+    ]
+}
diff --git a/core/bin/zksync_core/src/data_fetchers/token_list/mock.rs b/core/bin/zksync_core/src/data_fetchers/token_list/mock.rs
new file mode 100644
index 000000000000..1c202ecb74b3
--- /dev/null
+++ b/core/bin/zksync_core/src/data_fetchers/token_list/mock.rs
@@ -0,0 +1,78 @@
+use std::{collections::HashMap, fs::read_to_string, path::PathBuf, str::FromStr};
+
+use async_trait::async_trait;
+use serde::{Deserialize, Serialize};
+
+use zksync_config::ZkSyncConfig;
+use zksync_types::{
+    tokens::{TokenMetadata, ETHEREUM_ADDRESS},
+    Address,
+};
+use zksync_utils::parse_env;
+
+use crate::data_fetchers::error::ApiFetchError;
+
+use super::FetcherImpl;
+
+#[derive(Debug, Clone)]
+pub struct MockTokenListFetcher {
+    tokens: HashMap<Address, TokenMetadata>,
+}
+
+impl MockTokenListFetcher {
+    pub fn new(config: &ZkSyncConfig) -> Self {
+        let network = config.chain.eth.network.to_string();
+        let tokens: HashMap<_, _> = get_genesis_token_list(&network)
+            .into_iter()
+            .map(|item| {
+                let addr = Address::from_str(&item.address[2..]).unwrap();
+                let metadata = TokenMetadata {
+                    name: item.name,
+                    symbol: item.symbol,
+                    decimals: item.decimals,
+                };
+
+                (addr, metadata)
+            })
+            .chain(std::iter::once((
+                
ETHEREUM_ADDRESS, + TokenMetadata { + name: "Ethereum".into(), + symbol: "ETH".into(), + decimals: 18, + }, + ))) + .collect(); + + Self { tokens } + } +} + +#[async_trait] +impl FetcherImpl for MockTokenListFetcher { + async fn fetch_token_list(&self) -> Result, ApiFetchError> { + Ok(self.tokens.clone()) + } +} + +/// Tokens that added when deploying contract +#[derive(Debug, Clone, Serialize, Deserialize)] +struct TokenGenesisListItem { + /// Address (prefixed with 0x) + pub address: String, + /// Powers of 10 in 1.0 token (18 for default ETH-like tokens) + pub decimals: u8, + /// Token symbol + pub symbol: String, + /// Token name + pub name: String, +} + +fn get_genesis_token_list(network: &str) -> Vec { + let mut file_path = parse_env::("ZKSYNC_HOME"); + file_path.push("etc"); + file_path.push("tokens"); + file_path.push(network); + file_path.set_extension("json"); + serde_json::from_str(&read_to_string(file_path).unwrap()).unwrap() +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_list/mod.rs b/core/bin/zksync_core/src/data_fetchers/token_list/mod.rs new file mode 100644 index 000000000000..b61cda13b26a --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_list/mod.rs @@ -0,0 +1,126 @@ +//! Token list fetcher is an entity capable of receiving information about token symbols, decimals, etc. +//! +//! Since we accept manual token addition to zkSync, we must be aware of some scam-tokens that are trying +//! to pretend to be something else. This is why we don't rely on the information that is provided by +//! the token smart contract itself. +//! Instead, we analyze somewhat truthful information source to pick the list of relevant tokens. +//! +//! If requested token is not in the list provided by the API, it's symbol will be displayed as +//! "ERC20-{token_address}" and decimals will be set to 18 (default value for most of the tokens). 
+ +use std::{ + collections::{HashMap, HashSet}, + time::Duration, +}; + +use async_trait::async_trait; +use tokio::sync::watch; + +use zksync_config::{configs::fetcher::TokenListSource, ZkSyncConfig}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_types::{tokens::TokenMetadata, Address}; + +use super::error::{ApiFetchError, ErrorAnalyzer}; + +mod mock; +mod one_inch; + +#[async_trait] +pub trait FetcherImpl: std::fmt::Debug + Send + Sync { + /// Retrieves the list of known tokens. + async fn fetch_token_list(&self) -> Result, ApiFetchError>; +} + +#[derive(Debug)] +pub struct TokenListFetcher { + config: ZkSyncConfig, + fetcher: Box, + error_handler: ErrorAnalyzer, +} + +impl TokenListFetcher { + fn create_fetcher(config: &ZkSyncConfig) -> Box { + let token_list_config = &config.fetcher.token_list; + match token_list_config.source { + TokenListSource::OneInch => { + Box::new(one_inch::OneInchTokenListFetcher::new(&config.fetcher)) + as Box + } + TokenListSource::Mock => { + Box::new(mock::MockTokenListFetcher::new(config)) as Box + } + } + } + + pub fn new(config: ZkSyncConfig) -> Self { + let fetcher = Self::create_fetcher(&config); + let error_handler = ErrorAnalyzer::new("TokenListFetcher"); + Self { + config, + fetcher, + error_handler, + } + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + let mut fetching_interval = + tokio::time::interval(self.config.fetcher.token_list.fetching_interval()); + + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, token_list_fetcher is shutting down"); + break; + } + + fetching_interval.tick().await; + self.error_handler.update().await; + + let mut token_list = match self.fetch_token_list().await { + Ok(list) => { + self.error_handler.reset(); + list + } + Err(err) => { + self.error_handler.process_error(err); + continue; + } + }; + + // We assume that token metadata does not change, thus we only looking for the new tokens. 
+ let mut storage = pool.access_storage().await; + let unknown_tokens = self.load_unknown_tokens(&mut storage).await; + token_list.retain(|token, _data| unknown_tokens.contains(token)); + + self.update_tokens(&mut storage, token_list).await; + } + } + + async fn fetch_token_list(&self) -> Result, ApiFetchError> { + const AWAITING_TIMEOUT: Duration = Duration::from_secs(2); + + let fetch_future = self.fetcher.fetch_token_list(); + + tokio::time::timeout(AWAITING_TIMEOUT, fetch_future) + .await + .map_err(|_| ApiFetchError::RequestTimeout)? + } + + async fn update_tokens( + &self, + storage: &mut StorageProcessor<'_>, + tokens: HashMap, + ) { + let mut tokens_dal = storage.tokens_dal(); + for (token, metadata) in tokens { + tokens_dal.update_well_known_l1_token(&token, metadata); + } + } + + async fn load_unknown_tokens(&self, storage: &mut StorageProcessor<'_>) -> HashSet
{ + storage + .tokens_dal() + .get_unknown_l1_token_addresses() + .into_iter() + .collect() + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_list/one_inch.rs b/core/bin/zksync_core/src/data_fetchers/token_list/one_inch.rs new file mode 100644 index 000000000000..2643a9fb89a3 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_list/one_inch.rs @@ -0,0 +1,71 @@ +use std::{collections::HashMap, str::FromStr}; + +use async_trait::async_trait; +use reqwest::{Client, Url}; +use serde::{Deserialize, Serialize}; + +use zksync_config::FetcherConfig; +use zksync_types::{tokens::TokenMetadata, Address}; + +use crate::data_fetchers::error::ApiFetchError; + +use super::FetcherImpl; + +#[derive(Debug, Clone)] +pub struct OneInchTokenListFetcher { + client: Client, + addr: Url, +} + +impl OneInchTokenListFetcher { + pub fn new(config: &FetcherConfig) -> Self { + Self { + client: Client::new(), + addr: Url::from_str(&config.token_list.url).expect("failed parse One Inch URL"), + } + } +} + +#[async_trait] +impl FetcherImpl for OneInchTokenListFetcher { + async fn fetch_token_list(&self) -> Result, ApiFetchError> { + let token_list_url = self + .addr + .join("/v3.0/1/tokens") + .expect("failed to join URL path"); + + let token_list = self + .client + .get(token_list_url.clone()) + .send() + .await + .map_err(|err| { + ApiFetchError::ApiUnavailable(format!("{} , Error: {}", token_list_url, err)) + })? + .json::() + .await + .map_err(|err| ApiFetchError::UnexpectedJsonFormat(err.to_string()))? 
+ .tokens; + + Ok(token_list) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(super) struct OneInchTokensResponse { + pub tokens: HashMap, +} + +#[tokio::test] +async fn test_fetch_one_inch_token_list() { + let mut config = FetcherConfig::from_env(); + config.token_list.url = "https://api.1inch.exchange".to_string(); + + let fetcher = OneInchTokenListFetcher::new(&config); + + let token_list = fetcher + .fetch_token_list() + .await + .expect("failed get token list"); + assert!(!token_list.is_empty(), "Token list is empty"); +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_price/coingecko.rs b/core/bin/zksync_core/src/data_fetchers/token_price/coingecko.rs new file mode 100644 index 000000000000..2402f00ccc5d --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_price/coingecko.rs @@ -0,0 +1,219 @@ +use std::{ + cmp::{max, min}, + collections::HashMap, + str::FromStr, +}; + +use async_trait::async_trait; +use chrono::{DateTime, NaiveDateTime, Utc}; +use futures::try_join; +use itertools::Itertools; +use num::{rational::Ratio, BigUint, FromPrimitive}; +use reqwest::{Client, Url}; +use serde::{Deserialize, Serialize}; + +use zksync_config::FetcherConfig; +use zksync_types::{ + tokens::{TokenPrice, ETHEREUM_ADDRESS}, + Address, +}; +use zksync_utils::UnsignedRatioSerializeAsDecimal; + +use crate::data_fetchers::error::ApiFetchError; + +use super::FetcherImpl; + +#[derive(Debug, Clone)] +pub struct CoinGeckoFetcher { + client: Client, + addr: Url, +} + +impl CoinGeckoFetcher { + pub fn new(config: &FetcherConfig) -> Self { + Self { + client: Client::new(), + addr: Url::from_str(&config.token_price.url).expect("failed parse CoinGecko URL"), + } + } + + pub async fn fetch_erc20_token_prices( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError> { + let token_price_url = self + .addr + .join("api/v3/simple/token_price/ethereum") + .expect("failed to join URL path"); + + let mut token_prices = HashMap::new(); + let mut 
fetching_interval = tokio::time::interval(tokio::time::Duration::from_secs(1)); + // Splitting is needed to avoid 'Request-URI Too Large' error. + for tokens_chunk in tokens.chunks(10) { + fetching_interval.tick().await; + let comma_separated_token_addresses = tokens_chunk + .iter() + .map(|token_addr| format!("{:#x}", token_addr)) + .join(","); + + let token_prices_chunk = self + .client + .get(token_price_url.clone()) + .query(&[ + ( + "contract_addresses", + comma_separated_token_addresses.as_str(), + ), + ("vs_currencies", "usd"), + ("include_last_updated_at", "true"), + ("include_24hr_change", "true"), + ]) + .send() + .await + .map_err(|err| { + ApiFetchError::ApiUnavailable(format!("{} , Error: {}", token_price_url, err)) + })? + .json::>() + .await + .map_err(|err| ApiFetchError::UnexpectedJsonFormat(err.to_string()))?; + token_prices.extend(token_prices_chunk); + } + + Ok(token_prices) + } + + pub async fn fetch_ethereum_price(&self) -> Result { + let coin_price_url = self + .addr + .join("api/v3/simple/price") + .expect("failed to join URL path"); + + let mut token_prices = self + .client + .get(coin_price_url.clone()) + .query(&[ + ("ids", "ethereum"), + ("vs_currencies", "usd"), + ("include_last_updated_at", "true"), + ("include_24hr_change", "true"), + ]) + .send() + .await + .map_err(|err| { + ApiFetchError::ApiUnavailable(format!("{} , Error: {}", coin_price_url, err)) + })? 
+            .json::<HashMap<String, CoinGeckoTokenPrice>>()
+            .await
+            .map_err(|err| ApiFetchError::UnexpectedJsonFormat(err.to_string()))?;
+
+        let eth_token_price = token_prices
+            .remove("ethereum")
+            .ok_or_else(|| ApiFetchError::Other("Failed to get ether price".to_string()))?;
+
+        Ok(eth_token_price)
+    }
+}
+
+#[async_trait]
+impl FetcherImpl for CoinGeckoFetcher {
+    async fn fetch_token_price(
+        &self,
+        tokens: &[Address],
+    ) -> Result<HashMap<Address, TokenPrice>, ApiFetchError> {
+        let token_prices = {
+            // We have to find out the ether price separately from the erc20 tokens,
+            // so we will launch requests concurrently
+            if tokens.contains(&ETHEREUM_ADDRESS) {
+                let (mut token_prices, ethereum_price) = try_join!(
+                    self.fetch_erc20_token_prices(tokens),
+                    self.fetch_ethereum_price(),
+                )?;
+                token_prices.insert(ETHEREUM_ADDRESS, ethereum_price);
+
+                token_prices
+            } else {
+                self.fetch_erc20_token_prices(tokens).await?
+            }
+        };
+
+        let result = token_prices
+            .into_iter()
+            .map(|(address, coingecko_token_price)| {
+                let usd_price = {
+                    let current_price = coingecko_token_price.usd;
+                    if let Some(usd_24h_change) = coingecko_token_price.usd_24h_change {
+                        let percent_price_diff = BigUint::from_f64(100.0f64 - usd_24h_change);
+                        if let Some(percent_price_diff) = percent_price_diff {
+                            let yesterdays_price =
+                                (&current_price * percent_price_diff) / BigUint::from(100u32);
+
+                            if address == ETHEREUM_ADDRESS {
+                                max(current_price, yesterdays_price)
+                            } else {
+                                min(current_price, yesterdays_price)
+                            }
+                        } else {
+                            current_price
+                        }
+                    } else {
+                        current_price
+                    }
+                };
+
+                let last_updated = {
+                    let naive_last_updated =
+                        NaiveDateTime::from_timestamp_opt(coingecko_token_price.last_updated_at, 0)
+                            .unwrap();
+                    DateTime::<Utc>::from_utc(naive_last_updated, Utc)
+                };
+
+                let token_price = TokenPrice {
+                    usd_price,
+                    last_updated,
+                };
+
+                (address, token_price)
+            })
+            .collect();
+
+        Ok(result)
+    }
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CoinGeckoTokenPrice {
+    /// Timestamp of the last update, in seconds since the Unix epoch
+    /// (it is passed to `NaiveDateTime::from_timestamp_opt`, which expects seconds).
+    pub last_updated_at: i64,
+    pub 
usd_24h_change: Option, + #[serde(with = "UnsignedRatioSerializeAsDecimal")] + pub usd: Ratio, +} + +#[tokio::test] +#[ignore] +async fn test_fetch_coingecko_prices() { + let mut config = FetcherConfig::from_env(); + config.token_price.url = "https://api.coingecko.com".to_string(); + + let fetcher = CoinGeckoFetcher::new(&config); + + let tokens = vec![ + ETHEREUM_ADDRESS, + Address::from_str("6b175474e89094c44da98b954eedeac495271d0f").expect("DAI"), + Address::from_str("1f9840a85d5af5bf1d1762f925bdaddc4201f984").expect("UNI"), + Address::from_str("514910771af9ca656af840dff83e8264ecf986ca").expect("LINK"), + ]; + + let token_prices = fetcher + .fetch_token_price(&tokens) + .await + .expect("failed get tokens price"); + assert_eq!( + token_prices.len(), + tokens.len(), + "not all data was received" + ); + for token_address in tokens { + assert!(token_prices.get(&token_address).is_some()); + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_price/coinmarketcap.rs b/core/bin/zksync_core/src/data_fetchers/token_price/coinmarketcap.rs new file mode 100644 index 000000000000..b233508f1434 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_price/coinmarketcap.rs @@ -0,0 +1,193 @@ +// use std::{collections::HashMap, str::FromStr}; +// +// use async_trait::async_trait; +// use chrono::{DateTime, Utc}; +// use itertools::Itertools; +// use num::{rational::Ratio, BigUint}; +// use reqwest::{Client, Url}; +// use serde::{Deserialize, Serialize}; +// +// use zksync_config::FetcherConfig; +// use zksync_storage::{db_view::DBView, tokens::TokensSchema}; +// use zksync_types::{tokens::TokenPrice, Address}; +// use zksync_utils::UnsignedRatioSerializeAsDecimal; +// +// use crate::data_fetchers::error::ApiFetchError; +// +// use super::FetcherImpl; +// +// #[derive(Debug, Clone)] +// pub struct CoinMarketCapFetcher { +// client: Client, +// addr: Url, +// } +// +// impl CoinMarketCapFetcher { +// pub fn new(config: &FetcherConfig) -> Self { +// Self { +// 
client: Client::new(), +// addr: Url::from_str(&config.token_list.url).expect("failed parse One Inch URL"), +// } +// } +// } +// +// #[async_trait] +// impl FetcherImpl for CoinMarketCapFetcher { +// async fn fetch_token_price( +// &self, +// token_addrs: &[Address], +// ) -> Result, ApiFetchError> { +// let token_addrs = token_addrs.to_vec(); +// +// let tokens = DBView::with_snapshot(move |snap| { +// let tokens_list = TokensSchema::new(&*snap).token_list(); +// +// token_addrs +// .iter() +// .cloned() +// .filter_map(|token_addr| { +// if let Some(token_symbol) = tokens_list.token_symbol(&token_addr) { +// Some((token_addr, token_symbol)) +// } else { +// vlog::warn!( +// "Error getting token symbol: token address: {:#x}", +// token_addr, +// ); +// None +// } +// }) +// .collect::>() +// }) +// .await; +// +// if tokens.is_empty() { +// return Err(ApiFetchError::Other( +// "Failed to identify symbols of tokens by their addresses".to_string(), +// )); +// } +// +// let comma_separated_token_symbols = tokens +// .iter() +// .map(|(_, token_symbol)| token_symbol) +// .join(","); +// +// let request_url = self +// .addr +// .join("/v1/cryptocurrency/quotes/latest") +// .expect("failed to join URL path"); +// +// let mut api_response = self +// .client +// .get(request_url.clone()) +// .query(&[("symbol", comma_separated_token_symbols)]) +// .send() +// .await +// .map_err(|err| { +// ApiFetchError::Other(format!("Coinmarketcap API request failed: {}", err)) +// })? 
+// .json::() +// .await +// .map_err(|err| ApiFetchError::UnexpectedJsonFormat(err.to_string()))?; +// +// let result = tokens +// .into_iter() +// .filter_map(|(token_addr, token_symbol)| { +// let token_info = api_response.data.remove(&token_symbol); +// let usd_quote = token_info.and_then(|mut token_info| token_info.quote.remove("USD")); +// +// if let Some(usd_quote) = usd_quote { +// Some(( +// token_addr, +// TokenPrice { +// usd_price: usd_quote.price, +// last_updated: usd_quote.last_updated, +// }, +// )) +// } else { +// vlog::warn!( +// "Error getting token price from CoinMarketCap: token address: {:#x}, token symbol: {}", +// token_addr, +// token_symbol, +// ); +// None +// } +// }) +// .collect(); +// +// Ok(result) +// } +// } +// +// #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] +// pub struct CoinMarketCapQuote { +// #[serde(with = "UnsignedRatioSerializeAsDecimal")] +// pub price: Ratio, +// pub last_updated: DateTime, +// } +// +// #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] +// pub struct CoinMarketCapTokenInfo { +// pub quote: HashMap, +// } +// +// #[derive(Debug, Clone, Serialize, Deserialize)] +// pub struct CoinMarketCapResponse { +// pub data: HashMap, +// } +// +// #[test] +// fn parse_coin_market_cap_response() { +// let example = r#"{ +// "status": { +// "timestamp": "2020-04-17T04:51:12.012Z", +// "error_code": 0, +// "error_message": null, +// "elapsed": 9, +// "credit_count": 1, +// "notice": null +// }, +// "data": { +// "ETH": { +// "id": 1027, +// "name": "Ethereum", +// "symbol": "ETH", +// "slug": "ethereum", +// "num_market_pairs": 5153, +// "date_added": "2015-08-07T00:00:00.000Z", +// "tags": [ +// "mineable" +// ], +// "max_supply": null, +// "circulating_supply": 110550929.1865, +// "total_supply": 110550929.1865, +// "platform": null, +// "cmc_rank": 2, +// "last_updated": "2020-04-17T04:50:41.000Z", +// "quote": { +// "USD": { +// "price": 170.692214992, +// "volume_24h": 
22515583743.3856, +// "percent_change_1h": -0.380817, +// "percent_change_24h": 11.5718, +// "percent_change_7d": 3.6317, +// "market_cap": 18870182972.267426, +// "last_updated": "2020-04-17T04:50:41.000Z" +// } +// } +// } +// } +// }"#; +// +// let resp = +// serde_json::from_str::(example).expect("serialization failed"); +// let token_data = resp.data.get("ETH").expect("ETH data not found"); +// let quote = token_data.quote.get("USD").expect("USD not found"); +// assert_eq!( +// quote.price, +// UnsignedRatioSerializeAsDecimal::deserialize_from_str_with_dot("170.692214992").unwrap() +// ); +// assert_eq!( +// quote.last_updated, +// DateTime::::from_str("2020-04-17T04:50:41.000Z").unwrap() +// ); +// } diff --git a/core/bin/zksync_core/src/data_fetchers/token_price/mock.rs b/core/bin/zksync_core/src/data_fetchers/token_price/mock.rs new file mode 100644 index 000000000000..a49d178b78a6 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_price/mock.rs @@ -0,0 +1,50 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use chrono::Utc; +use num::{rational::Ratio, BigUint}; +use zksync_types::{ + tokens::{TokenPrice, ETHEREUM_ADDRESS}, + Address, +}; + +use crate::data_fetchers::error::ApiFetchError; + +use super::FetcherImpl; + +#[derive(Debug, Default, Clone)] +pub struct MockPriceFetcher; + +impl MockPriceFetcher { + pub fn new() -> Self { + Self::default() + } + + pub fn token_price(&self, token: &Address) -> TokenPrice { + let raw_base_price = if *token == ETHEREUM_ADDRESS { + 1500u64 + } else { + 1u64 + }; + let usd_price = Ratio::from_integer(BigUint::from(raw_base_price)); + + TokenPrice { + usd_price, + last_updated: Utc::now(), + } + } +} + +#[async_trait] +impl FetcherImpl for MockPriceFetcher { + async fn fetch_token_price( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError> { + let data: HashMap<_, _> = tokens + .iter() + .map(|token| (*token, self.token_price(token))) + .collect(); + Ok(data) + } +} diff --git 
a/core/bin/zksync_core/src/data_fetchers/token_price/mod.rs b/core/bin/zksync_core/src/data_fetchers/token_price/mod.rs new file mode 100644 index 000000000000..2f3717144e0b --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_price/mod.rs @@ -0,0 +1,129 @@ +//! Token price fetcher is responsible for maintaining actual prices for tokens that are used in zkSync. + +use std::{collections::HashMap, time::Duration}; + +use async_trait::async_trait; + +use zksync_config::{configs::fetcher::TokenPriceSource, ZkSyncConfig}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_types::{tokens::TokenPrice, Address}; + +use super::error::{ApiFetchError, ErrorAnalyzer}; +use bigdecimal::FromPrimitive; +use num::{rational::Ratio, BigUint}; +use tokio::sync::watch; + +pub mod coingecko; +// pub mod coinmarketcap; +pub mod mock; + +#[async_trait] +pub trait FetcherImpl: std::fmt::Debug + Send + Sync { + /// Retrieves the token price in USD. + async fn fetch_token_price( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError>; +} + +#[derive(Debug)] +pub struct TokenPriceFetcher { + minimum_required_liquidity: Ratio, + config: ZkSyncConfig, + fetcher: Box, + error_handler: ErrorAnalyzer, +} + +impl TokenPriceFetcher { + fn create_fetcher(config: &ZkSyncConfig) -> Box { + let token_price_config = &config.fetcher.token_price; + match token_price_config.source { + TokenPriceSource::CoinGecko => { + Box::new(coingecko::CoinGeckoFetcher::new(&config.fetcher)) as Box + } + TokenPriceSource::CoinMarketCap => { + unimplemented!() + } + TokenPriceSource::Mock => { + Box::new(mock::MockPriceFetcher::new()) as Box + } + } + } + + pub fn new(config: ZkSyncConfig) -> Self { + let fetcher = Self::create_fetcher(&config); + let error_handler = ErrorAnalyzer::new("TokenPriceFetcher"); + Self { + minimum_required_liquidity: Ratio::from_integer( + BigUint::from_u64(0).unwrap(), // We don't use minimum required liquidity in the server anymore. 
+ ), + config, + fetcher, + error_handler, + } + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + let mut fetching_interval = + tokio::time::interval(self.config.fetcher.token_price.fetching_interval()); + + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, token_price_fetcher is shutting down"); + break; + } + + fetching_interval.tick().await; + self.error_handler.update().await; + + // We refresh token list in case new tokens were added. + let mut storage = pool.access_storage().await; + let tokens = self.get_tokens(&mut storage).await; + + // Vector of received token prices in the format of (`token_addr`, `price_in_usd`, `fetch_timestamp`). + let token_prices = match self.fetch_token_price(&tokens).await { + Ok(prices) => { + self.error_handler.reset(); + prices + } + Err(err) => { + self.error_handler.process_error(err); + continue; + } + }; + self.store_token_prices(&mut storage, token_prices).await; + } + } + + async fn fetch_token_price( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError> { + const AWAITING_TIMEOUT: Duration = Duration::from_secs(2); + + let fetch_future = self.fetcher.fetch_token_price(tokens); + + tokio::time::timeout(AWAITING_TIMEOUT, fetch_future) + .await + .map_err(|_| ApiFetchError::RequestTimeout)? + } + + async fn store_token_prices( + &self, + storage: &mut StorageProcessor<'_>, + token_prices: HashMap, + ) { + let mut tokens_dal = storage.tokens_dal(); + for (token, price) in token_prices { + tokens_dal.set_l1_token_price(&token, price); + } + } + + /// Returns the list of "interesting" tokens, e.g. ones that can be used to pay fees. + /// We don't actually need prices for other tokens. + async fn get_tokens(&self, storage: &mut StorageProcessor<'_>) -> Vec
{ + storage + .tokens_dal() + .get_l1_tokens_by_volume(&self.minimum_required_liquidity) + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mock.rs b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mock.rs new file mode 100644 index 000000000000..8aab1c6bb502 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mock.rs @@ -0,0 +1,41 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use bigdecimal::FromPrimitive; +use chrono::Utc; +use num::{rational::Ratio, BigUint}; +use zksync_types::{tokens::TokenMarketVolume, Address}; + +use crate::data_fetchers::error::ApiFetchError; + +use super::FetcherImpl; + +#[derive(Debug, Clone)] +pub struct MockTradingVolumeFetcher {} + +impl MockTradingVolumeFetcher { + pub fn new() -> Self { + Self {} + } + + pub fn volume(&self, _token: &Address) -> TokenMarketVolume { + TokenMarketVolume { + market_volume: Ratio::from(BigUint::from_u64(1).unwrap()), // We don't use volume in the server anymore. + last_updated: Utc::now(), + } + } +} + +#[async_trait] +impl FetcherImpl for MockTradingVolumeFetcher { + async fn fetch_trading_volumes( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError> { + let volumes: HashMap<_, _> = tokens + .iter() + .map(|token| (*token, self.volume(token))) + .collect(); + Ok(volumes) + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mod.rs b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mod.rs new file mode 100644 index 000000000000..2b3725ab14bf --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/mod.rs @@ -0,0 +1,124 @@ +//! Token trading volume fetcher loads the information about how good tokens are being traded on exchanges. +//! We need this information in order to either accept or deny paying fees in a certain tokens: +//! we are only interested in tokens that can be sold to cover expences for the network maintenance. 
+ +use std::{collections::HashMap, time::Duration}; + +use async_trait::async_trait; +use tokio::sync::watch; + +use zksync_config::{configs::fetcher::TokenTradingVolumeSource, ZkSyncConfig}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_types::{tokens::TokenMarketVolume, Address}; + +use super::error::{ApiFetchError, ErrorAnalyzer}; + +mod mock; +mod uniswap; + +#[async_trait] +pub trait FetcherImpl: std::fmt::Debug + Send + Sync { + /// Retrieves the list of known tokens. + async fn fetch_trading_volumes( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError>; +} + +#[derive(Debug)] +pub struct TradingVolumeFetcher { + config: ZkSyncConfig, + fetcher: Box, + error_handler: ErrorAnalyzer, +} + +impl TradingVolumeFetcher { + fn create_fetcher(config: &ZkSyncConfig) -> Box { + let token_trading_volume_config = &config.fetcher.token_trading_volume; + match token_trading_volume_config.source { + TokenTradingVolumeSource::Uniswap => { + Box::new(uniswap::UniswapTradingVolumeFetcher::new(&config.fetcher)) + as Box + } + TokenTradingVolumeSource::Mock => { + Box::new(mock::MockTradingVolumeFetcher::new()) as Box + } + } + } + + pub fn new(config: ZkSyncConfig) -> Self { + let fetcher = Self::create_fetcher(&config); + let error_handler = ErrorAnalyzer::new("TradingVolumeFetcher"); + Self { + config, + fetcher, + error_handler, + } + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + let mut fetching_interval = + tokio::time::interval(self.config.fetcher.token_trading_volume.fetching_interval()); + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, trading_volume_fetcher is shutting down"); + break; + } + + fetching_interval.tick().await; + self.error_handler.update().await; + + let mut storage = pool.access_storage().await; + let known_l1_tokens = self.load_tokens(&mut storage).await; + + let trading_volumes = match self.fetch_trading_volumes(&known_l1_tokens).await { + Ok(volumes) 
=> { + self.error_handler.reset(); + volumes + } + Err(err) => { + self.error_handler.process_error(err); + continue; + } + }; + + self.store_market_volumes(&mut storage, trading_volumes) + .await; + } + } + + async fn fetch_trading_volumes( + &self, + addresses: &[Address], + ) -> Result, ApiFetchError> { + const AWAITING_TIMEOUT: Duration = Duration::from_secs(2); + + let fetch_future = self.fetcher.fetch_trading_volumes(addresses); + + tokio::time::timeout(AWAITING_TIMEOUT, fetch_future) + .await + .map_err(|_| ApiFetchError::RequestTimeout)? + } + + async fn store_market_volumes( + &self, + storage: &mut StorageProcessor<'_>, + tokens: HashMap, + ) { + let mut tokens_dal = storage.tokens_dal(); + for (token, volume) in tokens { + tokens_dal.set_l1_token_market_volume(&token, volume); + } + } + + /// Returns the list of tokens with known metadata (if token is not in the list we use, + /// it's very likely to not have required level of trading volume anyways). + async fn load_tokens(&self, storage: &mut StorageProcessor<'_>) -> Vec
{ + storage + .tokens_dal() + .get_well_known_token_addresses() + .into_iter() + .map(|(l1_token, _)| l1_token) + .collect() + } +} diff --git a/core/bin/zksync_core/src/data_fetchers/token_trading_volume/uniswap.rs b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/uniswap.rs new file mode 100644 index 000000000000..4e1a2b576f71 --- /dev/null +++ b/core/bin/zksync_core/src/data_fetchers/token_trading_volume/uniswap.rs @@ -0,0 +1,146 @@ +use std::{collections::HashMap, str::FromStr}; + +use async_trait::async_trait; +use chrono::Utc; +use itertools::Itertools; +use num::{rational::Ratio, BigUint}; +use reqwest::{Client, Url}; +use serde::{Deserialize, Serialize}; + +use zksync_config::FetcherConfig; +use zksync_types::{tokens::TokenMarketVolume, Address}; +use zksync_utils::UnsignedRatioSerializeAsDecimal; + +use crate::data_fetchers::error::ApiFetchError; + +use super::FetcherImpl; + +#[derive(Debug, Clone)] +pub struct UniswapTradingVolumeFetcher { + client: Client, + addr: Url, +} + +impl UniswapTradingVolumeFetcher { + pub fn new(config: &FetcherConfig) -> Self { + Self { + client: Client::new(), + addr: Url::from_str(&config.token_trading_volume.url) + .expect("failed parse Uniswap URL"), + } + } +} + +#[async_trait] +impl FetcherImpl for UniswapTradingVolumeFetcher { + async fn fetch_trading_volumes( + &self, + tokens: &[Address], + ) -> Result, ApiFetchError> { + let comma_separated_token_addresses = tokens + .iter() + .map(|token_addr| format!("\"{:#x}\"", token_addr)) + .join(","); + + let query = format!( + "{{tokens(where:{{id_in:[{}]}}){{id, untrackedVolumeUSD}}}}", + comma_separated_token_addresses + ); + + let last_updated = Utc::now(); + + let raw_response = self + .client + .post(self.addr.clone()) + .json(&serde_json::json!({ + "query": query, + })) + .send() + .await + .map_err(|err| { + ApiFetchError::ApiUnavailable(format!("Uniswap API request failed: {}", err)) + })?; + + let response_status = raw_response.status(); + let 
response_text = raw_response.text().await.map_err(|err| { + ApiFetchError::Other(format!( + "Error: {} while while decoding response to text", + err + )) + })?; + + let response: GraphqlResponse = serde_json::from_str(&response_text).map_err(|err| { + ApiFetchError::UnexpectedJsonFormat(format!( + "Error: {} while decoding response: {} with status: {}", + err, response_text, response_status + )) + })?; + + let result = response + .data + .tokens + .into_iter() + .map(|token_response| { + ( + token_response.id, + TokenMarketVolume { + market_volume: token_response.untracked_volume_usd, + last_updated, + }, + ) + }) + .collect(); + + Ok(result) + } +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct GraphqlResponse { + pub data: GraphqlTokensResponse, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct GraphqlTokensResponse { + pub tokens: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct TokenResponse { + pub id: Address, + /// Total amount swapped all time in token pair stored in USD, no minimum liquidity threshold. 
+ #[serde( + with = "UnsignedRatioSerializeAsDecimal", + rename = "untrackedVolumeUSD" + )] + pub untracked_volume_usd: Ratio, +} + +#[tokio::test] +async fn test_fetch_uniswap_trading_volumes() { + let mut config = FetcherConfig::from_env(); + config.token_trading_volume.url = + "https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2".to_string(); + + let fetcher = UniswapTradingVolumeFetcher::new(&config); + + let tokens = vec![ + Address::from_str("6b175474e89094c44da98b954eedeac495271d0f").expect("DAI"), + Address::from_str("1f9840a85d5af5bf1d1762f925bdaddc4201f984").expect("UNI"), + Address::from_str("514910771af9ca656af840dff83e8264ecf986ca").expect("LINK"), + ]; + + let token_volumes = fetcher + .fetch_trading_volumes(&tokens) + .await + .expect("failed get tokens price"); + + assert_eq!( + token_volumes.len(), + tokens.len(), + "not all data was received" + ); + for token_address in tokens { + assert!(token_volumes.get(&token_address).is_some()); + } +} diff --git a/core/bin/zksync_core/src/db_storage_provider.rs b/core/bin/zksync_core/src/db_storage_provider.rs new file mode 100644 index 000000000000..8fe7eeb897d8 --- /dev/null +++ b/core/bin/zksync_core/src/db_storage_provider.rs @@ -0,0 +1,53 @@ +use zksync_dal::StorageProcessor; +use zksync_types::{Address, MiniblockNumber, StorageKey, StorageValue, ZkSyncReadStorage, H256}; + +#[derive(Debug)] +pub struct DbStorageProvider<'a> { + connection: StorageProcessor<'a>, + block_number: MiniblockNumber, + consider_new_l1_batch: bool, +} + +impl<'a> DbStorageProvider<'a> { + pub fn new( + connection: StorageProcessor<'a>, + block_number: MiniblockNumber, + consider_new_l1_batch: bool, + ) -> DbStorageProvider<'a> { + DbStorageProvider { + connection, + block_number, + consider_new_l1_batch, + } + } +} + +impl<'a> ZkSyncReadStorage for DbStorageProvider<'a> { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + self.connection + .storage_web3_dal() + .get_historical_value_unchecked(key, 
self.block_number) + .unwrap() + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + self.connection + .storage_web3_dal() + .is_write_initial(key, self.block_number, self.consider_new_l1_batch) + .unwrap() + } + + fn load_contract(&mut self, address: Address) -> Option> { + self.connection + .storage_web3_dal() + .get_contract_code_unchecked(address, self.block_number) + .unwrap() + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.connection + .storage_web3_dal() + .get_factory_dep_unchecked(hash, self.block_number) + .unwrap() + } +} diff --git a/core/bin/zksync_core/src/eth_sender/aggregator.rs b/core/bin/zksync_core/src/eth_sender/aggregator.rs new file mode 100644 index 000000000000..7094da1b40d0 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/aggregator.rs @@ -0,0 +1,271 @@ +use crate::eth_sender::block_publish_criterion::{ + BlockNumberCriterion, BlockPublishCriterion, DataSizeCriterion, GasCriterion, + TimestampDeadlineCriterion, +}; +use zksync_config::configs::eth_sender::{ProofSendingMode, SenderConfig}; +use zksync_dal::StorageProcessor; +use zksync_types::aggregated_operations::{ + AggregatedActionType, AggregatedOperation, BlocksCommitOperation, BlocksExecuteOperation, + BlocksProofOperation, +}; +use zksync_types::commitment::BlockWithMetadata; +use zksync_types::L1BatchNumber; + +#[derive(Debug)] +pub struct Aggregator { + commit_criterion: Vec>, + proof_criterion: Vec>, + execute_criterion: Vec>, + config: SenderConfig, +} + +impl Aggregator { + pub fn new(config: SenderConfig) -> Self { + Self { + commit_criterion: vec![ + Box::from(BlockNumberCriterion { + op: AggregatedActionType::CommitBlocks, + limit: config.max_aggregated_blocks_to_commit, + }), + Box::from(GasCriterion::new( + AggregatedActionType::CommitBlocks, + config.max_aggregated_tx_gas, + )), + Box::from(DataSizeCriterion { + op: AggregatedActionType::CommitBlocks, + data_limit: config.max_eth_tx_data_size, + }), + 
Box::from(TimestampDeadlineCriterion { + op: AggregatedActionType::CommitBlocks, + deadline_seconds: config.aggregated_block_commit_deadline, + max_allowed_lag: Some(config.timestamp_criteria_max_allowed_lag), + }), + ], + proof_criterion: vec![ + Box::from(BlockNumberCriterion { + op: AggregatedActionType::PublishProofBlocksOnchain, + limit: *config.aggregated_proof_sizes.iter().max().unwrap() as u32, + }), + Box::from(GasCriterion::new( + AggregatedActionType::PublishProofBlocksOnchain, + config.max_aggregated_tx_gas, + )), + Box::from(TimestampDeadlineCriterion { + op: AggregatedActionType::PublishProofBlocksOnchain, + deadline_seconds: config.aggregated_block_prove_deadline, + // Currently, we can't use this functionality for proof criterion + // since we don't send dummy and real proofs in the same range, + // so even small ranges must be closed. + max_allowed_lag: None, + }), + ], + execute_criterion: vec![ + Box::from(BlockNumberCriterion { + op: AggregatedActionType::ExecuteBlocks, + limit: config.max_aggregated_blocks_to_execute, + }), + Box::from(GasCriterion::new( + AggregatedActionType::ExecuteBlocks, + config.max_aggregated_tx_gas, + )), + Box::from(TimestampDeadlineCriterion { + op: AggregatedActionType::ExecuteBlocks, + deadline_seconds: config.aggregated_block_execute_deadline, + max_allowed_lag: Some(config.timestamp_criteria_max_allowed_lag), + }), + ], + config, + } + } + + pub async fn get_next_ready_operation( + &mut self, + storage: &mut StorageProcessor<'_>, + ) -> Option { + let last_sealed_block_number = storage.blocks_dal().get_sealed_block_number(); + if let Some(op) = self + .get_execute_operations( + storage, + self.config.max_aggregated_blocks_to_execute as usize, + last_sealed_block_number, + ) + .await + { + Some(AggregatedOperation::ExecuteBlocks(op)) + } else if let Some(op) = self + .get_proof_operation( + storage, + *self.config.aggregated_proof_sizes.iter().max().unwrap(), + last_sealed_block_number, + ) + .await + { + 
Some(AggregatedOperation::PublishProofBlocksOnchain(op)) + } else { + self.get_commit_operation( + storage, + self.config.max_aggregated_blocks_to_commit as usize, + last_sealed_block_number, + ) + .await + .map(AggregatedOperation::CommitBlocks) + } + } + + async fn get_execute_operations( + &mut self, + storage: &mut StorageProcessor<'_>, + limit: usize, + last_sealed_block: L1BatchNumber, + ) -> Option { + let ready_for_execute_blocks = storage.blocks_dal().get_ready_for_execute_blocks(limit); + let blocks = extract_ready_subrange( + storage, + &mut self.execute_criterion, + ready_for_execute_blocks, + last_sealed_block, + ) + .await; + + blocks.map(|blocks| BlocksExecuteOperation { blocks }) + } + + async fn get_commit_operation( + &mut self, + storage: &mut StorageProcessor<'_>, + limit: usize, + last_sealed_block: L1BatchNumber, + ) -> Option { + let mut blocks_dal = storage.blocks_dal(); + + let last_block = blocks_dal.get_last_committed_to_eth_block()?; + let ready_for_commit_blocks = blocks_dal.get_ready_for_commit_blocks(limit); + + let blocks = extract_ready_subrange( + storage, + &mut self.commit_criterion, + ready_for_commit_blocks, + last_sealed_block, + ) + .await; + blocks.map(|blocks| BlocksCommitOperation { + last_committed_block: last_block, + blocks, + }) + } + + fn load_real_proof_operation( + storage: &mut StorageProcessor<'_>, + ) -> Option { + let blocks = storage + .blocks_dal() + .get_ready_for_proof_blocks_real_verifier(1usize); + if !blocks.is_empty() { + let prev_block_number = blocks.first().map(|bl| bl.header.number - 1)?; + let prev_block = storage.blocks_dal().get_block_metadata(prev_block_number)?; + let from = blocks.first().map(|bl| bl.header.number)?; + let to = blocks.last().map(|bl| bl.header.number)?; + let proofs = storage.prover_dal().get_final_proofs_for_blocks(from, to); + + // currently we only support sending one proof + assert_eq!(proofs.len(), 1); + assert_eq!(from, to); + + Some(BlocksProofOperation { + prev_block, + 
blocks, + proofs, + should_verify: true, + }) + } else { + None + } + } + + async fn prepare_dummy_proof_operation( + &mut self, + storage: &mut StorageProcessor<'_>, + ready_for_proof_blocks: Vec, + last_sealed_block: L1BatchNumber, + ) -> Option { + if let Some(blocks) = extract_ready_subrange( + storage, + &mut self.proof_criterion, + ready_for_proof_blocks, + last_sealed_block, + ) + .await + { + let prev_block_number = blocks.first().map(|bl| bl.header.number - 1)?; + let prev_block = storage.blocks_dal().get_block_metadata(prev_block_number)?; + + Some(BlocksProofOperation { + prev_block, + blocks, + proofs: vec![], + should_verify: false, + }) + } else { + None + } + } + + async fn get_proof_operation( + &mut self, + storage: &mut StorageProcessor<'_>, + limit: usize, + last_sealed_block: L1BatchNumber, + ) -> Option { + match self.config.proof_sending_mode { + ProofSendingMode::OnlyRealProofs => Self::load_real_proof_operation(storage), + ProofSendingMode::SkipEveryProof => { + let ready_for_proof_blocks = + storage.blocks_dal().get_ready_for_dummy_proof_blocks(limit); + self.prepare_dummy_proof_operation( + storage, + ready_for_proof_blocks, + last_sealed_block, + ) + .await + } + ProofSendingMode::OnlySampledProofs => { + // if there is a sampled proof then send it, otherwise check for skipped ones. 
+ if let Some(op) = Self::load_real_proof_operation(storage) { + Some(op) + } else { + let ready_for_proof_blocks = + storage.blocks_dal().get_skipped_for_proof_blocks(limit); + self.prepare_dummy_proof_operation( + storage, + ready_for_proof_blocks, + last_sealed_block, + ) + .await + } + } + } + } +} + +async fn extract_ready_subrange( + storage: &mut StorageProcessor<'_>, + publish_criteria: &mut [Box], + unpublished_blocks: Vec, + last_sealed_block: L1BatchNumber, +) -> Option> { + let mut last_block: Option = None; + for crit in publish_criteria.iter_mut() { + if let Some(crit_block) = crit + .last_block_to_publish(storage, &unpublished_blocks, last_sealed_block) + .await + { + last_block = last_block.map_or(Some(crit_block), |block| Some(block.min(crit_block))); + } + } + last_block.map(|last_block| { + unpublished_blocks + .into_iter() + .take_while(|bl| bl.header.number <= last_block) + .collect() + }) +} diff --git a/core/bin/zksync_core/src/eth_sender/block_publish_criterion.rs b/core/bin/zksync_core/src/eth_sender/block_publish_criterion.rs new file mode 100644 index 000000000000..907ccc3ca97a --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/block_publish_criterion.rs @@ -0,0 +1,255 @@ +use crate::gas_tracker::agg_block_base_cost; +use async_trait::async_trait; +use chrono::Utc; +use zksync_dal::StorageProcessor; +use zksync_types::commitment::BlockWithMetadata; +use zksync_types::{aggregated_operations::AggregatedActionType, L1BatchNumber}; + +#[async_trait] +pub trait BlockPublishCriterion: std::fmt::Debug + Send + Sync { + // returns None if there is no need to publish any blocks + // otherwise returns the block height of the last block that needs to be published + async fn last_block_to_publish( + &mut self, + storage: &mut StorageProcessor<'_>, + consecutive_blocks: &[BlockWithMetadata], + last_sealed_block: L1BatchNumber, + ) -> Option; + + fn name(&self) -> &'static str; +} + +#[derive(Debug)] +pub struct BlockNumberCriterion { + pub op: 
AggregatedActionType, + // maximum number of blocks to be packed together + pub limit: u32, +} + +#[async_trait] +impl BlockPublishCriterion for BlockNumberCriterion { + async fn last_block_to_publish( + &mut self, + _storage: &mut StorageProcessor<'_>, + consecutive_blocks: &[BlockWithMetadata], + _last_sealed_block: L1BatchNumber, + ) -> Option { + { + let mut block_heights = consecutive_blocks.iter().map(|block| block.header.number.0); + block_heights.next().and_then(|first| { + let last_block_height = block_heights.last().unwrap_or(first); + let blocks_count = last_block_height - first + 1; + if blocks_count >= self.limit { + let result = L1BatchNumber(first + self.limit - 1); + vlog::debug!( + "{} block range {}-{}: NUMBER {} triggered", + self.op.to_string(), + first, + result.0, + self.limit + ); + metrics::counter!( + "server.eth_sender.block_aggregation_reason", + 1, + "type" => "number", + "op" => self.op.to_string() + ); + Some(result) + } else { + None + } + }) + } + } + + fn name(&self) -> &'static str { + "block_number" + } +} + +#[derive(Debug)] +pub struct TimestampDeadlineCriterion { + pub op: AggregatedActionType, + // Maximum block age in seconds. Once reached, we pack and publish all the available blocks. + pub deadline_seconds: u64, + // If `max_allowed_lag` is some and last block sent to L1 is more than `max_allowed_lag` behind, + // it means that sender is lagging significantly and we shouldn't apply this criteria to use all capacity + // and avoid packing small ranges. 
+ pub max_allowed_lag: Option, +} + +#[async_trait] +impl BlockPublishCriterion for TimestampDeadlineCriterion { + async fn last_block_to_publish( + &mut self, + _storage: &mut StorageProcessor<'_>, + consecutive_blocks: &[BlockWithMetadata], + last_sealed_block: L1BatchNumber, + ) -> Option { + consecutive_blocks.iter().next().and_then(|first_block| { + let last_block_number = consecutive_blocks.iter().last().unwrap().header.number.0; + if let Some(max_allowed_lag) = self.max_allowed_lag { + if last_sealed_block.0 - last_block_number >= max_allowed_lag as u32 { + return None; + } + } + let oldest_block_age_seconds = + Utc::now().timestamp() as u64 - first_block.header.timestamp; + if oldest_block_age_seconds >= self.deadline_seconds { + let result = consecutive_blocks + .last() + .unwrap_or(first_block) + .header + .number; + vlog::debug!( + "{} block range {}-{}: TIMESTAMP triggered", + self.op.to_string(), + first_block.header.number.0, + result.0 + ); + metrics::counter!( + "server.eth_sender.block_aggregation_reason", + 1, + "type" => "timestamp", + "op" => self.op.to_string() + ); + Some(result) + } else { + None + } + }) + } + fn name(&self) -> &'static str { + "timestamp" + } +} + +#[derive(Debug)] +pub struct GasCriterion { + pub op: AggregatedActionType, + pub gas_limit: u32, +} + +impl GasCriterion { + pub fn new(op: AggregatedActionType, gas_limit: u32) -> GasCriterion { + GasCriterion { op, gas_limit } + } + + async fn get_gas_amount( + &mut self, + storage: &mut StorageProcessor<'_>, + block_number: L1BatchNumber, + ) -> u32 { + storage + .blocks_dal() + .get_blocks_predicted_gas(block_number, block_number, self.op) + } +} + +#[async_trait] +impl BlockPublishCriterion for GasCriterion { + async fn last_block_to_publish( + &mut self, + storage: &mut StorageProcessor<'_>, + consecutive_blocks: &[BlockWithMetadata], + _last_sealed_block: L1BatchNumber, + ) -> Option { + let base_cost = agg_block_base_cost(self.op); + assert!( + self.gas_limit > 
base_cost, + "Config max gas cost for operations is too low" + ); + // We're not sure our predictions are accurate, so it's safer to lower the gas limit by 10% + let mut gas_left = (self.gas_limit as f64 * 0.9).round() as u32 - base_cost; + + let mut last_block: Option = None; + for (index, block) in consecutive_blocks.iter().enumerate() { + let block_gas = self.get_gas_amount(storage, block.header.number).await; + if block_gas >= gas_left { + if index == 0 { + panic!( + "block {} requires {} gas, which is more than the range limit of {}", + block.header.number, block_gas, self.gas_limit + ) + } + last_block = Some(L1BatchNumber(block.header.number.0 - 1)); + break; + } else { + gas_left -= block_gas; + } + } + + if last_block.is_some() { + vlog::debug!( + "{} block range {}-{}: GAS {} triggered", + self.op.to_string(), + consecutive_blocks.first().unwrap().header.number.0, + last_block.unwrap().0, + self.gas_limit - gas_left, + ); + metrics::counter!( + "server.eth_sender.block_aggregation_reason", + 1, + "type" => "gas", + "op" => self.op.to_string() + ); + } + last_block + } + fn name(&self) -> &'static str { + "gas_limit" + } +} + +#[derive(Debug)] +pub struct DataSizeCriterion { + pub op: AggregatedActionType, + pub data_limit: usize, +} + +#[async_trait] +impl BlockPublishCriterion for DataSizeCriterion { + async fn last_block_to_publish( + &mut self, + _storage: &mut StorageProcessor<'_>, + consecutive_blocks: &[BlockWithMetadata], + _last_sealed_block: L1BatchNumber, + ) -> Option { + const STORED_BLOCK_INFO_SIZE: usize = 96; // size of `StoredBlockInfo` solidity struct + let mut data_size_left = self.data_limit - STORED_BLOCK_INFO_SIZE; + + for (index, block) in consecutive_blocks.iter().enumerate() { + if data_size_left < block.l1_commit_data_size() { + if index == 0 { + panic!( + "block {} requires {} data, which is more than the range limit of {}", + block.header.number, + block.l1_commit_data_size(), + self.data_limit + ) + } + vlog::debug!( + "{} 
block range {}-{}: DATA LIMIT {} triggered", + self.op.to_string(), + consecutive_blocks.first().unwrap().header.number.0, + block.header.number.0 - 1, + self.data_limit - data_size_left, + ); + metrics::counter!( + "server.eth_sender.block_aggregation_reason", + 1, + "type" => "data_size", + "op" => self.op.to_string() + ); + return Some(block.header.number - 1); + } + data_size_left -= block.l1_commit_data_size(); + } + + None + } + + fn name(&self) -> &'static str { + "data_size" + } +} diff --git a/core/bin/zksync_core/src/eth_sender/error.rs b/core/bin/zksync_core/src/eth_sender/error.rs new file mode 100644 index 000000000000..be2912e05c70 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/error.rs @@ -0,0 +1,7 @@ +use zksync_eth_client::clients::http_client::Error; + +#[derive(Debug, thiserror::Error)] +pub enum ETHSenderError { + #[error("Ethereum gateway Error {0}")] + EthereumGateWayError(#[from] Error), +} diff --git a/core/bin/zksync_core/src/eth_sender/eth_tx_aggregator.rs b/core/bin/zksync_core/src/eth_sender/eth_tx_aggregator.rs new file mode 100644 index 000000000000..67ae2a3702be --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/eth_tx_aggregator.rs @@ -0,0 +1,174 @@ +use crate::eth_sender::grafana_metrics::track_eth_tx_metrics; +use crate::eth_sender::zksync_functions::ZkSyncFunctions; +use crate::eth_sender::{zksync_functions, Aggregator, ETHSenderError}; +use crate::gas_tracker::agg_block_base_cost; +use std::cmp::max; +use tokio::sync::watch; +use zksync_config::configs::eth_sender::SenderConfig; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_types::aggregated_operations::AggregatedOperation; +use zksync_types::eth_sender::EthTx; +use zksync_types::Address; + +/// The component is responsible for aggregating l1 batches into eth_txs: +/// Such as CommitBlocks, PublishProofBlocksOnchain and ExecuteBlock +/// These eth_txs will be used as a queue for generating signed txs and send them later +#[derive(Debug)] +pub 
struct EthTxAggregator { + aggregator: Aggregator, + config: SenderConfig, + contract_address: Address, + functions: ZkSyncFunctions, + base_nonce: u64, +} + +impl EthTxAggregator { + pub fn new( + config: SenderConfig, + aggregator: Aggregator, + contract_address: Address, + base_nonce: u64, + ) -> Self { + let functions = zksync_functions::get_zksync_functions(); + Self { + base_nonce, + aggregator, + config, + contract_address, + functions, + } + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + loop { + let mut storage = pool.access_storage().await; + + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, eth_tx_aggregator is shutting down"); + break; + } + + if let Err(e) = self.loop_iteration(&mut storage).await { + // Web3 API request failures can cause this, + // and anything more important is already properly reported. + vlog::warn!("eth_sender error {:?}", e); + } + + tokio::time::sleep(self.config.tx_poll_period()).await; + } + } + + #[tracing::instrument(skip(self, storage))] + async fn loop_iteration( + &mut self, + storage: &mut StorageProcessor<'_>, + ) -> Result<(), ETHSenderError> { + if let Some(agg_op) = self.aggregator.get_next_ready_operation(storage).await { + let tx = self.save_eth_tx(storage, &agg_op).await?; + Self::log_eth_tx_saving(storage, agg_op, &tx).await; + } + Ok(()) + } + + async fn log_eth_tx_saving( + storage: &mut StorageProcessor<'_>, + aggregated_op: AggregatedOperation, + tx: &EthTx, + ) { + vlog::info!( + "eth_tx {} {} ({}-{}): saved", + tx.id, + aggregated_op.get_action_caption(), + aggregated_op.get_block_range().0 .0, + aggregated_op.get_block_range().1 .0, + ); + + if let AggregatedOperation::CommitBlocks(commit_op) = &aggregated_op { + for block in &commit_op.blocks { + metrics::histogram!( + "server.eth_sender.pubdata_size", + block.metadata.l2_l1_messages_compressed.len() as f64, + "kind" => "l2_l1_messages_compressed" + ); + metrics::histogram!( + 
"server.eth_sender.pubdata_size", + block.metadata.initial_writes_compressed.len() as f64, + "kind" => "initial_writes_compressed" + ); + metrics::histogram!( + "server.eth_sender.pubdata_size", + block.metadata.repeated_writes_compressed.len() as f64, + "kind" => "repeated_writes_compressed" + ); + } + } + + metrics::histogram!( + "server.eth_sender.block_range_size", + (aggregated_op.get_block_range().1.0 - aggregated_op.get_block_range().0.0 + 1) as f64, + "type" => aggregated_op.get_action_type().to_string() + ); + track_eth_tx_metrics(storage, "save", tx); + } + + fn encode_aggregated_op(&self, op: &AggregatedOperation) -> Vec { + match &op { + AggregatedOperation::CommitBlocks(commit_blocks) => self + .functions + .commit_blocks + .encode_input(&commit_blocks.get_eth_tx_args()), + AggregatedOperation::PublishProofBlocksOnchain(prove_blocks) => self + .functions + .prove_blocks + .encode_input(&prove_blocks.get_eth_tx_args()), + AggregatedOperation::ExecuteBlocks(execute_blocks) => self + .functions + .execute_blocks + .encode_input(&execute_blocks.get_eth_tx_args()), + } + .expect("Failed to encode transaction data.") + .to_vec() + } + + pub(super) async fn save_eth_tx( + &self, + storage: &mut StorageProcessor<'_>, + aggregated_op: &AggregatedOperation, + ) -> Result { + let mut transaction = storage.start_transaction().await; + let nonce = self.get_next_nonce(&mut transaction).await?; + let calldata = self.encode_aggregated_op(aggregated_op); + let (first_block, last_block) = aggregated_op.get_block_range(); + let op_type = aggregated_op.get_action_type(); + + let blocks_predicted_gas = + transaction + .blocks_dal() + .get_blocks_predicted_gas(first_block, last_block, op_type); + let eth_tx_predicted_gas = agg_block_base_cost(op_type) + blocks_predicted_gas; + + let eth_tx = transaction.eth_sender_dal().save_eth_tx( + nonce, + calldata, + op_type, + self.contract_address, + eth_tx_predicted_gas, + ); + + transaction + .blocks_dal() + 
.set_eth_tx_id(first_block, last_block, eth_tx.id, op_type); + transaction.commit().await; + Ok(eth_tx) + } + + async fn get_next_nonce( + &self, + storage: &mut StorageProcessor<'_>, + ) -> Result { + let db_nonce = storage.eth_sender_dal().get_next_nonce().unwrap_or(0); + // Between server starts we can execute some txs using operator account or remove some txs from the database + // At the start we have to consider this fact and get the max nonce. + Ok(max(db_nonce, self.base_nonce)) + } +} diff --git a/core/bin/zksync_core/src/eth_sender/eth_tx_manager.rs b/core/bin/zksync_core/src/eth_sender/eth_tx_manager.rs new file mode 100644 index 000000000000..60457df09bdc --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/eth_tx_manager.rs @@ -0,0 +1,609 @@ +use std::sync::Arc; +use tokio::sync::watch; + +use zksync_config::configs::eth_sender::SenderConfig; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_eth_client::{ + clients::http_client::{Error, ExecutedTxStatus, SignedCallResult}, + EthInterface, +}; +use zksync_types::{ + eth_sender::EthTx, + web3::{contract::Options, error::Error as Web3Error}, + L1BlockNumber, H256, U256, +}; +use zksync_utils::time::seconds_since_epoch; + +use crate::eth_sender::grafana_metrics::track_eth_tx_metrics; +use crate::eth_sender::ETHSenderError; +use crate::gas_adjuster::GasAdjuster; + +#[derive(Debug)] +struct EthFee { + base_fee_per_gas: u64, + priority_fee_per_gas: u64, +} + +#[derive(Debug)] +struct OperatorNonce { + // Nonce on block `current_block - self.wait_confirmations` + lagging: u64, + // Nonce on block `current_block` + current: u64, +} + +/// The component is responsible for managing sending eth_txs attempts: +/// Based on eth_tx queue the component generates new attempt with the minimum possible fee, +/// save it to the database, and send it to ethereum. 
+/// Based on eth_tx_history queue the component can mark txs as stuck and create the new attempt +/// with higher gas price +#[derive(Debug)] +pub struct EthTxManager { + ethereum_gateway: E, + config: SenderConfig, + gas_adjuster: Arc>, +} + +impl EthTxManager { + pub fn new( + config: SenderConfig, + gas_adjuster: Arc>, + ethereum_gateway: E, + ) -> Self { + Self { + ethereum_gateway, + config, + gas_adjuster, + } + } + + async fn get_tx_status_and_confirmations_count( + &self, + tx_hash: H256, + current_block: L1BlockNumber, + ) -> Result, ETHSenderError> { + let status = self + .ethereum_gateway + .get_tx_status(tx_hash, "eth_tx_manager") + .await?; + if let Some(status) = status { + // Amount of confirmations for a block containing the transaction. + let confirmations = (current_block.0 as u64) + .saturating_sub(status.receipt.block_number.unwrap().as_u64()); + return Ok(Some((status, confirmations))); + } + Ok(None) + } + + async fn check_all_sending_attempts( + &self, + storage: &mut StorageProcessor<'_>, + op: &EthTx, + current_block: L1BlockNumber, + ) -> Option<(ExecutedTxStatus, u64)> { + // Checking history items, starting from most recently sent. + for history_item in storage.eth_sender_dal().get_tx_history_to_check(op.id) { + // `status` is a Result here and we don't unwrap it with `?` + // because if we do and get an `Err`, we won't finish the for loop, + // which means we might miss the transaction that actually succeeded. 
+ match self + .get_tx_status_and_confirmations_count(history_item.tx_hash, current_block) + .await + { + Ok(Some(s)) => return Some(s), + Ok(_) => continue, + Err(err) => vlog::warn!("Can't check transaction {:?}", err), + } + } + None + } + + fn calculate_fee( + &self, + storage: &mut StorageProcessor<'_>, + tx: &EthTx, + time_in_mempool: u32, + ) -> Result { + let base_fee_per_gas = self.gas_adjuster.get_base_fee(time_in_mempool); + + let priority_fee_per_gas = if time_in_mempool != 0 { + metrics::increment_counter!("server.eth_sender.transaction_resent"); + let priority_fee_per_gas = + self.increase_priority_fee(storage, tx.id, base_fee_per_gas)?; + vlog::info!( + "Resending operation {} with base fee {:?} and priority fee {:?}", + tx.id, + base_fee_per_gas, + priority_fee_per_gas + ); + priority_fee_per_gas + } else { + self.gas_adjuster.get_priority_fee() + }; + + // Extra check to prevent sending transaction will extremely high priority fee. + const MAX_ACCEPTABLE_PRIORITY_FEE: u64 = 10u64.pow(11); // 100 gwei + if priority_fee_per_gas > MAX_ACCEPTABLE_PRIORITY_FEE { + panic!( + "Extremely high value of priority_fee_per_gas is suggested: {}, while max acceptable is {}", + priority_fee_per_gas, + MAX_ACCEPTABLE_PRIORITY_FEE + ); + } + + Ok(EthFee { + base_fee_per_gas, + priority_fee_per_gas, + }) + } + + fn increase_priority_fee( + &self, + storage: &mut StorageProcessor<'_>, + eth_tx_id: u32, + base_fee_per_gas: u64, + ) -> Result { + let previous_sent_tx = storage + .eth_sender_dal() + .get_last_sent_eth_tx(eth_tx_id) + .unwrap(); + + let previous_base_fee = previous_sent_tx.base_fee_per_gas; + let previous_priority_fee = previous_sent_tx.priority_fee_per_gas; + let next_block_minimal_base_fee = self.gas_adjuster.get_next_block_minimal_base_fee(); + + if base_fee_per_gas <= next_block_minimal_base_fee.min(previous_base_fee) { + // If the base fee is lower than the previous used one + // or is lower than the minimal possible value for the next block, sending 
is skipped. + vlog::info!( + "Skipping gas adjustment for operation {}, \ + base_fee_per_gas: suggested for resending {:?}, previously sent {:?}, next block minimum {:?}", + eth_tx_id, + base_fee_per_gas, + previous_base_fee, + next_block_minimal_base_fee + ); + return Err(ETHSenderError::from(Error::from(Web3Error::Internal))); + } + + // Increase `priority_fee_per_gas` by at least 10% to prevent "replacement transaction underpriced" error. + Ok((previous_priority_fee + (previous_priority_fee / 10) + 1) + .max(self.gas_adjuster.get_priority_fee())) + } + + pub(crate) async fn send_eth_tx( + &mut self, + storage: &mut StorageProcessor<'_>, + tx: &EthTx, + time_in_mempool: u32, + current_block: L1BlockNumber, + ) -> Result { + let EthFee { + base_fee_per_gas, + priority_fee_per_gas, + } = self.calculate_fee(storage, tx, time_in_mempool)?; + + metrics::histogram!( + "server.eth_sender.used_base_fee_per_gas", + base_fee_per_gas as f64 + ); + + metrics::histogram!( + "server.eth_sender.used_priority_fee_per_gas", + priority_fee_per_gas as f64 + ); + + let signed_tx = self + .sign_tx(tx, base_fee_per_gas, priority_fee_per_gas) + .await; + + let tx_history_id = storage.eth_sender_dal().insert_tx_history( + tx.id, + base_fee_per_gas, + priority_fee_per_gas, + signed_tx.hash, + signed_tx.raw_tx.clone(), + ); + + if let Err(error) = self + .send_raw_transaction(storage, tx_history_id, signed_tx.raw_tx, current_block) + .await + { + vlog::warn!( + "Error when sending new signed tx for tx {}, base_fee_per_gas {}, priority_fee_per_gas: {}: {}", + tx.id, + base_fee_per_gas, + priority_fee_per_gas, + error + ); + } + + Ok(signed_tx.hash) + } + + async fn send_raw_transaction( + &self, + storage: &mut StorageProcessor<'_>, + tx_history_id: u32, + raw_tx: Vec, + current_block: L1BlockNumber, + ) -> Result { + match self.ethereum_gateway.send_raw_tx(raw_tx).await { + Ok(tx_hash) => { + storage + .eth_sender_dal() + .set_sent_at_block(tx_history_id, current_block.0); + Ok(tx_hash) + 
} + Err(error) => { + storage.eth_sender_dal().remove_tx_history(tx_history_id); + Err(error.into()) + } + } + } + + async fn get_operator_nonce( + &self, + current_block: L1BlockNumber, + ) -> Result { + let lagging = self + .ethereum_gateway + .nonce_at( + current_block + .saturating_sub(self.config.wait_confirmations as u32) + .into(), + "eth_tx_manager", + ) + .await? + .as_u64(); + + let current = self + .ethereum_gateway + .current_nonce("eth_tx_manager") + .await? + .as_u64(); + Ok(OperatorNonce { lagging, current }) + } + + // Monitors the inflight transactions, marks mined ones as confirmed, + // returns the one that has to be resent (if there is one). + pub(super) async fn monitor_inflight_transactions( + &mut self, + storage: &mut StorageProcessor<'_>, + current_block: L1BlockNumber, + ) -> Result, ETHSenderError> { + metrics::gauge!( + "server.eth_sender.last_known_l1_block", + current_block.0 as f64 + ); + + let operator_nonce = self.get_operator_nonce(current_block).await?; + + // Not confirmed transactions, ordered by nonce + for tx in storage.eth_sender_dal().get_inflight_txs() { + vlog::debug!( + "Going through not confirmed txs. \ + Current block: {}, current tx id: {}, \ + sender's nonce on block `current block - number of confirmations`: {}", + current_block, + tx.id, + operator_nonce.lagging + ); + + // If the `current_sender_nonce` <= `tx.nonce`, this means + // that `tx` is not mined and we should resend it. + // We only resend the first unmined transaction. + if operator_nonce.current <= tx.nonce { + // None means txs hasn't been sent yet + if let Some(first_sent_at_block) = storage + .eth_sender_dal() + .get_block_number_on_first_sent_attempt(tx.id) + { + return Ok(Some((tx, first_sent_at_block))); + } else { + vlog::warn!("ETH Tx {} wasn't send", tx.id); + } + continue; + } + + // If on block `current_block - self.wait_confirmations` + // sender's nonce was > tx.nonce, then `tx` is mined and confirmed (either successful or reverted). 
+ // Only then we will check the history to find the receipt. + // Otherwise, `tx` is mined but not confirmed, so we skip to the next one. + if operator_nonce.lagging <= tx.nonce { + continue; + } + + vlog::debug!( + "Sender's nonce on block `current block - number of confirmations` is greater than current tx's nonce. \ + Checking transaction with id {}. Tx nonce is equal to {}", + tx.id, + tx.nonce, + ); + + match self + .check_all_sending_attempts(storage, &tx, current_block) + .await + { + Some((tx_status, confirmations)) => { + self.apply_tx_status(storage, &tx, tx_status, confirmations, current_block) + .await; + } + None => { + // The nonce has increased but we did not find the receipt. + // This is an error because such a big reorg may cause transactions that were + // previously recorded as confirmed to become pending again and we have to + // make sure it's not the case - otherwire eth_sender may not work properly. + vlog::error!( + "Possible block reorgs: nonce increase detected {} blocks ago, but no tx receipt found for tx {:?}", + self.config.wait_confirmations, + &tx + ); + } + } + } + Ok(None) + } + + async fn sign_tx( + &self, + tx: &EthTx, + base_fee_per_gas: u64, + priority_fee_per_gas: u64, + ) -> SignedCallResult { + self.ethereum_gateway + .sign_prepared_tx_for_addr( + tx.raw_tx.clone(), + tx.contract_address, + Options::with(|opt| { + opt.gas = Some(self.config.max_aggregated_tx_gas.into()); + opt.max_fee_per_gas = Some(U256::from(base_fee_per_gas + priority_fee_per_gas)); + opt.max_priority_fee_per_gas = Some(U256::from(priority_fee_per_gas)); + opt.nonce = Some(tx.nonce.into()); + }), + "eth_tx_manager", + ) + .await + .expect("Failed to sign transaction") + } + + async fn send_unsent_txs( + &mut self, + storage: &mut StorageProcessor<'_>, + current_block: L1BlockNumber, + ) { + for tx in storage.eth_sender_dal().get_unsent_txs() { + // Check already sent txs not marked as sent and mark them as sent. 
+ // The common reason for this behaviour is that we sent tx and stop the server + // before updating the database + let tx_status = self + .get_tx_status_and_confirmations_count(tx.tx_hash, current_block) + .await; + + if let Ok(Some((tx_status, confirmations))) = tx_status { + vlog::info!("The tx {:?} has been already sent", tx.tx_hash); + storage + .eth_sender_dal() + .set_sent_at_block(tx.id, tx_status.receipt.block_number.unwrap().as_u32()); + + let eth_tx = storage + .eth_sender_dal() + .get_eth_tx(tx.eth_tx_id) + .expect("Eth tx should exist"); + + self.apply_tx_status(storage, ð_tx, tx_status, confirmations, current_block) + .await; + } else if let Err(error) = self + .send_raw_transaction(storage, tx.id, tx.signed_raw_tx.clone(), current_block) + .await + { + vlog::warn!("Error {:?} in sending tx {:?}", error, &tx); + } + } + } + + async fn apply_tx_status( + &self, + storage: &mut StorageProcessor<'_>, + tx: &EthTx, + tx_status: ExecutedTxStatus, + confirmations: u64, + current_block: L1BlockNumber, + ) { + if confirmations >= self.config.wait_confirmations { + if tx_status.success { + self.confirm_tx(storage, tx, tx_status, current_block); + } else { + self.fail_tx(storage, tx, tx_status).await; + } + } else { + vlog::debug!( + "There is {} confirmations for transaction with history item tx hash: {} and id: {}. 
\ + But {} number of confirmations is required", + confirmations, + tx_status.tx_hash, + tx.id, + self.config.wait_confirmations + ); + } + } + + pub async fn fail_tx( + &self, + storage: &mut StorageProcessor<'_>, + tx: &EthTx, + tx_status: ExecutedTxStatus, + ) { + storage.eth_sender_dal().mark_failed_transaction(tx.id); + let failure_reason = self + .ethereum_gateway + .failure_reason(tx_status.receipt.transaction_hash) + .await + .expect( + "Tx is already failed, it's safe to fail here and apply the status on the next run", + ); + + vlog::error!( + "Eth tx failed {:?}, {:?}, failure reason {:?}", + tx, + tx_status.receipt, + failure_reason + ); + panic!("We can't operate after tx fail"); + } + + pub fn confirm_tx( + &self, + storage: &mut StorageProcessor<'_>, + tx: &EthTx, + tx_status: ExecutedTxStatus, + current_block: L1BlockNumber, + ) { + let tx_hash = tx_status.receipt.transaction_hash; + let gas_used = tx_status + .receipt + .gas_used + .expect("light ETH clients are not supported"); + + storage + .eth_sender_dal() + .confirm_tx(tx_status.tx_hash, gas_used); + + track_eth_tx_metrics(storage, "mined", tx); + + if gas_used > U256::from(tx.predicted_gas_cost) { + vlog::error!( + "Predicted gas {} lower than used gas {} for tx {:?} {}", + tx.predicted_gas_cost, + gas_used, + tx.tx_type, + tx.id + ); + } + vlog::info!( + "eth_tx {} with hash {:?} for {} is confirmed. 
Gas spent: {:?}", + tx.id, + tx_hash, + tx.tx_type.to_string(), + gas_used + ); + metrics::histogram!( + "server.eth_sender.l1_gas_used", + gas_used.low_u128() as f64, + "type" => tx.tx_type.to_string() + ); + metrics::histogram!( + "server.eth_sender.l1_tx_mined_latency", + (seconds_since_epoch() - tx.created_at_timestamp) as f64, + "type" => tx.tx_type.to_string() + ); + + let sent_at_block = storage + .eth_sender_dal() + .get_block_number_on_first_sent_attempt(tx.id) + .unwrap_or(0); + metrics::histogram!( + "server.eth_sender.l1_blocks_waited_in_mempool", + (current_block.0 - sent_at_block - self.config.wait_confirmations as u32) as f64, + "type" => tx.tx_type.to_string() + ); + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + { + let current_block = L1BlockNumber( + self.ethereum_gateway + .block_number("etx_tx_manager") + .await + .unwrap() + .as_u32(), + ); + let mut storage = pool.access_storage().await; + self.send_unsent_txs(&mut storage, current_block).await; + } + + // It's mandatory to set last_known_l1_block to zero, otherwise the first iteration + // will never check inflight txs status + let mut last_known_l1_block = L1BlockNumber(0); + loop { + let mut storage = pool.access_storage().await; + + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, eth_tx_manager is shutting down"); + break; + } + + match self.loop_iteration(&mut storage, last_known_l1_block).await { + Ok(block) => last_known_l1_block = block, + Err(e) => { + // Web3 API request failures can cause this, + // and anything more important is already properly reported. 
+ vlog::warn!("eth_sender error {:?}", e); + } + } + + tokio::time::sleep(self.config.tx_poll_period()).await; + } + } + + async fn send_new_eth_txs( + &mut self, + storage: &mut StorageProcessor<'_>, + current_block: L1BlockNumber, + ) { + let number_inflight_txs = storage.eth_sender_dal().get_inflight_txs().len(); + let number_of_available_slots_for_eth_txs = self + .config + .max_txs_in_flight + .saturating_sub(number_inflight_txs as u64); + + if number_of_available_slots_for_eth_txs > 0 { + // Get the new eth tx and create history item for them + let new_eth_tx = storage + .eth_sender_dal() + .get_new_eth_txs(number_of_available_slots_for_eth_txs); + + for tx in new_eth_tx { + let _ = self.send_eth_tx(storage, &tx, 0, current_block).await; + } + } + } + + #[tracing::instrument(skip(self, storage))] + async fn loop_iteration( + &mut self, + storage: &mut StorageProcessor<'_>, + previous_block: L1BlockNumber, + ) -> Result { + let current_block = L1BlockNumber( + self.ethereum_gateway + .block_number("eth_tx_manager") + .await? + .as_u32(), + ); + + self.send_new_eth_txs(storage, current_block).await; + + if current_block <= previous_block { + // Nothing to do - no new blocks were mined. + return Ok(current_block); + } + + if let Some((tx, sent_at_block)) = self + .monitor_inflight_transactions(storage, current_block) + .await? + { + // New gas price depends on the time this tx spent in mempool. + let time_in_mempool = current_block.0 - sent_at_block; + + // We don't want to return early in case resend does not succeed - + // the error is logged anyway, but early returns will prevent + // sending new operations. 
+ let _ = self + .send_eth_tx(storage, &tx, time_in_mempool, current_block) + .await; + } + + Ok(current_block) + } +} diff --git a/core/bin/zksync_core/src/eth_sender/grafana_metrics.rs b/core/bin/zksync_core/src/eth_sender/grafana_metrics.rs new file mode 100644 index 000000000000..904bdcd88640 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/grafana_metrics.rs @@ -0,0 +1,42 @@ +use std::time::Instant; +use zksync_dal::StorageProcessor; +use zksync_types::eth_sender::EthTx; +use zksync_utils::time::seconds_since_epoch; + +pub fn track_eth_tx_metrics(connection: &mut StorageProcessor<'_>, l1_stage: &str, tx: &EthTx) { + let start = Instant::now(); + let stage = format!("l1_{}_{}", l1_stage, tx.tx_type.to_string()); + + let blocks = connection.blocks_dal().get_blocks_for_eth_tx_id(tx.id); + + // This should be only the case when some blocks were reverted. + if blocks.is_empty() { + vlog::warn!("No blocks were found for eth_tx with id = {}", tx.id); + return; + } + + metrics::gauge!( + "server.block_number", + blocks.last().unwrap().number.0 as f64, + "stage" => stage.clone() + ); + for block in blocks { + metrics::histogram!( + "server.block_latency", + (seconds_since_epoch() - block.timestamp) as f64, + "stage" => stage.clone() + ); + metrics::counter!( + "server.processed_txs", + block.tx_count() as u64, + "stage" => stage.clone() + ); + metrics::counter!( + "server.processed_l1_txs", + block.l1_tx_count as u64, + "stage" => stage.clone() + ); + } + + metrics::histogram!("server.eth_sender.metrics.latency", start.elapsed()); +} diff --git a/core/bin/zksync_core/src/eth_sender/mod.rs b/core/bin/zksync_core/src/eth_sender/mod.rs new file mode 100644 index 000000000000..2e5044be81e9 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/mod.rs @@ -0,0 +1,16 @@ +mod aggregator; +mod block_publish_criterion; + +mod error; +mod eth_tx_aggregator; +mod eth_tx_manager; +mod grafana_metrics; +mod zksync_functions; + +#[cfg(test)] +mod tests; + +pub use 
aggregator::Aggregator; +pub use error::ETHSenderError; +pub use eth_tx_aggregator::EthTxAggregator; +pub use eth_tx_manager::EthTxManager; diff --git a/core/bin/zksync_core/src/eth_sender/tests.rs b/core/bin/zksync_core/src/eth_sender/tests.rs new file mode 100644 index 000000000000..e85dfa014775 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/tests.rs @@ -0,0 +1,397 @@ +use crate::eth_sender::{Aggregator, EthTxAggregator, EthTxManager}; +use crate::gas_adjuster::GasAdjuster; +use db_test_macro::db_test; +use zksync_config::{ + configs::eth_sender::{ProofSendingMode, SenderConfig}, + ETHSenderConfig, GasAdjusterConfig, +}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_eth_client::clients::{http_client::EthInterface, mock::MockEthereum}; +use zksync_types::{ + aggregated_operations::{AggregatedOperation, BlocksExecuteOperation}, + Address, L1BlockNumber, +}; + +use std::sync::Arc; + +// Alias to conveniently call static methods of ETHSender. +type MockEthTxManager = EthTxManager>; + +const DUMMY_OPERATION: AggregatedOperation = + AggregatedOperation::ExecuteBlocks(BlocksExecuteOperation { blocks: vec![] }); + +#[derive(Debug)] +struct EthSenderTester { + conn: ConnectionPool, + gateway: Arc, + manager: MockEthTxManager, + aggregator: EthTxAggregator, + gas_adjuster: Arc>>, +} + +impl EthSenderTester { + const WAIT_CONFIRMATIONS: u64 = 10; + const MAX_BASE_FEE_SAMPLES: usize = 7; + + async fn new(connection_pool: ConnectionPool, history: Vec) -> Self { + let eth_sender_config = ETHSenderConfig::from_env(); + let aggregator_config = SenderConfig { + aggregated_proof_sizes: vec![1], + ..eth_sender_config.sender + }; + + let gateway = Arc::new(MockEthereum::default().with_fee_history(history)); + + let gas_adjuster = Arc::new( + GasAdjuster::new( + gateway.clone(), + GasAdjusterConfig { + max_base_fee_samples: Self::MAX_BASE_FEE_SAMPLES, + pricing_formula_parameter_a: 3.0, + pricing_formula_parameter_b: 2.0, + ..eth_sender_config.gas_adjuster 
+ }, + ) + .await + .unwrap(), + ); + + let aggregator = EthTxAggregator::new( + SenderConfig { + wait_confirmations: Self::WAIT_CONFIRMATIONS, + proof_sending_mode: ProofSendingMode::SkipEveryProof, + ..eth_sender_config.sender.clone() + }, + // Aggregator - unused + Aggregator::new(aggregator_config.clone()), + // zkSync contract address + Address::random(), + 0, + ); + + let manager = EthTxManager::new( + SenderConfig { + wait_confirmations: Self::WAIT_CONFIRMATIONS, + ..eth_sender_config.sender + }, + gas_adjuster.clone(), + gateway.clone(), + ); + Self { + gateway, + manager, + aggregator, + gas_adjuster, + conn: connection_pool, + } + } + + async fn storage(&self) -> StorageProcessor<'static> { + self.conn.access_test_storage().await + } +} + +// Tests that we send multiple transactions and confirm them all in one iteration. +#[db_test] +async fn confirm_many(connection_pool: ConnectionPool) -> anyhow::Result<()> { + let mut tester = EthSenderTester::new(connection_pool.clone(), vec![10; 100]).await; + + let mut hashes = vec![]; + + for _ in 0..5 { + let tx = tester + .aggregator + .save_eth_tx(&mut tester.storage().await, &DUMMY_OPERATION) + .await?; + let hash = tester + .manager + .send_eth_tx( + &mut tester.storage().await, + &tx, + 0, + L1BlockNumber(tester.gateway.block_number("").await?.as_u32()), + ) + .await?; + hashes.push(hash); + } + + // check that we sent something + assert_eq!(tester.gateway.sent_txs.read().unwrap().len(), 5); + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 5 + ); + + for hash in hashes { + tester + .gateway + .execute_tx(hash, true, EthSenderTester::WAIT_CONFIRMATIONS)?; + } + + let to_resend = tester + .manager + .monitor_inflight_transactions( + &mut tester.storage().await, + L1BlockNumber(tester.gateway.block_number("a").await.unwrap().as_u32()), + ) + .await?; + + // check that transaction is marked as accepted + assert_eq!( + tester + .storage() + .await + 
.eth_sender_dal() + .get_inflight_txs() + .len(), + 0 + ); + + // also check that we didn't try to resend it + assert!(to_resend.is_none()); + + Ok(()) +} + +// Tests that we resend first unmined transaction every block with an increased gas price. +#[db_test] +async fn resend_each_block(connection_pool: ConnectionPool) -> anyhow::Result<()> { + let mut tester = EthSenderTester::new(connection_pool.clone(), vec![7, 6, 5, 4, 3, 2, 1]).await; + + // after this, median should be 6 + tester.gateway.advance_block_number(3); + tester.gas_adjuster.keep_updated().await?; + + let block = L1BlockNumber(tester.gateway.block_number("").await?.as_u32()); + let tx = tester + .aggregator + .save_eth_tx(&mut tester.storage().await, &DUMMY_OPERATION) + .await?; + + let hash = tester + .manager + .send_eth_tx(&mut tester.storage().await, &tx, 0, block) + .await?; + + // check that we sent something and stored it in the db + assert_eq!(tester.gateway.sent_txs.read().unwrap().len(), 1); + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 1 + ); + + let sent_tx = tester.gateway.sent_txs.read().unwrap()[&hash]; + assert_eq!(sent_tx.hash, hash); + assert_eq!(sent_tx.nonce, 0); + assert_eq!(sent_tx.base_fee.as_usize(), 18); // 6 * 3 * 2^0 + + // now, median is 5 + tester.gateway.advance_block_number(2); + tester.gas_adjuster.keep_updated().await?; + let block = L1BlockNumber(tester.gateway.block_number("").await?.as_u32()); + + let (to_resend, _) = tester + .manager + .monitor_inflight_transactions(&mut tester.storage().await, block) + .await? 
+ .unwrap(); + + let resent_hash = tester + .manager + .send_eth_tx(&mut tester.storage().await, &to_resend, 1, block) + .await?; + + // check that transaction has been resent + assert_eq!(tester.gateway.sent_txs.read().unwrap().len(), 2); + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 1 + ); + + let resent_tx = tester.gateway.sent_txs.read().unwrap()[&resent_hash]; + assert_eq!(resent_tx.nonce, 0); + assert_eq!(resent_tx.base_fee.as_usize(), 30); // 5 * 3 * 2^1 + + Ok(()) +} + +// Tests that if transaction was mined, but not enough blocks has been mined since, +// we won't mark it as confirmed but also won't resend it. +#[db_test] +async fn dont_resend_already_mined(connection_pool: ConnectionPool) -> anyhow::Result<()> { + let mut tester = EthSenderTester::new(connection_pool.clone(), vec![100; 100]).await; + let tx = tester + .aggregator + .save_eth_tx(&mut tester.storage().await, &DUMMY_OPERATION) + .await + .unwrap(); + + let hash = tester + .manager + .send_eth_tx( + &mut tester.storage().await, + &tx, + 0, + L1BlockNumber(tester.gateway.block_number("").await.unwrap().as_u32()), + ) + .await + .unwrap(); + + // check that we sent something and stored it in the db + assert_eq!(tester.gateway.sent_txs.read().unwrap().len(), 1); + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 1 + ); + + // mine the transaction but don't have enough confirmations yet + tester + .gateway + .execute_tx(hash, true, EthSenderTester::WAIT_CONFIRMATIONS - 1)?; + + let to_resend = tester + .manager + .monitor_inflight_transactions( + &mut tester.storage().await, + L1BlockNumber(tester.gateway.block_number("a").await.unwrap().as_u32()), + ) + .await?; + + // check that transaction is still considered inflight + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 1 + ); + + // also check that we didn't try to resend it + 
assert!(to_resend.is_none()); + + Ok(()) +} + +#[db_test] +async fn three_scenarios(connection_pool: ConnectionPool) -> anyhow::Result<()> { + let mut tester = EthSenderTester::new(connection_pool.clone(), vec![100; 100]).await; + let mut hashes = vec![]; + + for _ in 0..3 { + let tx = tester + .aggregator + .save_eth_tx(&mut tester.storage().await, &DUMMY_OPERATION) + .await + .unwrap(); + + let hash = tester + .manager + .send_eth_tx( + &mut tester.storage().await, + &tx, + 0, + L1BlockNumber(tester.gateway.block_number("").await.unwrap().as_u32()), + ) + .await + .unwrap(); + + hashes.push(hash); + } + + // check that we sent something + assert_eq!(tester.gateway.sent_txs.read().unwrap().len(), 3); + + // mined & confirmed + tester + .gateway + .execute_tx(hashes[0], true, EthSenderTester::WAIT_CONFIRMATIONS)?; + + // mined but not confirmed + tester + .gateway + .execute_tx(hashes[1], true, EthSenderTester::WAIT_CONFIRMATIONS - 1)?; + + let (to_resend, _) = tester + .manager + .monitor_inflight_transactions( + &mut tester.storage().await, + L1BlockNumber(tester.gateway.block_number("a").await.unwrap().as_u32()), + ) + .await? 
+ .expect("we should be trying to resend the last tx"); + + // check that last 2 transactions are still considered inflight + assert_eq!( + tester + .storage() + .await + .eth_sender_dal() + .get_inflight_txs() + .len(), + 2 + ); + + // last sent transaction has nonce == 2, because they start from 0 + assert_eq!(to_resend.nonce, 2); + + Ok(()) +} + +#[should_panic(expected = "We can't operate after tx fail")] +#[db_test] +async fn failed_eth_tx(connection_pool: ConnectionPool) { + let mut tester = EthSenderTester::new(connection_pool.clone(), vec![100; 100]).await; + + let tx = tester + .aggregator + .save_eth_tx(&mut tester.storage().await, &DUMMY_OPERATION) + .await + .unwrap(); + + let hash = tester + .manager + .send_eth_tx( + &mut tester.storage().await, + &tx, + 0, + L1BlockNumber(tester.gateway.block_number("").await.unwrap().as_u32()), + ) + .await + .unwrap(); + + // fail this tx + tester + .gateway + .execute_tx(hash, false, EthSenderTester::WAIT_CONFIRMATIONS) + .unwrap(); + tester + .manager + .monitor_inflight_transactions( + &mut tester.storage().await, + L1BlockNumber(tester.gateway.block_number("a").await.unwrap().as_u32()), + ) + .await + .unwrap(); +} diff --git a/core/bin/zksync_core/src/eth_sender/zksync_functions.rs b/core/bin/zksync_core/src/eth_sender/zksync_functions.rs new file mode 100644 index 000000000000..78e684f02877 --- /dev/null +++ b/core/bin/zksync_core/src/eth_sender/zksync_functions.rs @@ -0,0 +1,43 @@ +use zksync_contracts::zksync_contract; +use zksync_types::ethabi::Function; + +#[derive(Debug)] +pub(super) struct ZkSyncFunctions { + pub(super) commit_blocks: Function, + pub(super) prove_blocks: Function, + pub(super) execute_blocks: Function, +} + +pub(super) fn get_zksync_functions() -> ZkSyncFunctions { + let zksync_contract = zksync_contract(); + + let commit_blocks = zksync_contract + .functions + .get("commitBlocks") + .cloned() + .expect("commitBlocks function not found") + .pop() + .expect("commitBlocks function entry 
not found"); + + let prove_blocks = zksync_contract + .functions + .get("proveBlocks") + .cloned() + .expect("proveBlocks function not found") + .pop() + .expect("proveBlocks function entry not found"); + + let execute_blocks = zksync_contract + .functions + .get("executeBlocks") + .cloned() + .expect("executeBlocks function not found") + .pop() + .expect("executeBlocks function entry not found"); + + ZkSyncFunctions { + commit_blocks, + prove_blocks, + execute_blocks, + } +} diff --git a/core/bin/zksync_core/src/eth_watch/client.rs b/core/bin/zksync_core/src/eth_watch/client.rs new file mode 100644 index 000000000000..9f2aa6a73f6a --- /dev/null +++ b/core/bin/zksync_core/src/eth_watch/client.rs @@ -0,0 +1,229 @@ +use itertools::Itertools; +use std::convert::TryFrom; +use std::fmt::{Debug, Display}; + +use tokio::time::Instant; + +use zksync_eth_client::clients::http_client::{self, EthInterface, EthereumClient}; +use zksync_types::ethabi::{Contract, Hash}; + +use zksync_contracts::zksync_contract; +use zksync_types::{ + l1::L1Tx, + web3::{ + self, + contract::Options, + types::{BlockNumber, FilterBuilder, Log}, + }, + Address, Nonce, H160, +}; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Log parsing filed: {0}")] + LogParse(String), + #[error("Eth client error: {0}")] + EthClient(#[from] http_client::Error), + #[error("Infinite recursion caused by too many responses")] + InfiniteRecursion, +} + +#[derive(Debug)] +struct ContractTopics { + new_priority_request: Hash, +} + +impl ContractTopics { + fn new(zksync_contract: &Contract) -> Self { + Self { + new_priority_request: zksync_contract + .event("NewPriorityRequest") + .expect("main contract abi error") + .signature(), + } + } +} + +#[async_trait::async_trait] +pub trait EthClient { + async fn get_priority_op_events( + &self, + from: BlockNumber, + to: BlockNumber, + retries_left: usize, + ) -> Result, Error>; + async fn block_number(&self) -> Result; + async fn get_auth_fact(&self, address: 
Address, nonce: Nonce) -> Result, Error>; + async fn get_auth_fact_reset_time(&self, address: Address, nonce: Nonce) -> Result; +} + +pub const RETRY_LIMIT: usize = 5; +const TOO_MANY_RESULTS_INFURA: &str = "query returned more than"; +const TOO_MANY_RESULTS_ALCHEMY: &str = "response size exceeded"; + +#[derive(Debug)] +pub struct EthHttpClient { + client: EthereumClient, + topics: ContractTopics, + zksync_contract_addr: H160, +} + +impl EthHttpClient { + pub fn new(client: EthereumClient, zksync_contract_addr: H160) -> Self { + vlog::debug!("New eth client, contract addr: {:x}", zksync_contract_addr); + let topics = ContractTopics::new(&zksync_contract()); + Self { + client, + topics, + zksync_contract_addr, + } + } + + async fn get_filter_logs( + &self, + from: BlockNumber, + to: BlockNumber, + topics: Vec, + ) -> Result, Error> + where + T: TryFrom, + T::Error: Debug + Display, + { + let filter = FilterBuilder::default() + .address(vec![self.zksync_contract_addr]) + .from_block(from) + .to_block(to) + .topics(Some(topics), None, None, None) + .build(); + + self.client + .logs(filter, "watch") + .await? + .into_iter() + .map(|log| T::try_from(log).map_err(|err| Error::LogParse(format!("{}", err)))) + .collect() + } +} + +#[async_trait::async_trait] +impl EthClient for EthHttpClient { + async fn get_priority_op_events( + &self, + from: BlockNumber, + to: BlockNumber, + retries_left: usize, + ) -> Result, Error> { + let start = Instant::now(); + + let mut result = self + .get_filter_logs(from, to, vec![self.topics.new_priority_request]) + .await; + + // This code is compatible with both Infura and Alchemy API providers. + // Note: we don't handle rate-limits here - assumption is that we're never going to hit them. 
+ if let Err(Error::EthClient(http_client::Error::EthereumGateway(err))) = &result { + vlog::warn!("Provider returned error message: {:?}", err); + let err_message = err.to_string(); + let err_code = if let web3::Error::Rpc(err) = err { + Some(err.code.code()) + } else { + None + }; + + let should_retry = |err_code, err_message: String| { + // All of these can be emitted by either API provider. + err_code == Some(-32603) // Internal error + || err_message.contains("failed") // Server error + || err_message.contains("timed out") // Time-out error + }; + + // check whether the error is related to having too many results + if err_message.contains(TOO_MANY_RESULTS_INFURA) + || err_message.contains(TOO_MANY_RESULTS_ALCHEMY) + { + // get the numeric block ids + let from_number = match from { + BlockNumber::Number(num) => num, + _ => { + // invalid variant + return result; + } + }; + let to_number = match to { + BlockNumber::Number(num) => num, + BlockNumber::Latest => self.client.block_number("watch").await?, + _ => { + // invalid variant + return result; + } + }; + + // divide range into two halves and recursively fetch them + let mid = (from_number + to_number) / 2; + + // safety check to prevent infinite recursion (quite unlikely) + if from_number >= mid { + return Err(Error::InfiniteRecursion); + } + vlog::warn!( + "Splitting block range in half: {:?} - {:?} - {:?}", + from, + mid, + to + ); + let mut first_half = self + .get_priority_op_events(from, BlockNumber::Number(mid), RETRY_LIMIT) + .await?; + let mut second_half = self + .get_priority_op_events(BlockNumber::Number(mid + 1u64), to, RETRY_LIMIT) + .await?; + + first_half.append(&mut second_half); + result = Ok(first_half); + } else if should_retry(err_code, err_message) && retries_left > 0 { + vlog::warn!("Retrying. Retries left: {:?}", retries_left); + result = self + .get_priority_op_events(from, to, retries_left - 1) + .await; + } + } + + let events: Vec = result? 
+ .into_iter() + .sorted_by_key(|event| event.serial_id()) + .collect(); + + metrics::histogram!("eth_watcher.get_priority_op_events", start.elapsed()); + Ok(events) + } + + async fn block_number(&self) -> Result { + Ok(self.client.block_number("watch").await?.as_u64()) + } + + async fn get_auth_fact(&self, address: Address, nonce: Nonce) -> Result, Error> { + Ok(self + .client + .call_main_contract_function( + "authFacts", + (address, u64::from(*nonce)), + None, + Options::default(), + None, + ) + .await?) + } + + async fn get_auth_fact_reset_time(&self, address: Address, nonce: Nonce) -> Result { + Ok(self + .client + .call_main_contract_function::( + "authFactsResetTimer", + (address, u64::from(*nonce)), + None, + Options::default(), + None, + ) + .await?) + } +} diff --git a/core/bin/zksync_core/src/eth_watch/mod.rs b/core/bin/zksync_core/src/eth_watch/mod.rs new file mode 100644 index 000000000000..5b9f9623bff4 --- /dev/null +++ b/core/bin/zksync_core/src/eth_watch/mod.rs @@ -0,0 +1,248 @@ +//! Ethereum watcher polls the Ethereum node for PriorityQueue events. +//! New events are accepted to the zkSync network once they have the sufficient amount of L1 confirmations. +//! +//! Poll interval is configured using the `ETH_POLL_INTERVAL` constant. +//! Number of confirmations is configured using the `CONFIRMATIONS_FOR_ETH_EVENT` environment variable. 
+ +// Built-in deps +use std::time::{Duration, Instant}; + +// External uses +use tokio::{sync::watch, task::JoinHandle}; + +// Workspace deps +use zksync_config::constants::PRIORITY_EXPIRATION; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_types::{ + l1::L1Tx, web3::types::BlockNumber as Web3BlockNumber, L1BlockNumber, PriorityOpId, +}; + +// Local deps +use self::client::{Error, EthClient, EthHttpClient}; + +use zksync_config::ZkSyncConfig; + +use crate::eth_watch::client::RETRY_LIMIT; +use zksync_dal::{ConnectionPool, StorageProcessor}; + +mod client; + +#[cfg(test)] +mod tests; + +#[derive(Debug)] +struct EthWatchState { + next_expected_priority_id: PriorityOpId, + last_processed_ethereum_block: u64, +} + +#[derive(Debug)] +pub struct EthWatch { + client: W, + /// All ethereum events are accepted after sufficient confirmations to eliminate risk of block reorg. + number_of_confirmations_for_event: usize, + poll_interval: Duration, + + state: EthWatchState, +} + +impl EthWatch { + pub async fn new( + client: W, + pool: &ConnectionPool, + number_of_confirmations_for_event: usize, + poll_interval: Duration, + ) -> Self { + let mut storage = pool.access_storage().await; + + let state = + Self::initialize_state(&client, &mut storage, number_of_confirmations_for_event).await; + + vlog::info!("initialized state: {:?}", state); + Self { + client, + number_of_confirmations_for_event, + poll_interval, + state, + } + } + + async fn initialize_state( + client: &W, + storage: &mut StorageProcessor<'_>, + number_of_confirmations_for_event: usize, + ) -> EthWatchState { + let next_expected_priority_id: PriorityOpId = storage + .transactions_dal() + .last_priority_id() + .map_or(PriorityOpId(0), |e| e + 1); + + let last_processed_ethereum_block = + match storage.transactions_dal().get_last_processed_l1_block() { + // There are some priority ops processed - start from the last processed eth block + // but subtract 1 in case the server stopped 
mid-block. + Some(block) => block.0.saturating_sub(1).into(), + // There are no priority ops processed - to be safe, scan the last 50k blocks. + None => { + Self::get_current_finalized_eth_block(client, number_of_confirmations_for_event) + .await + .expect("cannot initialize eth watch: cannot get current ETH block") + .saturating_sub(PRIORITY_EXPIRATION) + } + }; + + EthWatchState { + next_expected_priority_id, + last_processed_ethereum_block, + } + } + + pub async fn run(&mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + let mut timer = tokio::time::interval(self.poll_interval); + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, eth_watch is shutting down"); + break; + } + + timer.tick().await; + + metrics::counter!("server.eth_watch.eth_poll", 1); + + let mut storage = pool.access_storage().await; + if let Err(error) = self.loop_iteration(&mut storage).await { + // This is an error because otherwise we could potentially miss a priority operation + // thus entering priority mode, which is not desired. 
+ vlog::error!("Failed to process new blocks {}", error); + self.state = Self::initialize_state( + &self.client, + &mut storage, + self.number_of_confirmations_for_event, + ) + .await; + } + } + } + + #[tracing::instrument(skip(self, storage))] + async fn loop_iteration(&mut self, storage: &mut StorageProcessor<'_>) -> Result<(), Error> { + let mut stage_start = Instant::now(); + let to_block = Self::get_current_finalized_eth_block( + &self.client, + self.number_of_confirmations_for_event, + ) + .await?; + + if to_block <= self.state.last_processed_ethereum_block { + return Ok(()); + } + + let new_ops = self + .get_new_priority_ops(self.state.last_processed_ethereum_block, to_block) + .await?; + + self.state.last_processed_ethereum_block = to_block; + + metrics::histogram!("eth_watcher.poll_eth_node", stage_start.elapsed(), "stage" => "request"); + if !new_ops.is_empty() { + let first = &new_ops[0].1; + let last = &new_ops[new_ops.len() - 1].1; + assert_eq!( + first.serial_id(), + self.state.next_expected_priority_id, + "priority transaction serial id mismatch" + ); + self.state.next_expected_priority_id = last.serial_id().next(); + stage_start = Instant::now(); + metrics::counter!( + "server.processed_txs", + new_ops.len() as u64, + "stage" => "mempool_added" + ); + metrics::counter!( + "server.processed_l1_txs", + new_ops.len() as u64, + "stage" => "mempool_added" + ); + for (eth_block, new_op) in new_ops { + storage + .transactions_dal() + .insert_transaction_l1(new_op, eth_block); + } + metrics::histogram!("eth_watcher.poll_eth_node", stage_start.elapsed(), "stage" => "persist"); + } + Ok(()) + } + + async fn get_new_priority_ops( + &self, + from_block: u64, + to_block: u64, + ) -> Result, Error> { + let priority_ops: Vec = self + .client + .get_priority_op_events( + Web3BlockNumber::Number(from_block.into()), + Web3BlockNumber::Number(to_block.into()), + RETRY_LIMIT, + ) + .await? 
+ .into_iter() + .collect::>(); + + if !priority_ops.is_empty() { + let first = &priority_ops[0]; + let last = &priority_ops[priority_ops.len() - 1]; + vlog::debug!( + "Received priority requests with serial ids: {} (block {}) - {} (block {})", + first.serial_id(), + first.eth_block(), + last.serial_id(), + last.eth_block(), + ); + assert_eq!( + last.serial_id().0 - first.serial_id().0 + 1, + priority_ops.len() as u64, + "there is a gap in priority ops received" + ) + } + + Ok(priority_ops + .into_iter() + .skip_while(|tx| tx.serial_id() < self.state.next_expected_priority_id) + .map(|tx| (L1BlockNumber(tx.eth_block() as u32), tx)) + .collect()) + } + + // ETH block assumed to be final (that is, old enough to not worry about reorgs) + async fn get_current_finalized_eth_block( + client: &W, + number_of_confirmations_for_event: usize, + ) -> Result { + Ok(client + .block_number() + .await? + .saturating_sub(number_of_confirmations_for_event as u64)) + } +} + +pub async fn start_eth_watch( + pool: ConnectionPool, + eth_gateway: EthereumClient, + config_options: &ZkSyncConfig, + stop_receiver: watch::Receiver, +) -> JoinHandle<()> { + let eth_client = EthHttpClient::new(eth_gateway, config_options.contracts.diamond_proxy_addr); + + let mut eth_watch = EthWatch::new( + eth_client, + &pool, + config_options.eth_watch.confirmations_for_eth_event as usize, + config_options.eth_watch.poll_interval(), + ) + .await; + + tokio::spawn(async move { + eth_watch.run(pool, stop_receiver).await; + }) +} diff --git a/core/bin/zksync_core/src/eth_watch/tests.rs b/core/bin/zksync_core/src/eth_watch/tests.rs new file mode 100644 index 000000000000..78000624498d --- /dev/null +++ b/core/bin/zksync_core/src/eth_watch/tests.rs @@ -0,0 +1,278 @@ +use std::cmp::max; +use std::collections::HashMap; +use std::convert::TryInto; +use std::sync::Arc; + +use tokio::sync::RwLock; + +use db_test_macro::db_test; +use zksync_dal::StorageProcessor; +use zksync_types::web3::types::{Address, 
BlockNumber}; +use zksync_types::{ + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + Execute, L1TxCommonData, Nonce, PriorityOpId, Transaction, H256, U256, +}; + +use super::client::Error; +use crate::eth_watch::{client::EthClient, EthWatch}; + +struct FakeEthClientData { + transactions: HashMap>, + last_block_number: u64, +} + +impl FakeEthClientData { + fn new() -> Self { + Self { + transactions: Default::default(), + last_block_number: 0, + } + } + + fn add_transactions(&mut self, transactions: &[L1Tx]) { + for transaction in transactions { + let eth_block = transaction.eth_block(); + self.last_block_number = max(eth_block, self.last_block_number); + self.transactions + .entry(eth_block) + .or_insert_with(Vec::new) + .push(transaction.clone()); + } + } + fn set_last_block_number(&mut self, number: u64) { + self.last_block_number = number; + } +} + +#[derive(Clone)] +struct FakeEthClient { + inner: Arc>, +} + +impl FakeEthClient { + fn new() -> Self { + Self { + inner: Arc::new(RwLock::new(FakeEthClientData::new())), + } + } + + async fn add_transactions(&mut self, transactions: &[L1Tx]) { + self.inner.write().await.add_transactions(transactions); + } + + async fn set_last_block_number(&mut self, number: u64) { + self.inner.write().await.set_last_block_number(number); + } + + async fn block_to_number(&self, block: BlockNumber) -> u64 { + match block { + BlockNumber::Latest => self.inner.read().await.last_block_number, + BlockNumber::Earliest => 0, + BlockNumber::Pending => unreachable!(), + BlockNumber::Number(number) => number.as_u64(), + } + } +} + +#[async_trait::async_trait] +impl EthClient for FakeEthClient { + async fn get_priority_op_events( + &self, + from: BlockNumber, + to: BlockNumber, + _retries_left: usize, + ) -> Result, Error> { + let from = self.block_to_number(from).await; + let to = self.block_to_number(to).await; + let mut transactions = vec![]; + for number in from..=to { + if let Some(ops) = 
self.inner.read().await.transactions.get(&number) { + transactions.extend_from_slice(ops); + } + } + Ok(transactions) + } + + async fn block_number(&self) -> Result { + Ok(self.block_to_number(BlockNumber::Latest).await) + } + + async fn get_auth_fact(&self, _address: Address, _nonce: Nonce) -> Result, Error> { + unreachable!() + } + + async fn get_auth_fact_reset_time( + &self, + _address: Address, + _nonce: Nonce, + ) -> Result { + unreachable!() + } +} + +fn build_tx(serial_id: u64, eth_block: u64) -> L1Tx { + L1Tx { + execute: Execute { + contract_address: Address::repeat_byte(0x11), + calldata: vec![1, 2, 3], + factory_deps: None, + value: U256::zero(), + }, + common_data: L1TxCommonData { + serial_id: PriorityOpId(serial_id), + sender: [1u8; 20].into(), + deadline_block: 0, + eth_hash: [2; 32].into(), + eth_block, + gas_limit: Default::default(), + gas_per_pubdata_limit: 1u32.into(), + full_fee: Default::default(), + layer_2_tip_fee: U256::from(10u8), + refund_recipient: Address::zero(), + to_mint: Default::default(), + priority_queue_type: PriorityQueueType::Deque, + op_processing_type: OpProcessingType::Common, + canonical_tx_hash: H256::from_low_u64_le(serial_id), + }, + received_timestamp_ms: 0, + } +} + +#[db_test] +async fn test_normal_operation(connection_pool: ConnectionPool) { + let mut client = FakeEthClient::new(); + let mut watcher = EthWatch::new( + client.clone(), + &connection_pool, + 5, + std::time::Duration::from_nanos(1), + ) + .await; + + let mut storage = connection_pool.access_test_storage().await; + client + .add_transactions(&[build_tx(0, 10), build_tx(1, 14), build_tx(2, 18)]) + .await; + client.set_last_block_number(20).await; + // second tx will not be processed, as it has less than 5 confirmations + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage); + assert_eq!(db_txs.len(), 2); + let db_tx: L1Tx = db_txs[0].clone().try_into().unwrap(); + assert_eq!(db_tx.common_data.serial_id.0, 0); + 
let db_tx: L1Tx = db_txs[1].clone().try_into().unwrap(); + assert_eq!(db_tx.common_data.serial_id.0, 1); + + client.set_last_block_number(25).await; + // now the second tx will be processed + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage); + assert_eq!(db_txs.len(), 3); + let db_tx: L1Tx = db_txs[2].clone().try_into().unwrap(); + assert_eq!(db_tx.common_data.serial_id.0, 2); +} + +#[db_test] +#[should_panic] +async fn test_gap_in_single_batch(connection_pool: ConnectionPool) { + let mut client = FakeEthClient::new(); + let mut watcher = EthWatch::new( + client.clone(), + &connection_pool, + 5, + std::time::Duration::from_nanos(1), + ) + .await; + + let mut storage = connection_pool.access_test_storage().await; + client + .add_transactions(&[ + build_tx(0, 10), + build_tx(1, 14), + build_tx(2, 14), + build_tx(3, 14), + build_tx(5, 14), + ]) + .await; + client.set_last_block_number(20).await; + watcher.loop_iteration(&mut storage).await.unwrap(); +} + +#[db_test] +#[should_panic] +async fn test_gap_between_batches(connection_pool: ConnectionPool) { + let mut client = FakeEthClient::new(); + let mut watcher = EthWatch::new( + client.clone(), + &connection_pool, + 5, + std::time::Duration::from_nanos(1), + ) + .await; + + let mut storage = connection_pool.access_test_storage().await; + client + .add_transactions(&[ + // this goes to the first batch + build_tx(0, 10), + build_tx(1, 14), + build_tx(2, 14), + // this goes to the second batch + build_tx(4, 20), + build_tx(5, 22), + ]) + .await; + client.set_last_block_number(20).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage); + assert_eq!(db_txs.len(), 3); + client.set_last_block_number(30).await; + watcher.loop_iteration(&mut storage).await.unwrap(); +} + +#[db_test] +async fn test_overlapping_batches(connection_pool: ConnectionPool) { + let mut client = FakeEthClient::new(); + let mut watcher = EthWatch::new( + 
client.clone(), + &connection_pool, + 5, + std::time::Duration::from_nanos(1), + ) + .await; + + let mut storage = connection_pool.access_test_storage().await; + client + .add_transactions(&[ + // this goes to the first batch + build_tx(0, 10), + build_tx(1, 14), + build_tx(2, 14), + // this goes to the second batch + build_tx(1, 20), + build_tx(2, 22), + build_tx(3, 23), + build_tx(4, 23), + ]) + .await; + client.set_last_block_number(20).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage); + assert_eq!(db_txs.len(), 3); + client.set_last_block_number(30).await; + watcher.loop_iteration(&mut storage).await.unwrap(); + let db_txs = get_all_db_txs(&mut storage); + assert_eq!(db_txs.len(), 5); + let tx: L1Tx = db_txs[2].clone().try_into().unwrap(); + assert_eq!(tx.common_data.serial_id.0, 2); + let tx: L1Tx = db_txs[4].clone().try_into().unwrap(); + assert_eq!(tx.common_data.serial_id.0, 4); +} + +fn get_all_db_txs(storage: &mut StorageProcessor<'_>) -> Vec { + storage.transactions_dal().reset_mempool(); + storage + .transactions_dal() + .sync_mempool(vec![], vec![], 0, 0, 1000) + .0 +} diff --git a/core/bin/zksync_core/src/fee_monitor/mod.rs b/core/bin/zksync_core/src/fee_monitor/mod.rs new file mode 100644 index 000000000000..99a0bbfe1fa8 --- /dev/null +++ b/core/bin/zksync_core/src/fee_monitor/mod.rs @@ -0,0 +1,220 @@ +//! This module contains utilities for monitoring the fee model performance, +//! i.e. ability of the protocol to cover the costs for its own maintenance. + +use std::time::Duration; +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_types::{ + api::BlockId, AccountTreeId, Address, L1BatchNumber, L2_ETH_TOKEN_ADDRESS, U256, +}; + +/// Component name used to track eth client usage. 
+const COMPONENT_NAME: &str = "fee-monitor"; + +/// Inclusive iterator for the (from..=to) blocks range +fn block_range(from: L1BatchNumber, to: L1BatchNumber) -> impl Iterator { + (from.0..=to.0).map(L1BatchNumber) +} + +/// Helper trait allowing to convert U256 balance representation to the f64 +/// with the given amount of decimals. +/// +/// Important! Never attempt to use this trait for anything important, because +/// the conversion is, obviously, lossy. +trait BalanceConvert { + fn to_f64_with_decimals(self, decimals: u8) -> f64; +} + +impl BalanceConvert for U256 { + fn to_f64_with_decimals(self, decimals: u8) -> f64 { + let divider = U256::from(10u64.pow(decimals as u32)); + let (quotient, remainder) = self.div_mod(divider); + let remainder_fractional = (remainder.as_u128() as f64) * 10.0f64.powf(-(decimals as f64)); + + quotient.as_u128() as f64 + remainder_fractional + } +} + +#[derive(Debug)] +pub struct FeeMonitor { + operator_address: Address, + fee_account_address: Address, + + storage: ConnectionPool, + client: EthereumClient, + + next_finalized_block: L1BatchNumber, +} + +impl FeeMonitor { + pub async fn new( + config: &ZkSyncConfig, + storage: ConnectionPool, + client: EthereumClient, + ) -> Self { + let mut storage_processor = storage.access_storage().await; + let latest_l1_batch_finalized = storage_processor + .blocks_dal() + .get_number_of_last_block_executed_on_eth() + .unwrap_or_default(); + drop(storage_processor); + + Self { + operator_address: config.eth_sender.sender.operator_commit_eth_addr, + fee_account_address: config.chain.state_keeper.fee_account_addr, + + storage, + client, + + next_finalized_block: latest_l1_batch_finalized.next(), + } + } + + pub async fn run(mut self) { + // We don't need these metrics to be reported often. 
+ let mut timer = tokio::time::interval(Duration::from_secs(15)); + + loop { + timer.tick().await; + self.run_iter().await; + } + } + + async fn run_iter(&mut self) { + let last_finalized = { + let mut storage = self.storage.access_storage().await; + storage + .blocks_dal() + .get_number_of_last_block_executed_on_eth() + .unwrap_or_default() + }; + + let _ = self.report_balances().await.map_err(|err| { + vlog::warn!("Unable to report account balances in fee monitor: {err}"); + }); + + // Only report data if new blocks were finalized. + if last_finalized >= self.next_finalized_block { + let _ = self + .report_collected_fees(last_finalized) + .await + .map_err(|err| { + vlog::warn!("Unable to report collected fees in fee monitor: {err}"); + }); + let _ = self + .report_l1_batch_finalized(last_finalized) + .await + .map_err(|err| { + vlog::warn!("Unable to report l1 batch finalization in fee monitor: {err}"); + }); + + self.next_finalized_block = last_finalized.next(); + } + } + + async fn report_balances(&self) -> anyhow::Result<()> { + let mut storage = self.storage.access_storage().await; + let mut operator_balance_l1 = self + .client + .eth_balance(self.operator_address, COMPONENT_NAME) + .await? + .to_f64_with_decimals(18); + let mut fee_account_balance_l1 = self + .client + .eth_balance(self.fee_account_address, COMPONENT_NAME) + .await? + .to_f64_with_decimals(18); + let mut fee_account_balance_l2 = storage + .storage_web3_dal() + .standard_token_historical_balance( + AccountTreeId::new(L2_ETH_TOKEN_ADDRESS), + AccountTreeId::new(self.fee_account_address), + BlockId::Number(zksync_types::api::BlockNumber::Pending), + )?? + .to_f64_with_decimals(18); + + // Limit balances to sane values to render them adequately on the localhost. + for balance in [ + &mut operator_balance_l1, + &mut fee_account_balance_l1, + &mut fee_account_balance_l2, + ] { + // We're unlikely to keep more than 1000 ETH on hot wallets in any real environment. 
+ const MAX_BALANCE_TO_DISPLAY_ETH: f64 = 1000.0f64; + *balance = balance.min(MAX_BALANCE_TO_DISPLAY_ETH); + } + + metrics::gauge!("fee_monitor.balances", operator_balance_l1, "account" => "operator_l1"); + metrics::gauge!("fee_monitor.balances", fee_account_balance_l1, "account" => "fee_account_l1"); + metrics::gauge!("fee_monitor.balances", fee_account_balance_l2, "account" => "fee_account_l2"); + + Ok(()) + } + + async fn report_collected_fees(&mut self, last_finalized: L1BatchNumber) -> anyhow::Result<()> { + let mut storage = self.storage.access_storage().await; + for block_number in block_range(self.next_finalized_block, last_finalized) { + let collected_fees = storage + .fee_monitor_dal() + .fetch_erc20_transfers(block_number, self.fee_account_address)?; + + let total_fee_wei: U256 = collected_fees + .into_iter() + .fold(U256::zero(), |acc, x| acc + x); + + // Convert value to gwei to reduce verbosity. + let fee_in_gwei = total_fee_wei.to_f64_with_decimals(9); + metrics::gauge!("fee_monitor.collected_fees", fee_in_gwei); + vlog::info!("Collected fees in block {block_number}: {fee_in_gwei:.6} gwei"); + } + + Ok(()) + } + + async fn report_l1_batch_finalized( + &mut self, + last_finalized: L1BatchNumber, + ) -> anyhow::Result<()> { + let mut storage = self.storage.access_storage().await; + for block_number in block_range(self.next_finalized_block, last_finalized) { + let block_data = storage + .fee_monitor_dal() + .get_block_gas_consumption(block_number)?; + let total_wei_spent = U256::from(block_data.wei_spent()); + + // Convert value to gwei to reduce verbosity. + let gwei_spent = total_wei_spent.to_f64_with_decimals(9); + metrics::gauge!("fee_monitor.expenses", gwei_spent); + vlog::info!("Block processing expenses in block {block_number}: {gwei_spent:.6} gwei"); + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn check(val: u64, expected: f64, decimals: u8) { + // That's a bad way to compare floats, mmkay? 
+ // However we aren't going for precision anyways, so don't tell anyone. ( ͡° ͜ʖ ͡°) + let result = U256::from(val).to_f64_with_decimals(decimals); + let abs_diff = (result - expected).abs(); + assert!( + abs_diff < 0.000001f64, + "Value mismatch: expected {}, got {}", + expected, + result + ); + } + + #[test] + fn to_f64_with_decimals() { + check(1000000, 1.0, 6); + check(1000001, 1.000001, 6); + check(1800001, 1.800001, 6); + check(3241500000000000000, 3.2415, 18); + } +} diff --git a/core/bin/zksync_core/src/fee_ticker/error.rs b/core/bin/zksync_core/src/fee_ticker/error.rs new file mode 100644 index 000000000000..a7e2dceaa8ee --- /dev/null +++ b/core/bin/zksync_core/src/fee_ticker/error.rs @@ -0,0 +1,12 @@ +use thiserror::Error; +use zksync_types::Address; + +#[derive(Debug, Error)] +pub enum TickerError { + #[error("Token {0:x} is not being tracked for its price")] + PriceNotTracked(Address), + #[error("Third-party API data is temporarily unavailable")] + ApiDataUnavailable, + #[error("Fee ticker internal error")] + InternalError, +} diff --git a/core/bin/zksync_core/src/fee_ticker/gas_price.rs b/core/bin/zksync_core/src/fee_ticker/gas_price.rs new file mode 100644 index 000000000000..3c4cff6991f0 --- /dev/null +++ b/core/bin/zksync_core/src/fee_ticker/gas_price.rs @@ -0,0 +1,8 @@ +//! This module contains the logic used to calculate the price of 1 gas in Wei. + +use num::{rational::Ratio, BigUint}; + +/// Converts any token price in USD into one Wei price per USD. +pub fn token_price_to_wei_price_usd(token_price: &Ratio, decimals: u32) -> Ratio { + token_price / BigUint::from(10u32).pow(decimals) +} diff --git a/core/bin/zksync_core/src/fee_ticker/mod.rs b/core/bin/zksync_core/src/fee_ticker/mod.rs new file mode 100644 index 000000000000..ebf9a2ceac68 --- /dev/null +++ b/core/bin/zksync_core/src/fee_ticker/mod.rs @@ -0,0 +1,90 @@ +//! This module defines the price components of L2 transactions. 
+ +use core::fmt::Debug; + +use bigdecimal::BigDecimal; +use num::{rational::Ratio, BigUint}; +use vm::vm_with_bootloader::base_fee_to_gas_per_pubdata; +use zksync_types::Address; +use zksync_utils::ratio_to_big_decimal_normalized; + +use self::error::TickerError; +use zksync_dal::tokens_web3_dal::TokensWeb3Dal; + +pub mod error; +mod gas_price; +pub mod types; + +/// Amount of possible symbols after the decimal dot in the USD. +/// Used to convert `Ratio` to `BigDecimal`. +pub const USD_PRECISION: usize = 100; + +/// Minimum amount of symbols after the decimal dot in the USD. +/// Used to convert `Ratio` to `BigDecimal`. +pub const MIN_PRECISION: usize = 2; + +#[derive(Debug, PartialEq, Eq)] +pub enum TokenPriceRequestType { + USDForOneWei, + USDForOneToken, +} + +#[derive(Debug, Default)] +pub struct FeeTicker; + +impl FeeTicker { + /// Returns the token price in USD. + pub fn get_l2_token_price( + tokens_web3_dal: &mut TokensWeb3Dal<'_, '_>, + request_type: TokenPriceRequestType, + l2_token_addr: &Address, + ) -> Result { + Self::get_l2_token_price_inner(tokens_web3_dal, request_type, l2_token_addr).map( + |final_price| { + ratio_to_big_decimal_normalized(&final_price, USD_PRECISION, MIN_PRECISION) + }, + ) + } + + /// Returns the acceptable `gas_per_pubdata_byte` based on the current gas price. + pub fn gas_per_pubdata_byte(gas_price_wei: u64, base_fee: u64) -> u32 { + base_fee_to_gas_per_pubdata(gas_price_wei, base_fee) as u32 + } + + fn get_l2_token_price_inner( + tokens_web3_dal: &mut TokensWeb3Dal<'_, '_>, + request_type: TokenPriceRequestType, + l2_token_addr: &Address, + ) -> Result, TickerError> { + let token_price = tokens_web3_dal + .get_token_price(l2_token_addr) + .map_err(|_| TickerError::InternalError)? + .ok_or(TickerError::PriceNotTracked(*l2_token_addr))? 
+ .usd_price; + + let final_price = match request_type { + TokenPriceRequestType::USDForOneToken => token_price, + TokenPriceRequestType::USDForOneWei => { + let token_metadata = tokens_web3_dal + .get_token_metadata(l2_token_addr) + .map_err(|_| TickerError::InternalError)? + .ok_or_else(|| { + // It's kinda not OK that we have a price for token, but no metadata. + // Not a reason for a panic, but surely highest possible report level. + vlog::error!( + "Token {:x} has price, but no stored metadata", + l2_token_addr + ); + TickerError::PriceNotTracked(*l2_token_addr) + })?; + + gas_price::token_price_to_wei_price_usd( + &token_price, + token_metadata.decimals as u32, + ) + } + }; + + Ok(final_price) + } +} diff --git a/core/bin/zksync_core/src/fee_ticker/types.rs b/core/bin/zksync_core/src/fee_ticker/types.rs new file mode 100644 index 000000000000..daedb6f4a6d5 --- /dev/null +++ b/core/bin/zksync_core/src/fee_ticker/types.rs @@ -0,0 +1,5 @@ +#[derive(Debug, PartialEq, Eq)] +pub enum TokenPriceRequestType { + USDForOneWei, + USDForOneToken, +} diff --git a/core/bin/zksync_core/src/gas_adjuster/mod.rs b/core/bin/zksync_core/src/gas_adjuster/mod.rs new file mode 100644 index 000000000000..351b419699f8 --- /dev/null +++ b/core/bin/zksync_core/src/gas_adjuster/mod.rs @@ -0,0 +1,227 @@ +//! This module determines the fees to pay in txs containing blocks submitted to the L1. + +// Built-in deps +use std::collections::VecDeque; +use std::sync::{Arc, RwLock}; +use tokio::sync::watch::Receiver; +use vm::vm_with_bootloader::derive_base_fee_and_gas_per_pubdata; +use zksync_types::FAIR_L2_GAS_PRICE; + +use zksync_config::GasAdjusterConfig; +use zksync_eth_client::{clients::http_client::Error, EthInterface}; +use zksync_mempool::L2TxFilter; + +#[cfg(test)] +mod tests; + +/// This component keeps track of the median base_fee from the last `max_base_fee_samples` blocks. +/// It is used to adjust the base_fee of transactions sent to L1. 
+#[derive(Debug)] +pub struct GasAdjuster { + pub(super) statistics: GasStatistics, + pub(super) config: GasAdjusterConfig, + eth_client: E, +} + +impl GasAdjuster { + pub async fn new(eth_client: E, config: GasAdjusterConfig) -> Result { + let current_block = eth_client.block_number("gas_adjuster").await?.as_usize(); + let history = eth_client + .base_fee_history(current_block, config.max_base_fee_samples, "gas_adjuster") + .await?; + Ok(Self { + statistics: GasStatistics::new(config.max_base_fee_samples, current_block, &history), + eth_client, + config, + }) + } + + /// Returns the sum of base and priority fee, in wei, not considering time in mempool. + /// Can be used to get an estimate of current gas price. + pub fn estimate_effective_gas_price(&self) -> u64 { + let effective_gas_price = self.get_base_fee(0) + self.get_priority_fee(); + + (self.config.internal_l1_pricing_multiplier * effective_gas_price as f64) as u64 + } + + // This is the method where we decide how much we are ready to pay for the + // base_fee based on the number of L1 blocks the transaction has been in the mempool. + // This is done in order to avoid base_fee spikes (e.g. during NFT drops) and + // smooth out base_fee increases in general. + // In other words, in order to pay less fees, we are ready to wait longer. + // But the longer we wait, the more we are ready to pay. + pub fn get_base_fee(&self, time_in_mempool: u32) -> u64 { + let a = self.config.pricing_formula_parameter_a; + let b = self.config.pricing_formula_parameter_b; + + // Currently we use an exponential formula. 
+ // The alternative is a linear one: + // let scale_factor = a + b * time_in_mempool as f64; + let scale_factor = a * b.powf(time_in_mempool as f64); + let median = self.statistics.median(); + + metrics::gauge!("server.gas_adjuster.median_base_fee_per_gas", median as f64); + + let new_fee = median as f64 * scale_factor; + new_fee as u64 + } + + pub fn get_next_block_minimal_base_fee(&self) -> u64 { + let last_block_base_fee = self.statistics.last_added_value(); + + // The next block's base fee will decrease by a maximum of 12.5%. + last_block_base_fee * 875 / 1000 + } + + // Priority fee is set to constant, sourced from config. + // Reasoning behind this is the following: + // High priority_fee means high demand for block space, + // which means base_fee will increase, which means priority_fee + // will decrease. The EIP-1559 mechanism is designed such that + // base_fee will balance out priority_fee in such a way that + // priority_fee will be a small fraction of the overall fee. + pub fn get_priority_fee(&self) -> u64 { + self.config.default_priority_fee_per_gas + } + + /// Performs an actualization routine for `GasAdjuster`. + /// This method is intended to be invoked periodically. + pub async fn keep_updated(&self) -> Result<(), Error> { + // Subtracting 1 from the "latest" block number to prevent errors in case + // the info about the latest block is not yet present on the node. + // This sometimes happens on Infura. + let current_block = self + .eth_client + .block_number("gas_adjuster") + .await? + .as_usize() + .saturating_sub(1); + + let last_processed_block = self.statistics.last_processed_block(); + + if current_block > last_processed_block { + // Report the current price to be gathered by the statistics module. 
+ let history = self + .eth_client + .base_fee_history( + current_block, + current_block - last_processed_block, + "gas_adjuster", + ) + .await?; + + metrics::gauge!( + "server.gas_adjuster.current_base_fee_per_gas", + *history.last().unwrap() as f64 + ); + + self.statistics.add_samples(&history); + } + Ok(()) + } + + pub fn l2_tx_filter(&self) -> L2TxFilter { + let effective_gas_price = self.estimate_effective_gas_price(); + + let (base_fee, gas_per_pubdata) = + derive_base_fee_and_gas_per_pubdata(effective_gas_price, FAIR_L2_GAS_PRICE); + L2TxFilter { + l1_gas_price: effective_gas_price, + fee_per_gas: base_fee, + gas_per_pubdata: gas_per_pubdata as u32, + } + } + + pub async fn run(self: Arc, stop_receiver: Receiver) { + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, gas_adjuster is shutting down"); + break; + } + + if let Err(err) = self.keep_updated().await { + vlog::warn!("Cannot add the base fee to gas statistics: {}", err); + } + + tokio::time::sleep(self.config.poll_period()).await; + } + } +} + +/// Helper structure responsible for collecting the data about recent transactions, +/// calculating the median base fee. 
+#[derive(Debug, Clone, Default)] +pub(super) struct GasStatisticsInner { + samples: VecDeque, + median_cached: u64, + max_samples: usize, + last_processed_block: usize, +} + +impl GasStatisticsInner { + fn new(max_samples: usize, block: usize, fee_history: &[u64]) -> Self { + let mut statistics = Self { + max_samples, + samples: VecDeque::with_capacity(max_samples), + median_cached: 0, + last_processed_block: 0, + }; + + statistics.add_samples(fee_history); + + Self { + last_processed_block: block, + ..statistics + } + } + + fn median(&self) -> u64 { + self.median_cached + } + + fn last_added_value(&self) -> u64 { + self.samples.back().copied().unwrap_or(self.median_cached) + } + + fn add_samples(&mut self, fees: &[u64]) { + self.samples.extend(fees); + self.last_processed_block += fees.len(); + + let extra = self.samples.len().saturating_sub(self.max_samples); + self.samples.drain(..extra); + + let mut samples: Vec<_> = self.samples.iter().cloned().collect(); + let (_, &mut median, _) = samples.select_nth_unstable(self.samples.len() / 2); + + self.median_cached = median; + } +} + +#[derive(Debug, Default)] +pub(super) struct GasStatistics(RwLock); + +impl GasStatistics { + pub fn new(max_samples: usize, block: usize, fee_history: &[u64]) -> Self { + Self(RwLock::new(GasStatisticsInner::new( + max_samples, + block, + fee_history, + ))) + } + + pub fn median(&self) -> u64 { + self.0.read().unwrap().median() + } + + pub fn last_added_value(&self) -> u64 { + self.0.read().unwrap().last_added_value() + } + + pub fn add_samples(&self, fees: &[u64]) { + self.0.write().unwrap().add_samples(fees) + } + + pub fn last_processed_block(&self) -> usize { + self.0.read().unwrap().last_processed_block + } +} diff --git a/core/bin/zksync_core/src/gas_adjuster/tests.rs b/core/bin/zksync_core/src/gas_adjuster/tests.rs new file mode 100644 index 000000000000..40cf84f5d0dd --- /dev/null +++ b/core/bin/zksync_core/src/gas_adjuster/tests.rs @@ -0,0 +1,61 @@ +// Built-in uses +use 
std::collections::VecDeque; +use std::sync::Arc; +// Workspace uses +use zksync_config::GasAdjusterConfig; +use zksync_eth_client::clients::mock::MockEthereum; +// Local uses +use super::GasAdjuster; +use crate::gas_adjuster::GasStatisticsInner; + +/// Check that we compute the median correctly +#[test] +fn median() { + // sorted: 4 4 6 7 8 + assert_eq!(GasStatisticsInner::new(5, 5, &[6, 4, 7, 8, 4]).median(), 6); + // sorted: 4 4 8 10 + assert_eq!(GasStatisticsInner::new(4, 4, &[8, 4, 4, 10]).median(), 8); +} + +/// Check that we properly manage the block base fee queue +#[test] +fn samples_queue() { + let mut stats = GasStatisticsInner::new(5, 5, &[6, 4, 7, 8, 4, 5]); + + assert_eq!(stats.samples, VecDeque::from([4, 7, 8, 4, 5])); + + stats.add_samples(&[18, 18, 18]); + + assert_eq!(stats.samples, VecDeque::from([4, 5, 18, 18, 18])); +} + +/// Check that we properly fetch base fees as blocks are mined +#[tokio::test] +async fn kept_updated() { + let eth_client = + Arc::new(MockEthereum::default().with_fee_history(vec![0, 4, 6, 8, 7, 5, 5, 8, 10, 9])); + eth_client.advance_block_number(5); + + let adjuster = GasAdjuster::new( + Arc::clone(&eth_client), + GasAdjusterConfig { + default_priority_fee_per_gas: 5, + max_base_fee_samples: 5, + pricing_formula_parameter_a: 1.5, + pricing_formula_parameter_b: 1.0005, + internal_l1_pricing_multiplier: 0.8, + poll_period: 5, + }, + ) + .await + .unwrap(); + + assert_eq!(adjuster.statistics.0.read().unwrap().samples.len(), 5); + assert_eq!(adjuster.statistics.0.read().unwrap().median(), 6); + + eth_client.advance_block_number(3); + adjuster.keep_updated().await.unwrap(); + + assert_eq!(adjuster.statistics.0.read().unwrap().samples.len(), 5); + assert_eq!(adjuster.statistics.0.read().unwrap().median(), 7); +} diff --git a/core/bin/zksync_core/src/gas_tracker/constants.rs b/core/bin/zksync_core/src/gas_tracker/constants.rs new file mode 100644 index 000000000000..7dfc6a95b972 --- /dev/null +++ 
b/core/bin/zksync_core/src/gas_tracker/constants.rs @@ -0,0 +1,17 @@ +// Currently, every AGGR_* cost is overestimated, +// so there are safety margins around 100_000 -- 200_000 + +pub const AGGR_BLOCK_COMMIT_BASE_COST: u32 = 242_000; +pub const AGGR_BLOCK_PROVE_BASE_COST: u32 = 1_000_000; +pub const AGGR_BLOCK_EXECUTE_BASE_COST: u32 = 241_000; + +pub const BLOCK_COMMIT_BASE_COST: u32 = 31_000; +pub const BLOCK_PROVE_BASE_COST: u32 = 7_000; +pub const BLOCK_EXECUTE_BASE_COST: u32 = 30_000; + +pub const EXECUTE_COMMIT_COST: u32 = 0; +pub const EXECUTE_EXECUTE_COST: u32 = 0; + +pub const L1_OPERATION_EXECUTE_COST: u32 = 12_500; + +pub const GAS_PER_BYTE: u32 = 18; diff --git a/core/bin/zksync_core/src/gas_tracker/mod.rs b/core/bin/zksync_core/src/gas_tracker/mod.rs new file mode 100644 index 000000000000..5ec2a71b5489 --- /dev/null +++ b/core/bin/zksync_core/src/gas_tracker/mod.rs @@ -0,0 +1,95 @@ +//! This module predicts L1 gas cost for the Commit/PublishProof/Execute operations. + +use zksync_types::{ + aggregated_operations::AggregatedActionType, block::BlockGasCount, + commitment::BlockWithMetadata, tx::ExecutionMetrics, ExecuteTransactionCommon, Transaction, +}; + +use self::constants::*; +pub mod constants; + +pub fn agg_block_base_cost(op: AggregatedActionType) -> u32 { + match op { + AggregatedActionType::CommitBlocks => AGGR_BLOCK_COMMIT_BASE_COST, + AggregatedActionType::PublishProofBlocksOnchain => AGGR_BLOCK_PROVE_BASE_COST, + AggregatedActionType::ExecuteBlocks => AGGR_BLOCK_EXECUTE_BASE_COST, + } +} + +pub fn block_base_cost(op: AggregatedActionType) -> u32 { + match op { + AggregatedActionType::CommitBlocks => BLOCK_COMMIT_BASE_COST, + AggregatedActionType::PublishProofBlocksOnchain => BLOCK_PROVE_BASE_COST, + AggregatedActionType::ExecuteBlocks => BLOCK_EXECUTE_BASE_COST, + } +} + +pub trait GasCost { + fn base_cost(&self, op: AggregatedActionType) -> u32; +} + +impl GasCost for Transaction { + fn base_cost(&self, op: AggregatedActionType) -> u32 { + 
match op { + AggregatedActionType::CommitBlocks => EXECUTE_COMMIT_COST, + AggregatedActionType::PublishProofBlocksOnchain => 0, + AggregatedActionType::ExecuteBlocks => match self.common_data { + ExecuteTransactionCommon::L2(_) => EXECUTE_EXECUTE_COST, + ExecuteTransactionCommon::L1(_) => L1_OPERATION_EXECUTE_COST, + }, + } + } +} + +pub fn additional_commit_cost(execution_metrics: &ExecutionMetrics) -> u32 { + (execution_metrics.size() as u32) * GAS_PER_BYTE +} + +pub fn new_block_gas_count() -> BlockGasCount { + BlockGasCount { + commit: block_base_cost(AggregatedActionType::CommitBlocks), + prove: block_base_cost(AggregatedActionType::PublishProofBlocksOnchain), + execute: block_base_cost(AggregatedActionType::ExecuteBlocks), + } +} + +pub fn gas_count_from_tx_and_metrics( + tx: &Transaction, + execution_metrics: &ExecutionMetrics, +) -> BlockGasCount { + let commit = tx.base_cost(AggregatedActionType::CommitBlocks) + + additional_commit_cost(execution_metrics); + BlockGasCount { + commit, + prove: tx.base_cost(AggregatedActionType::PublishProofBlocksOnchain), + execute: tx.base_cost(AggregatedActionType::ExecuteBlocks), + } +} + +pub fn gas_count_from_metrics(execution_metrics: &ExecutionMetrics) -> BlockGasCount { + BlockGasCount { + commit: additional_commit_cost(execution_metrics), + prove: 0, + execute: 0, + } +} + +pub fn commit_gas_count_for_block(block: &BlockWithMetadata) -> u32 { + let base_cost = block_base_cost(AggregatedActionType::CommitBlocks); + let additional_calldata_bytes = block.metadata.initial_writes_compressed.len() as u32 + + block.metadata.repeated_writes_compressed.len() as u32 + + block.metadata.l2_l1_messages_compressed.len() as u32 + + block + .header + .l2_to_l1_messages + .iter() + .map(|message| message.len() as u32) + .sum::() + + block + .factory_deps + .iter() + .map(|factory_dep| factory_dep.len() as u32) + .sum::(); + let additional_cost = additional_calldata_bytes * GAS_PER_BYTE; + base_cost + additional_cost +} diff --git 
a/core/bin/zksync_core/src/genesis.rs b/core/bin/zksync_core/src/genesis.rs new file mode 100644 index 000000000000..f181233ad775 --- /dev/null +++ b/core/bin/zksync_core/src/genesis.rs @@ -0,0 +1,267 @@ +//! This module aims to provide a genesis setup for the zkSync Era network. +//! It initializes the Merkle tree with the basic setup (such as fields of special service accounts), +//! setups the required databases, and outputs the data required to initialize a smart contract. + +use tempfile::TempDir; +use vm::zk_evm::aux_structures::{LogQuery, Timestamp}; +use zksync_types::system_contracts::get_system_smart_contracts; +use zksync_types::tokens::{TokenInfo, TokenMetadata, ETHEREUM_ADDRESS}; +use zksync_types::{ + block::{BlockGasCount, L1BatchHeader, MiniblockHeader}, + get_code_key, Address, L1BatchNumber, MiniblockNumber, StorageLog, StorageLogKind, + StorageLogQueryType, H256, +}; +use zksync_types::{get_system_context_init_logs, StorageLogQuery, FAIR_L2_GAS_PRICE}; +use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, miniblock_hash}; + +use zksync_config::ZkSyncConfig; +use zksync_merkle_tree::ZkSyncTree; + +use zksync_dal::StorageProcessor; +use zksync_storage::db::Database; + +use zksync_storage::RocksDB; +use zksync_types::block::DeployedContract; +use zksync_types::commitment::{BlockCommitment, BlockMetadata}; +use zksync_types::log_query_sorter::sort_storage_access_queries; + +pub async fn ensure_genesis_state(storage: &mut StorageProcessor<'_>, config: ZkSyncConfig) { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + + let mut transaction = storage.start_transaction().await; + + // return if genesis block was already processed + if !transaction.blocks_dal().is_genesis_needed() { + vlog::debug!("genesis is not needed!"); + return; + } + vlog::info!("running regenesis"); + + // For now we consider the 
operator to be the first validator. + let first_validator_address = config.eth_sender.sender.operator_commit_eth_addr; + let chain_id = H256::from_low_u64_be(config.chain.eth.zksync_network_id as u64); + + chain_schema_genesis(&mut transaction, first_validator_address, chain_id).await; + vlog::info!("chain_schema_genesis is complete"); + + let storage_logs = + crate::metadata_calculator::get_logs_for_l1_batch(&mut transaction, L1BatchNumber(0)); + let metadata = tree.process_block(storage_logs.unwrap().storage_logs); + let genesis_root_hash = H256::from_slice(&metadata.root_hash); + let rollup_last_leaf_index = metadata.rollup_last_leaf_index; + + let block_commitment = BlockCommitment::new( + vec![], + rollup_last_leaf_index, + genesis_root_hash, + vec![], + vec![], + ); + + operations_schema_genesis( + &mut transaction, + &block_commitment, + genesis_root_hash, + rollup_last_leaf_index, + ); + vlog::info!("operations_schema_genesis is complete"); + + transaction.commit().await; + + // We need to `println` this value because it will be used to initialize the smart contract. 
+ println!( + "CONTRACTS_GENESIS_ROOT=0x{}", + hex::encode(genesis_root_hash) + ); + println!( + "CONTRACTS_GENESIS_BLOCK_COMMITMENT=0x{}", + hex::encode(block_commitment.hash().commitment) + ); + println!( + "CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX={}", + rollup_last_leaf_index + ); +} + +async fn insert_system_contracts( + storage: &mut StorageProcessor<'_>, + contracts: Vec, + chain_id: H256, +) { + let system_context_init_logs = (H256::default(), get_system_context_init_logs(chain_id)); + + let storage_logs: Vec<(H256, Vec)> = contracts + .clone() + .into_iter() + .map(|contract| { + let hash = hash_bytecode(&contract.bytecode); + let code_key = get_code_key(contract.account_id.address()); + + ( + Default::default(), + vec![StorageLog::new_write_log(code_key, hash)], + ) + }) + .chain(Some(system_context_init_logs)) + .collect(); + + let mut transaction = storage.start_transaction().await; + + transaction + .storage_logs_dal() + .insert_storage_logs(MiniblockNumber(0), &storage_logs); + + // we don't produce proof for the genesis block, + // but we still need to populate the table + // to have the correct initial state of the merkle tree + let log_queries: Vec = storage_logs + .iter() + .enumerate() + .flat_map(|(tx_index, (_, storage_logs))| { + storage_logs + .iter() + .enumerate() + .map(move |(log_index, storage_log)| { + let log_type = match storage_log.kind { + StorageLogKind::Read => StorageLogQueryType::Read, + StorageLogKind::Write => StorageLogQueryType::InitialWrite, + }; + StorageLogQuery { + log_query: LogQuery { + // Monotonically increasing Timestamp. Normally it's generated by the VM, but we don't have a VM in the genesis block. 
+ timestamp: Timestamp(((tx_index << 16) + log_index) as u32), + tx_number_in_block: tx_index as u16, + aux_byte: 0, + shard_id: 0, + address: *storage_log.key.address(), + key: h256_to_u256(*storage_log.key.key()), + read_value: h256_to_u256(H256::zero()), + written_value: h256_to_u256(storage_log.value), + rw_flag: storage_log.kind == StorageLogKind::Write, + rollback: false, + is_service: false, + }, + log_type, + } + }) + .collect::>() + }) + .collect(); + + let (_, deduped_log_queries) = sort_storage_access_queries(&log_queries); + + transaction + .storage_logs_dedup_dal() + .insert_storage_logs(L1BatchNumber(0), &deduped_log_queries); + + let (protective_reads, deduplicated_writes): (Vec<_>, Vec<_>) = deduped_log_queries + .into_iter() + .partition(|log_query| log_query.log_type == StorageLogQueryType::Read); + transaction + .storage_logs_dedup_dal() + .insert_protective_reads(L1BatchNumber(0), &protective_reads); + transaction + .storage_logs_dedup_dal() + .insert_initial_writes(L1BatchNumber(0), &deduplicated_writes); + + transaction.storage_dal().apply_storage_logs(&storage_logs); + + let factory_deps = contracts + .into_iter() + .map(|c| (hash_bytecode(&c.bytecode), c.bytecode)) + .collect(); + transaction + .storage_dal() + .insert_factory_deps(MiniblockNumber(0), factory_deps); + + transaction.commit().await; +} + +pub(crate) async fn chain_schema_genesis<'a>( + storage: &mut StorageProcessor<'_>, + first_validator_address: Address, + chain_id: H256, +) { + let mut zero_block_header = L1BatchHeader::new(L1BatchNumber(0), 0, first_validator_address); + zero_block_header.is_finished = true; + + let zero_miniblock_header = MiniblockHeader { + number: MiniblockNumber(0), + timestamp: 0, + hash: miniblock_hash(MiniblockNumber(0)), + l1_tx_count: 0, + l2_tx_count: 0, + base_fee_per_gas: 0, + l1_gas_price: 0, + l2_fair_gas_price: FAIR_L2_GAS_PRICE, + }; + + let mut transaction = storage.start_transaction().await; + + transaction + .blocks_dal() + 
.insert_l1_batch(zero_block_header, BlockGasCount::default()); + transaction + .blocks_dal() + .insert_miniblock(zero_miniblock_header); + transaction + .blocks_dal() + .mark_miniblocks_as_executed_in_l1_batch(L1BatchNumber(0)); + + let contracts = get_system_smart_contracts(); + insert_system_contracts(&mut transaction, contracts, chain_id).await; + + add_eth_token(&mut transaction).await; + + transaction.commit().await; +} + +pub(crate) async fn add_eth_token(storage: &mut StorageProcessor<'_>) { + let eth_token = TokenInfo { + l1_address: ETHEREUM_ADDRESS, + l2_address: ETHEREUM_ADDRESS, + metadata: TokenMetadata { + name: "Ether".to_string(), + symbol: "ETH".to_string(), + decimals: 18, + }, + }; + + let mut transaction = storage.start_transaction().await; + + transaction.tokens_dal().add_tokens(vec![eth_token.clone()]); + transaction + .tokens_dal() + .update_well_known_l1_token(ÐEREUM_ADDRESS, eth_token.metadata); + + transaction.commit().await; +} + +pub(crate) fn operations_schema_genesis( + storage: &mut StorageProcessor<'_>, + block_commitment: &BlockCommitment, + genesis_root_hash: H256, + rollup_last_leaf_index: u64, +) { + let block_commitment_hash = block_commitment.hash(); + + let metadata = BlockMetadata { + root_hash: genesis_root_hash, + rollup_last_leaf_index, + merkle_root_hash: genesis_root_hash, + initial_writes_compressed: vec![], + repeated_writes_compressed: vec![], + commitment: block_commitment_hash.commitment, + l2_l1_messages_compressed: vec![], + l2_l1_merkle_root: Default::default(), + block_meta_params: block_commitment.meta_parameters(), + aux_data_hash: block_commitment_hash.aux_output, + meta_parameters_hash: block_commitment_hash.meta_parameters, + pass_through_data_hash: block_commitment_hash.pass_through_data, + }; + storage + .blocks_dal() + .save_block_metadata(L1BatchNumber(0), metadata); +} diff --git a/core/bin/zksync_core/src/house_keeper/gcs_blob_cleaner.rs b/core/bin/zksync_core/src/house_keeper/gcs_blob_cleaner.rs new 
file mode 100644 index 000000000000..a71e0da76386 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/gcs_blob_cleaner.rs @@ -0,0 +1,217 @@ +use std::error; + +use zksync_dal::ConnectionPool; +use zksync_object_store::cloud_storage::Reason; +use zksync_object_store::gcs_object_store::cloud_storage::Error; +use zksync_object_store::gcs_object_store::GOOGLE_CLOUD_STORAGE_OBJECT_STORE_TYPE; +use zksync_object_store::object_store::{ + DynamicObjectStore, LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, PROVER_JOBS_BUCKET_PATH, + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, WITNESS_INPUT_BUCKET_PATH, +}; + +use crate::house_keeper::periodic_job::PeriodicJob; + +#[derive(Debug)] +pub struct GcsBlobCleaner { + pub object_store: DynamicObjectStore, +} + +const BATCH_CLEANUP_SIZE: u8 = 5; + +fn handle_remove_result(object_store_type: &str, result: Result<(), Box>) { + if object_store_type == GOOGLE_CLOUD_STORAGE_OBJECT_STORE_TYPE { + match result { + Ok(_) => {} // DO NOTHING + Err(err) => { + let gcs_error = err.downcast::().unwrap(); + match *gcs_error { + Error::Google(err) => { + if err + .error + .errors + .iter() + .any(|err| matches!(err.reason, Reason::NotFound)) + { + return; + } + panic!("{:?}", err) + } + _ => { + panic!("{:?}", gcs_error) + } + } + } + } + } +} + +/// There can be scenario when the removal from the GCS succeeded and updating the DB after that fails, +/// in this scenario the retry of removal from GCS would fail as the object is already removed. +/// To handle this either the `Key does not exist` error from GCS can be ignored or other option is to do everything inside a transaction. 
+impl GcsBlobCleaner { + fn cleanup_blobs(&mut self, pool: ConnectionPool) { + self.cleanup_prover_jobs_blobs(pool.clone()); + self.cleanup_witness_inputs_blobs(pool.clone()); + self.cleanup_leaf_aggregation_witness_jobs_blobs(pool.clone()); + self.cleanup_node_aggregation_witness_jobs_blobs(pool.clone()); + self.cleanup_scheduler_witness_jobs_blobs(pool); + } + + fn cleanup_prover_jobs_blobs(&mut self, pool: ConnectionPool) { + let mut conn = pool.access_storage_blocking(); + let id_blob_urls_tuple = conn + .prover_dal() + .get_circuit_input_blob_urls_to_be_cleaned(BATCH_CLEANUP_SIZE); + let (ids, circuit_input_blob_urls): (Vec<_>, Vec<_>) = + id_blob_urls_tuple.into_iter().unzip(); + + vlog::info!("Found {} provers jobs for cleaning blobs", ids.len()); + + circuit_input_blob_urls.into_iter().for_each(|url| { + handle_remove_result( + self.object_store.get_store_type(), + self.object_store.remove(PROVER_JOBS_BUCKET_PATH, url), + ); + }); + + conn.prover_dal().mark_gcs_blobs_as_cleaned(ids); + } + + fn cleanup_witness_inputs_blobs(&mut self, pool: ConnectionPool) { + let mut conn = pool.access_storage_blocking(); + let l1_batches_blob_urls_tuple = conn + .blocks_dal() + .get_merkle_tree_paths_blob_urls_to_be_cleaned(BATCH_CLEANUP_SIZE); + let (l1_batch_numbers, merkle_tree_paths_blob_urls): (Vec<_>, Vec<_>) = + l1_batches_blob_urls_tuple.into_iter().unzip(); + + vlog::info!( + "Found {} witness inputs for cleaning blobs", + l1_batch_numbers.len() + ); + + merkle_tree_paths_blob_urls.into_iter().for_each(|url| { + handle_remove_result( + self.object_store.get_store_type(), + self.object_store.remove(WITNESS_INPUT_BUCKET_PATH, url), + ); + }); + conn.blocks_dal() + .mark_gcs_blobs_as_cleaned(l1_batch_numbers); + } + + fn cleanup_leaf_aggregation_witness_jobs_blobs(&mut self, pool: ConnectionPool) { + let mut conn = pool.access_storage_blocking(); + + let l1_batches_blob_urls_tuple = conn + .witness_generator_dal() + 
.get_basic_circuit_and_circuit_inputs_blob_urls_to_be_cleaned(BATCH_CLEANUP_SIZE); + let (l1_batch_numbers, basic_circuit_and_circuit_inputs_blob_urls): (Vec<_>, Vec<_>) = + l1_batches_blob_urls_tuple.into_iter().unzip(); + + vlog::info!( + "Found {} leaf aggregation witness jobs for cleaning blobs", + l1_batch_numbers.len() + ); + + basic_circuit_and_circuit_inputs_blob_urls + .into_iter() + .for_each(|url_pair| { + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, url_pair.0), + ); + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, url_pair.1), + ); + }); + + conn.witness_generator_dal() + .mark_leaf_aggregation_gcs_blobs_as_cleaned(l1_batch_numbers); + } + + fn cleanup_node_aggregation_witness_jobs_blobs(&mut self, pool: ConnectionPool) { + let mut conn = pool.access_storage_blocking(); + let l1_batches_blob_urls_tuple = conn + .witness_generator_dal() + .get_leaf_layer_subqueues_and_aggregation_outputs_blob_urls_to_be_cleaned( + BATCH_CLEANUP_SIZE, + ); + + let (l1_batch_numbers, leaf_layer_subqueues_and_aggregation_outputs_blob_urls): ( + Vec<_>, + Vec<_>, + ) = l1_batches_blob_urls_tuple.into_iter().unzip(); + + vlog::info!( + "Found {} node aggregation witness jobs for cleaning blobs", + l1_batch_numbers.len() + ); + + leaf_layer_subqueues_and_aggregation_outputs_blob_urls + .into_iter() + .for_each(|url_pair| { + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, url_pair.0), + ); + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, url_pair.1), + ); + }); + conn.witness_generator_dal() + .mark_node_aggregation_gcs_blobs_as_cleaned(l1_batch_numbers); + } + + fn cleanup_scheduler_witness_jobs_blobs(&mut self, pool: ConnectionPool) { 
+ let mut conn = pool.access_storage_blocking(); + let l1_batches_blob_urls_tuple = conn + .witness_generator_dal() + .get_scheduler_witness_and_node_aggregations_blob_urls_to_be_cleaned( + BATCH_CLEANUP_SIZE, + ); + + let (l1_batch_numbers, scheduler_witness_and_node_aggregations_blob_urls): ( + Vec<_>, + Vec<_>, + ) = l1_batches_blob_urls_tuple.into_iter().unzip(); + + vlog::info!( + "Found {} scheduler witness jobs for cleaning blobs", + l1_batch_numbers.len() + ); + + scheduler_witness_and_node_aggregations_blob_urls + .into_iter() + .for_each(|url_pair| { + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(SCHEDULER_WITNESS_JOBS_BUCKET_PATH, url_pair.0), + ); + handle_remove_result( + self.object_store.get_store_type(), + self.object_store + .remove(SCHEDULER_WITNESS_JOBS_BUCKET_PATH, url_pair.1), + ); + }); + conn.witness_generator_dal() + .mark_scheduler_witness_gcs_blobs_as_cleaned(l1_batch_numbers); + } +} + +impl PeriodicJob for GcsBlobCleaner { + const SERVICE_NAME: &'static str = "GcsBlobCleaner"; + const POLLING_INTERVAL_MS: u64 = 5000; + + fn run_routine_task(&mut self, connection_pool: ConnectionPool) { + self.cleanup_blobs(connection_pool); + } +} diff --git a/core/bin/zksync_core/src/house_keeper/gpu_prover_queue_monitor.rs b/core/bin/zksync_core/src/house_keeper/gpu_prover_queue_monitor.rs new file mode 100644 index 000000000000..17b09d56f919 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/gpu_prover_queue_monitor.rs @@ -0,0 +1,29 @@ +use zksync_dal::ConnectionPool; + +use crate::house_keeper::periodic_job::PeriodicJob; + +#[derive(Debug, Default)] +pub struct GpuProverQueueMonitor {} + +/// Invoked periodically to push prover job statistics to Prometheus +/// Note: these values will be used for auto-scaling circuit-synthesizer +impl PeriodicJob for GpuProverQueueMonitor { + const SERVICE_NAME: &'static str = "GpuProverQueueMonitor"; + + fn run_routine_task(&mut self, connection_pool: 
ConnectionPool) { + let free_prover_instance_count = connection_pool + .access_storage_blocking() + .gpu_prover_queue_dal() + .get_count_of_jobs_ready_for_processing(); + vlog::info!( + "Found {} free circuit synthesizer jobs", + free_prover_instance_count + ); + + metrics::gauge!( + "server.circuit_synthesizer.jobs", + free_prover_instance_count as f64, + "type" => "queued" + ); + } +} diff --git a/core/bin/zksync_core/src/house_keeper/mod.rs b/core/bin/zksync_core/src/house_keeper/mod.rs new file mode 100644 index 000000000000..cbf6ed875184 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/mod.rs @@ -0,0 +1,5 @@ +pub mod gcs_blob_cleaner; +pub mod gpu_prover_queue_monitor; +pub mod periodic_job; +pub mod witness_generator_misc_reporter; +pub mod witness_generator_queue_monitor; diff --git a/core/bin/zksync_core/src/house_keeper/periodic_job.rs b/core/bin/zksync_core/src/house_keeper/periodic_job.rs new file mode 100644 index 000000000000..edded0668490 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/periodic_job.rs @@ -0,0 +1,30 @@ +use std::time::Duration; + +use async_trait::async_trait; +use tokio::time::sleep; + +use zksync_dal::ConnectionPool; + +#[async_trait] +pub trait PeriodicJob { + const SERVICE_NAME: &'static str; + const POLLING_INTERVAL_MS: u64 = 1000; + + /// Runs the routine task periodically in `POLLING_INTERVAL_MS` frequency. 
+ fn run_routine_task(&mut self, connection_pool: ConnectionPool); + + async fn run(mut self, connection_pool: ConnectionPool) + where + Self: Sized, + { + vlog::info!( + "Starting periodic job: {} with frequency: {} ms", + Self::SERVICE_NAME, + Self::POLLING_INTERVAL_MS + ); + loop { + self.run_routine_task(connection_pool.clone()); + sleep(Duration::from_millis(Self::POLLING_INTERVAL_MS)).await; + } + } +} diff --git a/core/bin/zksync_core/src/house_keeper/witness_generator_misc_reporter.rs b/core/bin/zksync_core/src/house_keeper/witness_generator_misc_reporter.rs new file mode 100644 index 000000000000..364d56fb9b72 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/witness_generator_misc_reporter.rs @@ -0,0 +1,44 @@ +use crate::house_keeper::periodic_job::PeriodicJob; +use zksync_config::configs::{ + prover::ProverConfig, + witness_generator::{SamplingMode, WitnessGeneratorConfig}, +}; +use zksync_dal::ConnectionPool; + +#[derive(Debug)] +pub struct WitnessGeneratorMetricsReporter { + pub witness_generator_config: WitnessGeneratorConfig, + pub prover_config: ProverConfig, +} + +impl WitnessGeneratorMetricsReporter { + fn report_metrics(&self, connection_pool: ConnectionPool) { + let mut conn = connection_pool.access_storage_blocking(); + let last_sealed_l1_batch_number = conn.blocks_dal().get_sealed_block_number(); + let min_unproved_l1_batch_number = conn + .prover_dal() + .min_unproved_l1_batch_number(self.prover_config.max_attempts) + .unwrap_or(last_sealed_l1_batch_number); + let prover_lag = last_sealed_l1_batch_number.0 - min_unproved_l1_batch_number.0; + metrics::gauge!("server.prover.lag", prover_lag as f64); + + if let SamplingMode::Enabled(sampling_params) = + self.witness_generator_config.sampling_mode() + { + let sampling_probability = + sampling_params.calculate_sampling_probability(prover_lag as usize); + metrics::gauge!( + "server.witness_generator.sampling_probability", + sampling_probability + ); + } + } +} + +impl PeriodicJob for 
WitnessGeneratorMetricsReporter { + const SERVICE_NAME: &'static str = "WitnessGeneratorMiscReporter"; + + fn run_routine_task(&mut self, connection_pool: ConnectionPool) { + self.report_metrics(connection_pool); + } +} diff --git a/core/bin/zksync_core/src/house_keeper/witness_generator_queue_monitor.rs b/core/bin/zksync_core/src/house_keeper/witness_generator_queue_monitor.rs new file mode 100644 index 000000000000..047306a10d86 --- /dev/null +++ b/core/bin/zksync_core/src/house_keeper/witness_generator_queue_monitor.rs @@ -0,0 +1,50 @@ +use zksync_dal::ConnectionPool; +use zksync_types::proofs::{AggregationRound, JobCountStatistics}; + +use crate::house_keeper::periodic_job::PeriodicJob; + +const WITNESS_GENERATOR_SERVICE_NAME: &str = "witness_generator"; + +#[derive(Debug, Default)] +pub struct WitnessGeneratorStatsReporter {} + +impl WitnessGeneratorStatsReporter { + fn get_job_statistics(connection_pool: ConnectionPool) -> JobCountStatistics { + let mut conn = connection_pool.access_storage_blocking(); + conn.witness_generator_dal() + .get_witness_jobs_stats(AggregationRound::BasicCircuits) + + conn + .witness_generator_dal() + .get_witness_jobs_stats(AggregationRound::LeafAggregation) + + conn + .witness_generator_dal() + .get_witness_jobs_stats(AggregationRound::NodeAggregation) + + conn + .witness_generator_dal() + .get_witness_jobs_stats(AggregationRound::Scheduler) + } +} + +/// Invoked periodically to push job statistics to Prometheus +/// Note: these values will be used for auto-scaling job processors +impl PeriodicJob for WitnessGeneratorStatsReporter { + const SERVICE_NAME: &'static str = "WitnessGeneratorStatsReporter"; + + fn run_routine_task(&mut self, connection_pool: ConnectionPool) { + let stats = Self::get_job_statistics(connection_pool); + + vlog::info!("Found {} free witness generators jobs", stats.queued); + + metrics::gauge!( + format!("server.{}.jobs", WITNESS_GENERATOR_SERVICE_NAME), + stats.queued as f64, + "type" => "queued" + ); + + 
metrics::gauge!( + format!("server.{}.jobs", WITNESS_GENERATOR_SERVICE_NAME), + stats.in_progress as f64, + "type" => "in_progress" + ); + } +} diff --git a/core/bin/zksync_core/src/lib.rs b/core/bin/zksync_core/src/lib.rs new file mode 100644 index 000000000000..7296bdc6eeff --- /dev/null +++ b/core/bin/zksync_core/src/lib.rs @@ -0,0 +1,548 @@ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +use std::str::FromStr; +use std::sync::{Arc, Mutex}; + +use futures::channel::oneshot; +use futures::future; +use std::time::Instant; +use tokio::sync::watch; +use tokio::task::JoinHandle; +use zksync_config::configs::WitnessGeneratorConfig; + +use house_keeper::periodic_job::PeriodicJob; +use prometheus_exporter::run_prometheus_exporter; +use zksync_circuit_breaker::{ + code_hashes::CodeHashesChecker, facet_selectors::FacetSelectorsChecker, + l1_txs::FailedL1TransactionChecker, vks::VksChecker, CircuitBreaker, CircuitBreakerChecker, + CircuitBreakerError, +}; +use zksync_config::ZkSyncConfig; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_eth_client::EthInterface; +use zksync_mempool::MempoolStore; +use zksync_object_store::object_store::create_object_store_from_env; +use zksync_queued_job_processor::JobProcessor; + +use crate::eth_sender::{Aggregator, EthTxManager}; +use crate::fee_monitor::FeeMonitor; +use crate::house_keeper::gcs_blob_cleaner::GcsBlobCleaner; +use crate::house_keeper::gpu_prover_queue_monitor::GpuProverQueueMonitor; +use crate::house_keeper::{ + witness_generator_misc_reporter::WitnessGeneratorMetricsReporter, + witness_generator_queue_monitor::WitnessGeneratorStatsReporter, +}; +use crate::metadata_calculator::{MetadataCalculator, MetadataCalculatorMode}; +use crate::state_keeper::{MempoolFetcher, MempoolGuard}; +use crate::witness_generator::WitnessGenerator; +use crate::{ + api_server::{explorer, web3}, + data_fetchers::run_data_fetchers, + 
eth_sender::EthTxAggregator, + eth_watch::start_eth_watch, + gas_adjuster::GasAdjuster, +}; + +pub mod api_server; +pub mod data_fetchers; +pub mod db_storage_provider; +pub mod eth_sender; +pub mod eth_watch; +pub mod fee_monitor; +pub mod fee_ticker; +pub mod gas_adjuster; +pub mod gas_tracker; +pub mod genesis; +pub mod house_keeper; +pub mod metadata_calculator; +pub mod state_keeper; +pub mod witness_generator; + +/// Waits for *any* of the tokio tasks to be finished. +/// Since thesks are used as actors which should live as long +/// as application runs, any possible outcome (either `Ok` or `Err`) is considered +/// as a reason to stop the server completely. +pub async fn wait_for_tasks(task_futures: Vec>, tasks_allowed_to_finish: bool) { + match future::select_all(task_futures).await.0 { + Ok(_) => { + if tasks_allowed_to_finish { + vlog::info!("One of the actors finished its run. Finishing execution."); + } else { + vlog::info!( + "One of the actors finished its run, while it wasn't expected to do it" + ); + } + } + Err(error) => { + vlog::info!( + "One of the tokio actors unexpectedly finished, shutting down: {:?}", + error + ); + } + } +} + +/// Inserts the initial information about zkSync tokens into the database. +pub async fn genesis_init(config: ZkSyncConfig) { + let mut storage = StorageProcessor::establish_connection(true).await; + genesis::ensure_genesis_state(&mut storage, config).await; +} + +#[derive(Clone, Debug, PartialEq)] +pub enum Component { + // Public Web3 API running on HTTP server. + HttpApi, + // Public Web3 API (including PubSub) running on WebSocket server. + WsApi, + // REST API for explorer. + ExplorerApi, + // Metadata Calculator. + Tree, + TreeLightweight, + TreeBackup, + EthWatcher, + // Eth tx generator + EthTxAggregator, + // Manager for eth tx + EthTxManager, + // Data fetchers: list fetcher, volume fetcher, price fetcher. + DataFetcher, + // State keeper. + StateKeeper, + // Witness Generator. 
The argument is a number of jobs to process. If None, runs indefinitely. + WitnessGenerator(Option), + // Component for housekeeping task such as cleaning blobs from GCS, reporting metrics etc. + Housekeeper, +} + +#[derive(Debug)] +pub struct Components(pub Vec); + +impl FromStr for Components { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "api" => Ok(Components(vec![ + Component::HttpApi, + Component::WsApi, + Component::ExplorerApi, + ])), + "http_api" => Ok(Components(vec![Component::HttpApi])), + "ws_api" => Ok(Components(vec![Component::WsApi])), + "explorer_api" => Ok(Components(vec![Component::ExplorerApi])), + "tree" => Ok(Components(vec![Component::Tree])), + "tree_lightweight" => Ok(Components(vec![Component::TreeLightweight])), + "tree_backup" => Ok(Components(vec![Component::TreeBackup])), + "data_fetcher" => Ok(Components(vec![Component::DataFetcher])), + "state_keeper" => Ok(Components(vec![Component::StateKeeper])), + "housekeeper" => Ok(Components(vec![Component::Housekeeper])), + "witness_generator" => Ok(Components(vec![Component::WitnessGenerator(None)])), + "one_shot_witness_generator" => { + Ok(Components(vec![Component::WitnessGenerator(Some(1))])) + } + "eth" => Ok(Components(vec![ + Component::EthWatcher, + Component::EthTxAggregator, + Component::EthTxManager, + ])), + "eth_watcher" => Ok(Components(vec![Component::EthWatcher])), + "eth_tx_aggregator" => Ok(Components(vec![Component::EthTxAggregator])), + "eth_tx_manager" => Ok(Components(vec![Component::EthTxManager])), + other => Err(format!("{} is not a valid component name", other)), + } + } +} + +pub async fn initialize_components( + config: &ZkSyncConfig, + components: Vec, + use_prometheus_pushgateway: bool, +) -> anyhow::Result<( + Vec>, + watch::Sender, + oneshot::Receiver, +)> { + vlog::info!("Starting the components: {:?}", components); + let connection_pool = ConnectionPool::new(None, true); + let replica_connection_pool = ConnectionPool::new(None, 
false); + + let circuit_breaker_checker = CircuitBreakerChecker::new( + circuit_breakers_for_components(&components, config), + &config.chain.circuit_breaker, + ); + circuit_breaker_checker.check().await.unwrap_or_else(|err| { + panic!("Circuit breaker triggered: {}", err); + }); + + let (stop_sender, stop_receiver) = watch::channel(false); + let (cb_sender, cb_receiver) = oneshot::channel(); + // Prometheus exporter and circuit breaker checker should run for every component configuration. + let mut task_futures: Vec> = vec![ + run_prometheus_exporter(config.api.prometheus.clone(), use_prometheus_pushgateway), + tokio::spawn(circuit_breaker_checker.run(cb_sender, stop_receiver.clone())), + ]; + + if components.contains(&Component::HttpApi) { + let started_at = Instant::now(); + vlog::info!("initializing HTTP API"); + task_futures.extend( + run_http_api( + config, + connection_pool.clone(), + replica_connection_pool.clone(), + stop_receiver.clone(), + ) + .await, + ); + vlog::info!("initialized HTTP API in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "http_api"); + } + + if components.contains(&Component::WsApi) { + let started_at = Instant::now(); + vlog::info!("initializing WS API"); + task_futures.extend( + run_ws_api( + config, + connection_pool.clone(), + replica_connection_pool.clone(), + stop_receiver.clone(), + ) + .await, + ); + vlog::info!("initialized WS API in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "ws_api"); + } + + if components.contains(&Component::ExplorerApi) { + let started_at = Instant::now(); + vlog::info!("initializing explorer REST API"); + task_futures.push(explorer::start_server_thread_detached( + config, + connection_pool.clone(), + replica_connection_pool, + stop_receiver.clone(), + )); + vlog::info!( + "initialized explorer REST API in {:?}", + started_at.elapsed() + ); + 
metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "explorer_api"); + } + + if components.contains(&Component::StateKeeper) { + let started_at = Instant::now(); + vlog::info!("initializing State Keeper"); + let state_keeper_pool = ConnectionPool::new(Some(1), true); + let next_priority_id = state_keeper_pool + .access_storage() + .await + .transactions_dal() + .next_priority_id(); + let mempool = MempoolGuard(Arc::new(Mutex::new(MempoolStore::new( + next_priority_id, + config.chain.mempool.capacity, + )))); + let eth_gateway = EthereumClient::from_config(config); + let gas_adjuster = Arc::new( + GasAdjuster::new(eth_gateway.clone(), config.eth_sender.gas_adjuster) + .await + .unwrap(), + ); + task_futures.push(tokio::task::spawn( + gas_adjuster.clone().run(stop_receiver.clone()), + )); + + let state_keeper_actor = crate::state_keeper::start_state_keeper( + config, + &state_keeper_pool, + mempool.clone(), + gas_adjuster.clone(), + stop_receiver.clone(), + ); + + task_futures.push(tokio::task::spawn_blocking(move || { + state_keeper_actor.run() + })); + + let mempool_fetcher_pool = ConnectionPool::new(Some(1), true); + let mempool_fetcher_actor = MempoolFetcher::new(mempool, gas_adjuster, config); + task_futures.push(tokio::spawn(mempool_fetcher_actor.run( + mempool_fetcher_pool, + config.chain.mempool.remove_stuck_txs, + config.chain.mempool.stuck_tx_timeout(), + stop_receiver.clone(), + ))); + + // Fee monitor is normally tied to a single instance of server, and it makes most sense to keep it together + // with state keeper (since without state keeper running there should be no balance changes). 
+ let fee_monitor_eth_gateway = EthereumClient::from_config(config); + let fee_monitor_pool = ConnectionPool::new(Some(1), true); + let fee_monitor_actor = + FeeMonitor::new(config, fee_monitor_pool, fee_monitor_eth_gateway).await; + task_futures.push(tokio::spawn(fee_monitor_actor.run())); + + vlog::info!("initialized State Keeper in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "state_keeper"); + } + + if components.contains(&Component::EthWatcher) { + let started_at = Instant::now(); + vlog::info!("initializing ETH-Watcher"); + let eth_gateway = EthereumClient::from_config(config); + let eth_watch_pool = ConnectionPool::new(Some(1), true); + task_futures.push( + start_eth_watch( + eth_watch_pool, + eth_gateway.clone(), + config, + stop_receiver.clone(), + ) + .await, + ); + vlog::info!("initialized ETH-Watcher in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "eth_watcher"); + } + + if components.contains(&Component::EthTxAggregator) { + let started_at = Instant::now(); + vlog::info!("initializing ETH-TxAggregator"); + let eth_sender_storage = ConnectionPool::new(Some(1), true); + let eth_gateway = EthereumClient::from_config(config); + let nonce = eth_gateway.pending_nonce("eth_sender").await.unwrap(); + let eth_tx_aggregator_actor = EthTxAggregator::new( + config.eth_sender.sender.clone(), + Aggregator::new(config.eth_sender.sender.clone()), + config.contracts.diamond_proxy_addr, + nonce.as_u64(), + ); + task_futures.push(tokio::spawn( + eth_tx_aggregator_actor.run(eth_sender_storage.clone(), stop_receiver.clone()), + )); + vlog::info!("initialized ETH-TxAggregator in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "eth_tx_aggregator"); + } + + if components.contains(&Component::EthTxManager) { + let started_at = Instant::now(); + 
vlog::info!("initializing ETH-TxManager"); + let eth_sender_storage = ConnectionPool::new(Some(1), true); + let eth_gateway = EthereumClient::from_config(config); + let gas_adjuster = Arc::new( + GasAdjuster::new(eth_gateway.clone(), config.eth_sender.gas_adjuster) + .await + .unwrap(), + ); + let eth_tx_manager_actor = EthTxManager::new( + config.eth_sender.sender.clone(), + gas_adjuster.clone(), + eth_gateway.clone(), + ); + task_futures.extend([ + tokio::spawn( + eth_tx_manager_actor.run(eth_sender_storage.clone(), stop_receiver.clone()), + ), + tokio::spawn(gas_adjuster.run(stop_receiver.clone())), + ]); + vlog::info!("initialized ETH-TxManager in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "eth_tx_aggregator"); + } + + if components.contains(&Component::DataFetcher) { + let started_at = Instant::now(); + vlog::info!("initializing data fetchers"); + task_futures.extend(run_data_fetchers( + config, + connection_pool.clone(), + stop_receiver.clone(), + )); + vlog::info!("initialized data fetchers in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "data_fetchers"); + } + + if components.contains(&Component::Tree) { + let started_at = Instant::now(); + vlog::info!("initializing the tree"); + task_futures.extend(run_tree( + config, + stop_receiver.clone(), + MetadataCalculatorMode::Full, + )); + vlog::info!("initialized tree in {:?}", started_at.elapsed()); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "tree"); + } + + if components.contains(&Component::TreeLightweight) { + task_futures.extend(run_tree( + config, + stop_receiver.clone(), + MetadataCalculatorMode::Lightweight, + )); + } + + if components.contains(&Component::TreeBackup) { + task_futures.extend(run_tree( + config, + stop_receiver.clone(), + MetadataCalculatorMode::Backup, + )); + } + + // We don't want witness 
generator to run on local nodes, as it's CPU heavy and is not stable yet + let is_local_setup = std::env::var("ZKSYNC_LOCAL_SETUP") == Ok("true".to_owned()); + if let Some(Component::WitnessGenerator(batch_size)) = components + .iter() + .find(|c| matches!(c, Component::WitnessGenerator(_))) + { + if !is_local_setup { + let started_at = Instant::now(); + vlog::info!( + "initializing the witness generator, batch size: {:?}", + batch_size + ); + let config = WitnessGeneratorConfig::from_env(); + let witness_generator = WitnessGenerator::new(config); + task_futures.push(tokio::spawn(witness_generator.run( + connection_pool.clone(), + stop_receiver.clone(), + *batch_size, + ))); + vlog::info!( + "initialized witness generator in {:?}", + started_at.elapsed() + ); + metrics::gauge!("server.init.latency", started_at.elapsed().as_secs() as f64, "stage" => "witness_generator"); + } + } + + if components.contains(&Component::Housekeeper) { + let witness_generator_misc_reporter = WitnessGeneratorMetricsReporter { + witness_generator_config: WitnessGeneratorConfig::from_env(), + prover_config: config.prover.non_gpu.clone(), + }; + let gcs_blob_cleaner = GcsBlobCleaner { + object_store: create_object_store_from_env(), + }; + let witness_generator_metrics = vec![ + tokio::spawn( + WitnessGeneratorStatsReporter::default().run(ConnectionPool::new(Some(1), true)), + ), + tokio::spawn(witness_generator_misc_reporter.run(ConnectionPool::new(Some(1), true))), + tokio::spawn(GpuProverQueueMonitor::default().run(ConnectionPool::new(Some(1), true))), + tokio::spawn(gcs_blob_cleaner.run(ConnectionPool::new(Some(1), true))), + ]; + + task_futures.extend(witness_generator_metrics); + } + + Ok((task_futures, stop_sender, cb_receiver)) +} + +fn run_tree( + config: &ZkSyncConfig, + stop_receiver: watch::Receiver, + mode: MetadataCalculatorMode, +) -> Vec> { + let metadata_calculator = MetadataCalculator::new(config, mode); + let pool = ConnectionPool::new(Some(1), true); + 
vec![tokio::spawn(metadata_calculator.run(pool, stop_receiver))] +} + +async fn run_http_api( + config: &ZkSyncConfig, + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + stop_receiver: watch::Receiver, +) -> Vec> { + let eth_gateway = EthereumClient::from_config(config); + let gas_adjuster = Arc::new( + GasAdjuster::new(eth_gateway.clone(), config.eth_sender.gas_adjuster) + .await + .unwrap(), + ); + vec![ + web3::start_http_rpc_server_old( + master_connection_pool, + replica_connection_pool, + config, + stop_receiver.clone(), + gas_adjuster.clone(), + ), + tokio::spawn(gas_adjuster.run(stop_receiver)), + ] +} + +async fn run_ws_api( + config: &ZkSyncConfig, + master_connection_pool: ConnectionPool, + replica_connection_pool: ConnectionPool, + stop_receiver: watch::Receiver, +) -> Vec> { + let eth_gateway = EthereumClient::from_config(config); + let gas_adjuster = Arc::new( + GasAdjuster::new(eth_gateway.clone(), config.eth_sender.gas_adjuster) + .await + .unwrap(), + ); + web3::start_ws_rpc_server_old( + master_connection_pool, + replica_connection_pool, + config, + stop_receiver, + gas_adjuster, + ) +} + +fn circuit_breakers_for_components( + components: &[Component], + config: &ZkSyncConfig, +) -> Vec> { + let mut circuit_breakers: Vec> = Vec::new(); + + if components.iter().any(|c| { + matches!( + c, + Component::EthTxAggregator | Component::EthTxManager | Component::StateKeeper + ) + }) { + circuit_breakers.push(Box::new(FailedL1TransactionChecker { + pool: ConnectionPool::new(Some(1), false), + })); + } + + if components.iter().any(|c| { + matches!( + c, + Component::EthTxAggregator + | Component::EthTxManager + | Component::StateKeeper + | Component::Tree + | Component::TreeBackup + ) + }) { + circuit_breakers.push(Box::new(CodeHashesChecker::new(config))); + } + + if components.iter().any(|c| { + matches!( + c, + Component::EthTxAggregator + | Component::EthTxManager + | Component::Tree + | Component::TreeBackup + ) + }) 
{ + circuit_breakers.push(Box::new(VksChecker::new(config))); + } + + if components + .iter() + .any(|c| matches!(c, Component::EthTxAggregator | Component::EthTxManager)) + { + circuit_breakers.push(Box::new(FacetSelectorsChecker::new(config))); + } + + circuit_breakers +} diff --git a/core/bin/zksync_core/src/metadata_calculator/mod.rs b/core/bin/zksync_core/src/metadata_calculator/mod.rs new file mode 100644 index 000000000000..2acd9b94e83f --- /dev/null +++ b/core/bin/zksync_core/src/metadata_calculator/mod.rs @@ -0,0 +1,597 @@ +//! This module applies updates to the ZkSyncTree, calculates metadata for sealed blocks, and +//! stores them in the DB. + +use std::collections::BTreeMap; +use std::time::Instant; + +use tokio::sync::watch; + +use zksync_config::{DBConfig, ZkSyncConfig}; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_merkle_tree::{TreeMetadata, TreeMode, ZkSyncTree}; +use zksync_object_store::gcs_utils::merkle_tree_paths_blob_url; +use zksync_object_store::object_store::{ + create_object_store_from_env, DynamicObjectStore, WITNESS_INPUT_BUCKET_PATH, +}; +use zksync_storage::db::Database; +use zksync_storage::rocksdb::backup::{BackupEngine, BackupEngineOptions, RestoreOptions}; +use zksync_storage::rocksdb::{Options, DB}; +use zksync_storage::RocksDB; +use zksync_types::block::L1BatchHeader; +use zksync_types::commitment::{BlockCommitment, BlockMetadata, BlockWithMetadata}; +use zksync_types::{ + block::WitnessBlockWithLogs, L1BatchNumber, StorageKey, StorageLog, StorageLogKind, + WitnessStorageLog, H256, +}; +use zksync_utils::time::seconds_since_epoch; + +#[cfg(test)] +mod tests; + +#[derive(Debug)] +pub struct MetadataCalculator { + #[cfg_attr(test, allow(dead_code))] + delay_interval: std::time::Duration, + tree: ZkSyncTree, + config: DBConfig, + mode: MetadataCalculatorMode, + object_store: DynamicObjectStore, +} + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum MetadataCalculatorMode { + Full, + Lightweight, + Backup, +} + 
+impl From for TreeMode { + fn from(mode: MetadataCalculatorMode) -> Self { + match mode { + MetadataCalculatorMode::Lightweight => TreeMode::Lightweight, + _ => TreeMode::Full, + } + } +} + +impl MetadataCalculator { + pub fn new(config: &ZkSyncConfig, mode: MetadataCalculatorMode) -> Self { + { + let db = RocksDB::new( + Database::MerkleTree, + Self::rocksdb_path(&config.db, mode), + false, + ); + let tree = ZkSyncTree::new(db); + if tree.is_empty() { + Self::restore_from_backup(&config.db); + } + } + let db = RocksDB::new( + Database::MerkleTree, + Self::rocksdb_path(&config.db, mode), + true, + ); + let tree = ZkSyncTree::new_with_mode(db, mode.into()); + Self { + delay_interval: config.chain.operations_manager.delay_interval(), + tree, + config: config.db.clone(), + mode, + object_store: create_object_store_from_env(), + } + } + + pub async fn run(mut self, pool: ConnectionPool, stop_receiver: watch::Receiver) { + let mut storage = pool.access_storage().await; + + // ensure genesis creation + if self.tree.is_empty() { + let storage_logs = get_logs_for_l1_batch(&mut storage, L1BatchNumber(0)); + self.tree.process_block(storage_logs.unwrap().storage_logs); + self.tree.save().expect("Unable to update tree state"); + } + let mut next_block_number_to_seal_in_tree = self.get_current_rocksdb_block_number(); + + let current_db_block = storage.blocks_dal().get_sealed_block_number() + 1; + let last_block_number_with_metadata = + storage.blocks_dal().get_last_block_number_with_metadata() + 1; + drop(storage); + + vlog::info!( + "initialized metadata calculator. Current rocksDB block: {}. 
Current Postgres block: {}", + next_block_number_to_seal_in_tree, + current_db_block + ); + metrics::gauge!( + "server.metadata_calculator.backup_lag", + (last_block_number_with_metadata - *next_block_number_to_seal_in_tree).0 as f64, + ); + + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, metadata_calculator is shutting down"); + break; + } + + let query_started_at = Instant::now(); + + let mut storage = pool.access_storage().await; + + match self.mode { + MetadataCalculatorMode::Full => { + let last_sealed_block = storage.blocks_dal().get_sealed_block_number(); + let new_blocks: Vec<_> = (next_block_number_to_seal_in_tree.0 + ..=last_sealed_block.0) + .take(self.config.max_block_batch) + .flat_map(|block_number| { + get_logs_for_l1_batch(&mut storage, L1BatchNumber(block_number)) + }) + .collect(); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + query_started_at.elapsed(), + "stage" => "load_changes" + ); + + if new_blocks.is_empty() { + // we don't have any new data to process. Waiting... + #[cfg(not(test))] + tokio::time::sleep(self.delay_interval).await; + + #[cfg(test)] + return; + } else { + next_block_number_to_seal_in_tree = + new_blocks.last().unwrap().header.number + 1; + + self.process_multiple_blocks(&mut storage, new_blocks).await; + } + } + MetadataCalculatorMode::Lightweight => { + let new_block_logs = + get_logs_for_l1_batch(&mut storage, next_block_number_to_seal_in_tree); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + query_started_at.elapsed(), + "stage" => "load_changes" + ); + + match new_block_logs { + None => { + // we don't have any new data to process. Waiting... 
+ #[cfg(not(test))] + tokio::time::sleep(self.delay_interval).await; + + #[cfg(test)] + return; + } + Some(block) => { + next_block_number_to_seal_in_tree = block.header.number + 1; + + self.process_block(&mut storage, block).await; + } + } + } + MetadataCalculatorMode::Backup => { + unreachable!("Backup mode is disabled"); + } + } + } + } + + fn rocksdb_path(config: &DBConfig, mode: MetadataCalculatorMode) -> &str { + match mode { + MetadataCalculatorMode::Full => config.path(), + _ => config.merkle_tree_fast_ssd_path(), + } + } + + pub(crate) fn get_current_rocksdb_block_number(&mut self) -> L1BatchNumber { + L1BatchNumber(self.tree.block_number()) + } + + // Applies the sealed block to the tree and returns the new root hash + #[tracing::instrument(skip(self, storage, block))] + async fn process_block( + &mut self, + storage: &mut StorageProcessor<'_>, + block: WitnessBlockWithLogs, + ) -> H256 { + let start = Instant::now(); + let mut start_stage = Instant::now(); + + assert_eq!(self.mode, MetadataCalculatorMode::Lightweight); + + let storage_logs = get_filtered_storage_logs(&block.storage_logs, self.mode); + let total_logs: usize = storage_logs.len(); + let previous_root_hash = self.tree.root_hash(); + let metadata_at_block = self.tree.process_block(storage_logs); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "compute" + ); + start_stage = Instant::now(); + + let metadata = Self::build_block_metadata(&metadata_at_block, &block.header); + let root_hash = metadata.root_hash; + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "prepare_results" + ); + + let block_with_metadata = + Self::reestimate_block_commit_gas(storage, block.header, metadata); + + start_stage = Instant::now(); + + // for consistency it's important to save to postgres before rocksDB + storage.blocks_dal().save_blocks_metadata( + 
block_with_metadata.header.number, + block_with_metadata.metadata, + H256::from_slice(&previous_root_hash), + ); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "save_postgres" + ); + + start_stage = Instant::now(); + + self.tree.save().expect("Unable to update tree state"); + + // only metrics after this point + self.update_metrics( + &[block_with_metadata.header], + total_logs, + start_stage, + start, + ); + root_hash + } + + #[tracing::instrument(skip(self, storage, blocks))] + async fn process_multiple_blocks( + &mut self, + storage: &mut StorageProcessor<'_>, + blocks: Vec, + ) -> H256 { + let start = Instant::now(); + + assert_eq!( + self.mode, + MetadataCalculatorMode::Full, + "Lightweight tree shouldn't process multiple blocks" + ); + + let mut start_stage = Instant::now(); + + let total_logs: usize = blocks.iter().map(|block| block.storage_logs.len()).sum(); + let storage_logs = blocks.iter().map(|block| block.storage_logs.iter()); + let previous_root_hash = self.tree.root_hash(); + let metadata = self.tree.process_blocks(storage_logs); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "compute" + ); + + let root_hashes: Vec<_> = std::iter::once(&previous_root_hash) + .chain(metadata.iter().map(|metadata| &metadata.root_hash)) + .map(|hash| H256::from_slice(hash)) + .collect(); + let last_root_hash = *root_hashes.last().unwrap(); + let mut block_headers = Vec::with_capacity(blocks.len()); + + for ((metadata_at_block, block), previous_root_hash) in + metadata.into_iter().zip(blocks).zip(root_hashes) + { + start_stage = Instant::now(); + + let metadata = Self::build_block_metadata(&metadata_at_block, &block.header); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "prepare_results" + ); + + let block_with_metadata = + 
Self::reestimate_block_commit_gas(storage, block.header, metadata); + + start_stage = Instant::now(); + + // Save witness input only when running in Full mode. + self.object_store + .put( + WITNESS_INPUT_BUCKET_PATH, + merkle_tree_paths_blob_url(block_with_metadata.header.number), + metadata_at_block.witness_input, + ) + .unwrap(); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "save_gcs" + ); + + start_stage = Instant::now(); + + // Save the metadata in case the lightweight tree is behind / not running + storage.blocks_dal().save_blocks_metadata( + block_with_metadata.header.number, + block_with_metadata.metadata, + previous_root_hash, + ); + + storage + .witness_generator_dal() + .save_witness_inputs(block_with_metadata.header.number); + + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "save_postgres" + ); + + block_headers.push(block_with_metadata.header); + } + start_stage = Instant::now(); + + self.tree.save().expect("Unable to update tree state"); + + // only metrics after this point + self.update_metrics(&block_headers, total_logs, start_stage, start); + + last_root_hash + } + + /// This is used to improve L1 gas estimation for the commit operation. The estimations are computed + /// in the State Keeper, where storage writes aren't yet deduplicated, whereas block metadata + /// contains deduplicated storage writes. 
+ fn reestimate_block_commit_gas( + storage: &mut StorageProcessor<'_>, + block_header: L1BatchHeader, + metadata: BlockMetadata, + ) -> BlockWithMetadata { + let start_stage = Instant::now(); + let unsorted_factory_deps = storage + .blocks_dal() + .get_l1_batch_factory_deps(block_header.number); + let block_with_metadata = + BlockWithMetadata::new(block_header, metadata, unsorted_factory_deps); + let commit_gas_cost = crate::gas_tracker::commit_gas_count_for_block(&block_with_metadata); + storage + .blocks_dal() + .update_predicted_block_commit_gas(block_with_metadata.header.number, commit_gas_cost); + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "reestimate_block_commit_gas_cost" + ); + block_with_metadata + } + + fn update_metrics( + &self, + block_headers: &[L1BatchHeader], + total_logs: usize, + start_stage: Instant, + start: Instant, + ) { + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + start_stage.elapsed(), + "stage" => "save_rocksdb" + ); + metrics::histogram!( + "server.metadata_calculator.update_tree.latency", + start.elapsed() + ); + + if total_logs > 0 { + metrics::histogram!( + "server.metadata_calculator.update_tree.per_log.latency", + start.elapsed().div_f32(total_logs as f32) + ); + } + + let total_tx: usize = block_headers.iter().map(|block| block.tx_count()).sum(); + let total_l1_tx: u16 = block_headers.iter().map(|block| block.l1_tx_count).sum(); + metrics::counter!("server.processed_txs", total_tx as u64, "stage" => "tree"); + metrics::counter!("server.processed_l1_txs", total_l1_tx as u64, "stage" => "tree"); + metrics::histogram!("server.metadata_calculator.log_batch", total_logs as f64); + metrics::histogram!( + "server.metadata_calculator.blocks_batch", + block_headers.len() as f64 + ); + + let last_block_number = block_headers.last().unwrap().number.0; + vlog::info!("block {:?} processed in tree", last_block_number); + 
metrics::gauge!( + "server.block_number", + last_block_number as f64, + "stage" => format!("tree_{:?}_mode", self.mode).to_lowercase() + ); + metrics::histogram!( + "server.block_latency", + (seconds_since_epoch() - block_headers.first().unwrap().timestamp) as f64, + "stage" => format!("tree_{:?}_mode", self.mode).to_lowercase() + ); + } + + fn build_block_metadata( + tree_metadata_at_block: &TreeMetadata, + l1_batch_header: &L1BatchHeader, + ) -> BlockMetadata { + let merkle_root_hash = H256::from_slice(&tree_metadata_at_block.root_hash); + + let block_commitment = BlockCommitment::new( + l1_batch_header.l2_to_l1_logs.clone(), + tree_metadata_at_block.rollup_last_leaf_index, + merkle_root_hash, + tree_metadata_at_block.initial_writes.clone(), + tree_metadata_at_block.repeated_writes.clone(), + ); + let block_commitment_hash = block_commitment.hash(); + vlog::trace!("Block commitment {:?}", &block_commitment); + + let metadata = BlockMetadata { + root_hash: merkle_root_hash, + rollup_last_leaf_index: tree_metadata_at_block.rollup_last_leaf_index, + merkle_root_hash, + initial_writes_compressed: block_commitment.initial_writes_compressed().to_vec(), + repeated_writes_compressed: block_commitment.repeated_writes_compressed().to_vec(), + commitment: block_commitment_hash.commitment, + l2_l1_messages_compressed: block_commitment.l2_l1_logs_compressed().to_vec(), + l2_l1_merkle_root: block_commitment.l2_l1_logs_merkle_root(), + block_meta_params: block_commitment.meta_parameters(), + aux_data_hash: block_commitment_hash.aux_output, + meta_parameters_hash: block_commitment_hash.meta_parameters, + pass_through_data_hash: block_commitment_hash.pass_through_data, + }; + + vlog::trace!("Block metadata {:?}", metadata); + metadata + } + + /// Encodes storage key using the pre-defined zkSync hasher. 
+ pub fn key_hash_fn(key: &StorageKey) -> Vec { + key.hashed_key().to_fixed_bytes().to_vec() + } + + fn restore_from_backup(db_config: &DBConfig) { + let mut engine = BackupEngine::open( + &BackupEngineOptions::default(), + db_config.merkle_tree_backup_path(), + ) + .expect("failed to initialize restore engine"); + if let Err(err) = engine.restore_from_latest_backup( + db_config.path(), + db_config.path(), + &RestoreOptions::default(), + ) { + vlog::warn!("can't restore tree from backup {:?}", err); + } + } + + fn _backup(&mut self) { + let started_at = Instant::now(); + let mut engine = BackupEngine::open( + &BackupEngineOptions::default(), + self.config.merkle_tree_backup_path(), + ) + .expect("failed to create backup engine"); + let rocksdb_path = Self::rocksdb_path(&self.config, self.mode); + let db = DB::open_for_read_only(&Options::default(), rocksdb_path, false) + .expect("failed to open db for backup"); + engine.create_new_backup(&db).unwrap(); + engine + .purge_old_backups(self.config.backup_count()) + .expect("failed to purge old backups"); + metrics::histogram!( + "server.metadata_calculator.update_tree.latency.stage", + started_at.elapsed(), + "stage" => "backup_tree" + ); + } +} + +/// Filters the storage log based on the MetadataCalculatorMode and StorageLogKind. 
+/// | MetadataCalculatorMode | Processing | +/// |------------------------------|------------------------| +/// | Full | Read + Write | +/// | Lightweight | Write | +/// | Backup | Write | +fn get_filtered_storage_logs( + storage_logs: &[WitnessStorageLog], + mode: MetadataCalculatorMode, +) -> Vec<&WitnessStorageLog> { + storage_logs + .iter() + .filter(|log| { + mode == MetadataCalculatorMode::Full || log.storage_log.kind == StorageLogKind::Write + }) + .collect() +} + +pub(crate) fn get_logs_for_l1_batch( + storage: &mut StorageProcessor<'_>, + l1_batch_number: L1BatchNumber, +) -> Option { + let header = storage.blocks_dal().get_block_header(l1_batch_number)?; + + // `BTreeMap` is used because tree needs to process slots in lexicographical order. + let mut storage_logs: BTreeMap = BTreeMap::new(); + + let protective_reads = storage + .storage_logs_dedup_dal() + .get_protective_reads_for_l1_batch(l1_batch_number); + let touched_slots = storage + .storage_logs_dedup_dal() + .get_touched_slots_for_l1_batch(l1_batch_number); + + let hashed_keys = protective_reads + .iter() + .chain(touched_slots.keys()) + .map(|key| key.hashed_key()) + .collect(); + let previous_values = storage + .storage_logs_dedup_dal() + .get_previous_storage_values(hashed_keys, l1_batch_number); + + for storage_key in protective_reads { + let previous_value = previous_values + .get(&storage_key.hashed_key()) + .cloned() + .unwrap(); + + // Sanity check: value must not change for slots that require protective reads. 
+ if let Some(value) = touched_slots.get(&storage_key) { + assert_eq!( + previous_value, *value, + "Value was changed for slot that requires protective read" + ); + } + + storage_logs.insert( + storage_key, + WitnessStorageLog { + storage_log: StorageLog::new_read_log(storage_key, previous_value), + previous_value, + }, + ); + } + + for (storage_key, value) in touched_slots { + let previous_value = previous_values + .get(&storage_key.hashed_key()) + .cloned() + .unwrap(); + + if previous_value != value { + storage_logs.insert( + storage_key, + WitnessStorageLog { + storage_log: StorageLog::new_write_log(storage_key, value), + previous_value, + }, + ); + } + } + + Some(WitnessBlockWithLogs { + header, + storage_logs: storage_logs.into_values().collect(), + }) +} diff --git a/core/bin/zksync_core/src/metadata_calculator/tests.rs b/core/bin/zksync_core/src/metadata_calculator/tests.rs new file mode 100644 index 000000000000..410992d15025 --- /dev/null +++ b/core/bin/zksync_core/src/metadata_calculator/tests.rs @@ -0,0 +1,241 @@ +use itertools::Itertools; +use std::path::Path; +use std::str::FromStr; + +use db_test_macro::db_test; +use tempfile::TempDir; +use tokio::sync::watch; +use zksync_types::FAIR_L2_GAS_PRICE; + +use crate::genesis::{chain_schema_genesis, operations_schema_genesis}; +use crate::metadata_calculator::MetadataCalculator; + +use crate::MetadataCalculatorMode; +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_merkle_tree::ZkSyncTree; +use zksync_storage::db::Database; +use zksync_storage::RocksDB; +use zksync_types::{ + block::{L1BatchHeader, MiniblockHeader}, + commitment::BlockCommitment, + AccountTreeId, Address, L1BatchNumber, MiniblockNumber, StorageKey, StorageLog, H256, +}; +use zksync_utils::{miniblock_hash, u32_to_h256}; + +#[db_test] +async fn genesis_creation(connection_pool: ConnectionPool) { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + + { + let metadata_calculator = 
+ setup_metadata_calculator(temp_dir.path(), connection_pool.clone()).await; + metadata_calculator + .run(connection_pool.clone(), watch::channel(false).1) + .await; + } + + let mut metadata_calculator = setup_metadata_calculator(temp_dir.path(), connection_pool).await; + assert_eq!( + metadata_calculator.get_current_rocksdb_block_number(), + L1BatchNumber(1) + ); +} + +#[ignore] +#[db_test] +async fn backup_recovery(connection_pool: ConnectionPool) { + let backup_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let backup_path = backup_dir.path().to_str().unwrap().to_string(); + + { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let metadata_calculator = setup_metadata_calculator_with_options( + temp_dir.path(), + connection_pool.clone(), + MetadataCalculatorMode::Backup, + Some(backup_path.clone()), + ) + .await; + reset_db_state(connection_pool.clone(), 1).await; + metadata_calculator + .run(connection_pool.clone(), watch::channel(false).1) + .await; + } + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let mut metadata_calculator = setup_metadata_calculator_with_options( + temp_dir.path(), + connection_pool, + MetadataCalculatorMode::Full, + Some(backup_path), + ) + .await; + assert_eq!( + metadata_calculator.get_current_rocksdb_block_number(), + L1BatchNumber(2) + ); +} + +#[db_test] +async fn basic_workflow(connection_pool: ConnectionPool) { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + + { + let metadata_calculator = + setup_metadata_calculator(temp_dir.path(), connection_pool.clone()).await; + reset_db_state(connection_pool.clone(), 1).await; + metadata_calculator + .run(connection_pool.clone(), watch::channel(false).1) + .await; + } + + let mut metadata_calculator = setup_metadata_calculator(temp_dir.path(), connection_pool).await; + assert_eq!( + metadata_calculator.get_current_rocksdb_block_number(), + 
L1BatchNumber(2) + ); +} + +#[db_test] +async fn multi_block_workflow(connection_pool: ConnectionPool) { + // run all transactions as single block + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + + { + let metadata_calculator = + setup_metadata_calculator(temp_dir.path(), connection_pool.clone()).await; + reset_db_state(connection_pool.clone(), 1).await; + metadata_calculator + .run(connection_pool.clone(), watch::channel(false).1) + .await; + } + + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let root_hash = { + let tree = ZkSyncTree::new(db); + tree.root_hash() + }; + + // run same transactions as multiple blocks + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + { + let metadata_calculator = + setup_metadata_calculator(temp_dir.path(), connection_pool.clone()).await; + reset_db_state(connection_pool.clone(), 10).await; + metadata_calculator + .run(connection_pool.clone(), watch::channel(false).1) + .await; + } + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let tree = ZkSyncTree::new(db); + let multi_block_root_hash = tree.root_hash(); + + // verify that hashes match + assert_eq!(multi_block_root_hash, root_hash); +} + +async fn setup_metadata_calculator(db_path: &Path, pool: ConnectionPool) -> MetadataCalculator { + setup_metadata_calculator_with_options(db_path, pool, MetadataCalculatorMode::Full, None).await +} + +async fn setup_metadata_calculator_with_options( + db_path: &Path, + pool: ConnectionPool, + mode: MetadataCalculatorMode, + backup_directory: Option, +) -> MetadataCalculator { + let backup_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let mut config = ZkSyncConfig::from_env().clone(); + config.db.path = db_path.to_str().unwrap().to_string(); + config.db.merkle_tree_fast_ssd_path = config.db.path.clone(); + config.db.merkle_tree_backup_path = + backup_directory.unwrap_or_else(|| 
backup_dir.path().to_str().unwrap().to_string()); + config.db.backup_interval_ms = 0; + let fee_address = Address::repeat_byte(0x01); + let mut storage = pool.access_storage().await; + let metadata_calculator = MetadataCalculator::new(&config, mode); + + if storage.blocks_dal().is_genesis_needed() { + let chain_id = H256::from_low_u64_be(config.chain.eth.zksync_network_id as u64); + chain_schema_genesis(&mut storage, fee_address, chain_id).await; + let block_commitment = BlockCommitment::new(vec![], 0, Default::default(), vec![], vec![]); + + operations_schema_genesis( + &mut storage, + &block_commitment, + H256::from_slice(&metadata_calculator.tree.root_hash()), + 1, + ); + } + metadata_calculator +} + +async fn reset_db_state(pool: ConnectionPool, num_blocks: usize) { + let mut storage = pool.access_storage().await; + // Drops all blocks (except the block with number = 0) and theirs storage logs. + storage + .storage_logs_dal() + .rollback_storage_logs(MiniblockNumber(0)); + storage.blocks_dal().delete_miniblocks(MiniblockNumber(0)); + storage.blocks_dal().delete_l1_batches(L1BatchNumber(0)); + + let all_logs = gen_storage_logs(num_blocks); + for (block_number, block_logs) in (1..=(num_blocks as u32)).zip(all_logs) { + let mut header = L1BatchHeader::mock(L1BatchNumber(block_number)); + header.is_finished = true; + // Assumes that L1 batch consists of only one miniblock. 
+ let miniblock_header = MiniblockHeader { + number: MiniblockNumber(block_number), + timestamp: header.timestamp, + hash: miniblock_hash(MiniblockNumber(block_number)), + l1_tx_count: header.l1_tx_count, + l2_tx_count: header.l2_tx_count, + base_fee_per_gas: header.base_fee_per_gas, + l1_gas_price: 0, + l2_fair_gas_price: FAIR_L2_GAS_PRICE, + }; + + storage + .blocks_dal() + .insert_l1_batch(header.clone(), Default::default()); + storage.blocks_dal().insert_miniblock(miniblock_header); + storage.storage_logs_dal().insert_storage_logs( + MiniblockNumber(block_number), + &[(H256::default(), block_logs)], + ); + storage + .blocks_dal() + .mark_miniblocks_as_executed_in_l1_batch(header.number); + } +} + +fn gen_storage_logs(num_blocks: usize) -> Vec> { + // Note, addresses and keys of storage logs must be sorted for the multi_block_workflow test. + let addrs = vec![ + "4b3af74f66ab1f0da3f2e4ec7a3cb99baf1af7b2", + "ef4bb7b21c5fe7432a7d63876cc59ecc23b46636", + "89b8988a018f5348f52eeac77155a793adf03ecc", + "782806db027c08d36b2bed376b4271d1237626b3", + "b2b57b76717ee02ae1327cc3cf1f40e76f692311", + ] + .into_iter() + .map(|s| Address::from_str(s).unwrap()) + .sorted(); + + let proof_keys: Vec<_> = addrs + .flat_map(|addr| { + (0..20).map(move |i| StorageKey::new(AccountTreeId::new(addr), u32_to_h256(i))) + }) + .collect(); + let proof_values: Vec<_> = (0..100).map(u32_to_h256).collect(); + + let logs = proof_keys + .iter() + .zip(proof_values.iter()) + .map(|(proof_key, &proof_value)| StorageLog::new_write_log(*proof_key, proof_value)) + .collect::>(); + logs.chunks(logs.len() / num_blocks) + .map(|v| v.into()) + .collect() +} diff --git a/core/bin/zksync_core/src/state_keeper/batch_executor/mod.rs b/core/bin/zksync_core/src/state_keeper/batch_executor/mod.rs new file mode 100644 index 000000000000..a5390b52c063 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/batch_executor/mod.rs @@ -0,0 +1,511 @@ +use std::{sync::mpsc, thread, time::Instant}; + +use vm::{ + 
storage::Storage, + vm::{VmPartialExecutionResult, VmTxExecutionResult}, + vm_with_bootloader::{ + init_vm, push_transaction_to_bootloader_memory, BlockContextMode, BootloaderJobType, + TxExecutionMode, + }, + zk_evm::block_properties::BlockProperties, + TxRevertReason, VmBlockResult, VmInstance, +}; +use zksync_dal::ConnectionPool; +use zksync_state::{secondary_storage::SecondaryStateStorage, storage_view::StorageView}; +use zksync_storage::{db::Database, RocksDB}; +use zksync_types::{tx::ExecutionMetrics, Transaction, U256}; + +use crate::gas_tracker::{gas_count_from_metrics, gas_count_from_tx_and_metrics}; + +use crate::state_keeper::types::ExecutionMetricsForCriteria; + +#[cfg(test)] +mod tests; + +/// Representation of a transaction executed in the virtual machine. +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct TxExecutionResult { + /// `Ok(_)` represents a transaction that was executed (even if it reverted), while + /// `Err(_)` represents a rejected transaction (one that can't be applied to the state). + pub(super) tx_result: Result, + /// Result of dry run executing the bootloader tip. Will be `None` if the transaction was rejected + /// (`tx_result` field is `err). + pub(super) bootloader_dry_run_result: Option>, + /// Execution metrics for the transaction itself. + /// Will be `None` if the transaction was rejected. + pub(super) tx_metrics: Option, + /// Execution metrics for the bootloader tip dry run. + /// Will be `None` if either the transaction was rejected or if bootloader tip dry run failed. 
+ pub(super) bootloader_dry_run_metrics: Option, +} + +impl TxExecutionResult { + pub(super) fn new(tx_result: Result) -> Self { + Self { + tx_result, + bootloader_dry_run_result: None, + tx_metrics: None, + bootloader_dry_run_metrics: None, + } + } + + pub(super) fn add_tx_metrics(&mut self, tx_metrics: ExecutionMetricsForCriteria) { + self.tx_metrics = Some(tx_metrics); + } + + pub(super) fn add_bootloader_result( + &mut self, + bootloader_dry_run_result: Result, + ) { + self.bootloader_dry_run_result = Some(bootloader_dry_run_result); + } + + pub(super) fn add_bootloader_metrics( + &mut self, + bootloader_dry_run_metrics: ExecutionMetricsForCriteria, + ) { + self.bootloader_dry_run_metrics = Some(bootloader_dry_run_metrics); + } + + /// Returns `true` if both transaction and bootloader tip dry run were successful. + pub(super) fn success(&self) -> bool { + self.tx_result.is_ok() + && self + .bootloader_dry_run_result + .as_ref() + .map(|res| res.is_ok()) + .unwrap_or(false) + } + + /// Returns a revert reason if either transaction was rejected or bootloader dry run tip failed. + pub(super) fn err(&self) -> Option { + self.tx_result + .as_ref() + .err() + .or_else(|| { + self.bootloader_dry_run_result + .as_ref() + .and_then(|res| res.as_ref().err()) + }) + .cloned() + } +} + +/// An abstraction that allows us to create different kinds of batch executors. +/// The only requirement is to return the `BatchExecutorHandle` object, which does its work +/// by communicating with the externally initialized thread. +pub(crate) trait L1BatchExecutorBuilder: 'static + std::fmt::Debug + Send { + fn init_batch( + &self, + block_context: BlockContextMode, + block_properties: BlockProperties, + ) -> BatchExecutorHandle; +} + +/// The default implementation of the `BatchExecutorBuilder`. +/// Creates a "real" batch executor which maintains the VM (as opposed to the test builder which doesn't use the VM). 
+#[derive(Debug, Clone)] +pub(crate) struct MainBatchExecutorBuilder { + state_keeper_db_path: String, + pool: ConnectionPool, + reexecute_each_tx: bool, + max_allowed_tx_gas_limit: U256, +} + +impl MainBatchExecutorBuilder { + pub(crate) fn new( + state_keeper_db_path: String, + pool: ConnectionPool, + reexecute_each_tx: bool, + max_allowed_tx_gas_limit: U256, + ) -> Self { + Self { + state_keeper_db_path, + pool, + reexecute_each_tx, + max_allowed_tx_gas_limit, + } + } +} + +impl L1BatchExecutorBuilder for MainBatchExecutorBuilder { + fn init_batch( + &self, + block_context: BlockContextMode, + block_properties: BlockProperties, + ) -> BatchExecutorHandle { + let secondary_storage = self + .pool + .access_storage_blocking() + .storage_load_dal() + .load_secondary_storage(RocksDB::new( + Database::StateKeeper, + &self.state_keeper_db_path, + true, + )); + vlog::info!( + "Secondary storage for batch {} initialized, size is {}", + block_context.inner_block_context().context.block_number, + secondary_storage.get_estimated_map_size() + ); + metrics::gauge!( + "server.state_keeper.storage_map_size", + secondary_storage.get_estimated_map_size() as f64, + ); + BatchExecutorHandle::new( + self.reexecute_each_tx, + self.max_allowed_tx_gas_limit, + block_context, + block_properties, + secondary_storage, + ) + } +} + +/// A public interface for interaction with the `BatchExecutor`. +/// `BatchExecutorHandle` is stored in the state keeper and is used to invoke or rollback transactions, and also seal +/// the batches. 
+#[derive(Debug)] +pub(crate) struct BatchExecutorHandle { + handle: thread::JoinHandle<()>, + commands: mpsc::Sender, +} + +impl BatchExecutorHandle { + pub(super) fn new( + reexecute_each_tx: bool, + max_allowed_tx_gas_limit: U256, + block_context: BlockContextMode, + block_properties: BlockProperties, + secondary_storage: SecondaryStateStorage, + ) -> Self { + let (commands_sender, commands_receiver) = mpsc::channel(); + let executor = BatchExecutor { + reexecute_each_tx, + max_allowed_tx_gas_limit, + commands: commands_receiver, + }; + + let handle = + thread::spawn(move || executor.run(secondary_storage, block_context, block_properties)); + + Self { + handle, + commands: commands_sender, + } + } + + /// Creates a batch executor handle from the provided sender and thread join handle. + /// Can be used to inject an alternative batch executor implementation. + #[cfg(test)] + pub(super) fn from_raw( + handle: thread::JoinHandle<()>, + commands: mpsc::Sender, + ) -> Self { + Self { handle, commands } + } + + pub(super) fn execute_tx(&self, tx: Transaction) -> TxExecutionResult { + let (response_sender, response_receiver) = mpsc::sync_channel(0); + self.commands + .send(Command::ExecuteTx(tx, response_sender)) + .unwrap(); + + let start = Instant::now(); + let res = response_receiver.recv().unwrap(); + metrics::histogram!("state_keeper.batch_executor.command_response_time", start.elapsed(), "command" => "execute_tx"); + res + } + + pub(super) fn rollback_last_tx(&self) { + // While we don't get anything from the channel, it's useful to have it as a confirmation that the operation + // indeed has been processed. 
+ let (response_sender, response_receiver) = mpsc::sync_channel(0); + self.commands + .send(Command::RollbackLastTx(response_sender)) + .unwrap(); + let start = Instant::now(); + response_receiver.recv().unwrap(); + metrics::histogram!("state_keeper.batch_executor.command_response_time", start.elapsed(), "command" => "rollback_last_tx"); + } + + pub(super) fn finish_batch(self) -> VmBlockResult { + let (response_sender, response_receiver) = mpsc::sync_channel(0); + self.commands + .send(Command::FinishBatch(response_sender)) + .unwrap(); + let start = Instant::now(); + let resp = response_receiver.recv().unwrap(); + self.handle.join().unwrap(); + metrics::histogram!("state_keeper.batch_executor.command_response_time", start.elapsed(), "command" => "finish_batch"); + resp + } +} + +/// Implementation of the "primary" (non-test) batch executor. +/// Upon launch, it initialized the VM object with provided block context and properties, and keeps applying +/// transactions until the batch is sealed. +/// +/// One `BatchExecutor` can execute exactly one batch, so once the batch is sealed, a new `BatchExecutor` object must +/// be constructed. 
+#[derive(Debug)] +pub(super) struct BatchExecutor { + reexecute_each_tx: bool, + max_allowed_tx_gas_limit: U256, + commands: mpsc::Receiver, +} + +#[allow(clippy::large_enum_variant)] +pub(super) enum Command { + ExecuteTx(Transaction, mpsc::SyncSender), + RollbackLastTx(mpsc::SyncSender<()>), + FinishBatch(mpsc::SyncSender), +} + +impl BatchExecutor { + pub(super) fn run( + self, + secondary_storage: SecondaryStateStorage, + block_context: BlockContextMode, + block_properties: BlockProperties, + ) { + vlog::info!( + "Starting executing batch #{}", + block_context.inner_block_context().context.block_number + ); + + let mut storage_view = StorageView::new(&secondary_storage); + let mut oracle_tools = vm::OracleTools::new(&mut storage_view as &mut dyn Storage); + + let mut vm = init_vm( + &mut oracle_tools, + block_context, + &block_properties, + TxExecutionMode::VerifyExecute, + ); + + while let Ok(cmd) = self.commands.recv() { + match cmd { + Command::ExecuteTx(tx, resp) => { + let result = self.execute_tx(&tx, &mut vm); + resp.send(result).unwrap(); + } + Command::RollbackLastTx(resp) => { + self.rollback_last_tx(&mut vm); + resp.send(()).unwrap(); + } + Command::FinishBatch(resp) => { + resp.send(self.finish_batch(&mut vm)).unwrap(); + return; + } + } + } + // State keeper can exit because of stop signal, so it's OK to exit mid-batch. + vlog::info!("State keeper exited with an unfinished batch"); + } + + fn execute_tx(&self, tx: &Transaction, vm: &mut VmInstance) -> TxExecutionResult { + let gas_consumed_before_tx = vm.gas_consumed(); + let updated_storage_slots_before_tx = vm.number_of_updated_storage_slots(); + + // Save pre-`execute_next_tx` VM snapshot. + vm.save_current_vm_as_snapshot(); + + // Reject transactions with too big gas limit. + // They are also rejected on the API level, but + // we need to secure ourselves in case some tx will somehow get into mempool. 
+ if tx.gas_limit() > self.max_allowed_tx_gas_limit { + vlog::warn!( + "Found tx with too big gas limit in state keeper, hash: {:?}, gas_limit: {}", + tx.hash(), + tx.gas_limit() + ); + return TxExecutionResult { + tx_result: Err(TxRevertReason::TooBigGasLimit), + bootloader_dry_run_result: None, + tx_metrics: None, + bootloader_dry_run_metrics: None, + }; + } + + // Execute the transaction. + let stage_started_at = Instant::now(); + let tx_result = self.execute_tx_in_vm(tx, vm); + metrics::histogram!( + "server.state_keeper.tx_execution_time", + stage_started_at.elapsed(), + "stage" => "execution" + ); + metrics::increment_counter!( + "server.processed_txs", + "stage" => "state_keeper" + ); + metrics::counter!( + "server.processed_l1_txs", + tx.is_l1() as u64, + "stage" => "state_keeper" + ); + + if self.reexecute_each_tx { + self.reexecute_tx_in_vm(vm, tx, tx_result.clone()); + } + + let mut result = TxExecutionResult::new(tx_result.clone()); + if result.err().is_some() { + return result; + } + + let tx_metrics = Self::get_execution_metrics( + vm, + Some(tx), + &tx_result.as_ref().unwrap().result, + gas_consumed_before_tx, + updated_storage_slots_before_tx, + ); + result.add_tx_metrics(tx_metrics); + + match self.dryrun_block_tip(vm) { + Ok((exec_result, metrics)) => { + result.add_bootloader_result(Ok(exec_result)); + result.add_bootloader_metrics(metrics); + } + Err(err) => { + vlog::warn!("VM reverted while executing block tip: {}", err); + result.add_bootloader_result(Err(err)); + } + } + + result + } + + fn rollback_last_tx(&self, vm: &mut VmInstance) { + let stage_started_at = Instant::now(); + vm.rollback_to_latest_snapshot_popping(); + metrics::histogram!( + "server.state_keeper.tx_execution_time", + stage_started_at.elapsed(), + "stage" => "tx_rollback" + ); + } + + fn finish_batch(&self, vm: &mut VmInstance) -> VmBlockResult { + vm.execute_till_block_end(BootloaderJobType::BlockPostprocessing) + } + + // Err when transaction is rejected. 
+ // Ok(TxExecutionStatus::Success) when the transaction succeeded + // Ok(TxExecutionStatus::Failure) when the transaction failed. + // Note that failed transactions are considered properly processed and are included in blocks + fn execute_tx_in_vm( + &self, + tx: &Transaction, + vm: &mut VmInstance, + ) -> Result { + push_transaction_to_bootloader_memory(vm, tx, TxExecutionMode::VerifyExecute); + vm.execute_next_tx() + } + + fn reexecute_tx_in_vm( + &self, + vm: &mut VmInstance<'_>, + tx: &Transaction, + expected_tx_result: Result, + ) { + // Rollback to the pre-`execute_next_tx` VM snapshot. + // `rollback_to_latest_snapshot` (not `rollback_to_latest_snapshot_popping`) is used here because + // we will need this snapshot again if seal criteria will result in `ExcludeAndSead`. + vm.rollback_to_latest_snapshot(); + let alternative_result = self.execute_tx_in_vm(tx, vm); + assert_eq!( + alternative_result, + expected_tx_result, + "Failed to reexecute transaction {}", + tx.hash() + ); + } + + fn dryrun_block_tip( + &self, + vm: &mut VmInstance, + ) -> Result<(VmPartialExecutionResult, ExecutionMetricsForCriteria), TxRevertReason> { + let stage_started_at = Instant::now(); + let gas_consumed_before = vm.gas_consumed(); + let updated_storage_slots_before = vm.number_of_updated_storage_slots(); + + // Save pre-`execute_till_block_end` VM snapshot. + vm.save_current_vm_as_snapshot(); + let block_tip_result = vm.execute_block_tip(); + let result = match &block_tip_result.revert_reason { + None => { + let metrics = Self::get_execution_metrics( + vm, + None, + &block_tip_result, + gas_consumed_before, + updated_storage_slots_before, + ); + Ok((block_tip_result, metrics)) + } + Some(TxRevertReason::BootloaderOutOfGas) => Err(TxRevertReason::BootloaderOutOfGas), + Some(other_reason) => { + panic!("VM must not revert when finalizing block (except `BootloaderOutOfGas`). Revert reason: {:?}", other_reason); + } + }; + + // Rollback to the pre-`execute_till_block_end` state. 
+ vm.rollback_to_latest_snapshot_popping(); + + metrics::histogram!( + "server.state_keeper.tx_execution_time", + stage_started_at.elapsed(), + "stage" => "dryrun_block_tip" + ); + + result + } + + fn get_execution_metrics( + vm: &VmInstance, + tx: Option<&Transaction>, + execution_result: &VmPartialExecutionResult, + gas_consumed_before: u32, + updated_storage_slots_before: usize, + ) -> ExecutionMetricsForCriteria { + let storage_updates = vm.number_of_updated_storage_slots() - updated_storage_slots_before; + + let gas_consumed_after = vm.gas_consumed(); + assert!( + gas_consumed_after >= gas_consumed_before, + "Invalid consumed gas value, possible underflow. Tx: {:?}", + tx + ); + let gas_used = gas_consumed_after - gas_consumed_before; + let total_factory_deps = tx + .map(|tx| { + tx.execute + .factory_deps + .as_ref() + .map_or(0, |deps| deps.len() as u16) + }) + .unwrap_or(0); + + let execution_metrics = ExecutionMetrics::new( + &execution_result.logs, + gas_used as usize, + total_factory_deps, + execution_result.contracts_used, + execution_result.cycles_used, + ); + + let l1_gas = match tx { + Some(tx) => gas_count_from_tx_and_metrics(tx, &execution_metrics), + None => gas_count_from_metrics(&execution_metrics), + }; + + ExecutionMetricsForCriteria { + storage_updates, + l1_gas, + execution_metrics, + } + } +} diff --git a/core/bin/zksync_core/src/state_keeper/batch_executor/tests/mod.rs b/core/bin/zksync_core/src/state_keeper/batch_executor/tests/mod.rs new file mode 100644 index 000000000000..c056830b4513 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/batch_executor/tests/mod.rs @@ -0,0 +1,206 @@ +use crate::state_keeper::batch_executor::tests::tester::TestConfig; + +use self::tester::{Account, Tester}; +use assert_matches::assert_matches; +use db_test_macro::db_test; +use vm::{utils::BLOCK_GAS_LIMIT, TxRevertReason}; +use zksync_types::PriorityOpId; + +mod tester; + +use super::TxExecutionResult; + +/// Ensures that transaction was executed 
successfully. +fn assert_executed(execution_result: TxExecutionResult) { + assert_matches!(execution_result.tx_result, Ok(_)); + assert_matches!(execution_result.bootloader_dry_run_result, Some(Ok(_))); + assert_matches!(execution_result.tx_metrics, Some(_)); + assert_matches!(execution_result.bootloader_dry_run_metrics, Some(_)); +} + +/// Ensures that transaction was rejected. +fn assert_rejected(execution_result: TxExecutionResult) { + assert_matches!(execution_result.tx_result, Err(_)); + assert_matches!(execution_result.bootloader_dry_run_result, None); + assert_matches!(execution_result.tx_metrics, None); + assert_matches!(execution_result.bootloader_dry_run_metrics, None); +} + +/// Checks that we can successfully execute a single L2 tx in batch executor. +#[db_test] +async fn execute_l2_tx(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let res = executor.execute_tx(alice.execute()); + assert_executed(res); + executor.finish_batch(); +} + +/// Checks that we can successfully execute a single L1 tx in batch executor. +#[db_test] +async fn execute_l1_tx(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let res = executor.execute_tx(alice.l1_execute(PriorityOpId(1))); + assert_executed(res); + executor.finish_batch(); +} + +/// Checks that we can successfully rollback the transaction and execute it once again. 
+#[db_test] +#[ignore] +async fn rollback(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let tx = alice.execute(); + let res_old = executor.execute_tx(tx.clone()); + assert_executed(res_old.clone()); + + executor.rollback_last_tx(); + + // Execute the same transaction, it must succeed. + let res_new = executor.execute_tx(tx); + assert_executed(res_new.clone()); + + assert_eq!( + res_old.tx_metrics, res_new.tx_metrics, + "Execution results must be the same" + ); + executor.finish_batch(); +} + +/// Checks that incorrect transactions are marked as rejected. +#[db_test] +async fn reject_tx(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + let executor = tester.create_batch_executor(); + + // Wallet is not funded, it can't pay for fees. + let res = executor.execute_tx(alice.execute()); + assert_rejected(res); + executor.finish_batch(); +} + +/// Checks that we tx with too big gas limit is correctly rejected. +#[db_test] +async fn too_big_gas_limit(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let bad_tx = alice.execute_with_gas_limit(u32::MAX); + let res1 = executor.execute_tx(bad_tx.clone()); + assert_rejected(res1.clone()); + + executor.rollback_last_tx(); + + let res2 = executor.execute_tx(bad_tx); + assert_rejected(res2.clone()); + + assert_eq!(res1, res2); + + // Ensure that now we can execute a valid tx. + alice.nonce -= 1; // Reset the nonce. 
+ let res3 = executor.execute_tx(alice.execute()); + assert_executed(res3); + executor.finish_batch(); +} + +/// Checks that we can't execute the same transaction twice. +#[db_test] +async fn tx_cant_be_reexecuted(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let tx = alice.execute(); + let res1 = executor.execute_tx(tx.clone()); + assert_executed(res1); + // Nonce is used for the second tx. + let res2 = executor.execute_tx(tx); + assert_rejected(res2); + executor.finish_batch(); +} + +/// Checks that we can deploy and call the loadnext contract. +#[db_test] +async fn deploy_and_call_loadtest(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + let tester = Tester::new(connection_pool); + tester.genesis().await; + tester.fund(&[alice.address()]); + let executor = tester.create_batch_executor(); + + let (deploy_tx, loadtest_address) = alice.deploy_loadnext_tx(); + assert_executed(executor.execute_tx(deploy_tx)); + assert_executed(executor.execute_tx(alice.loadnext_custom_gas_call( + loadtest_address, + 10, + 10000000, + ))); + assert_executed(executor.execute_tx(alice.loadnext_custom_writes_call(loadtest_address, 1))); + executor.finish_batch(); +} + +/// Checks that we can cause the bootloader out of gas error on tip phase. +#[db_test] +#[ignore] +async fn bootloader_tip_out_of_gas(connection_pool: ConnectionPool) { + let mut alice = Account::random(); + + // Disable the gas limit check. 
+ let tester = Tester::with_config( + connection_pool, + TestConfig { + reexecute_each_tx: false, + max_allowed_tx_gas_limit: u32::MAX, + }, + ); + tester.genesis().await; + tester.fund(&[alice.address()]); + + let executor = tester.create_batch_executor(); + + let mut gas_remaining = BLOCK_GAS_LIMIT; + let (deploy_tx, loadnext_address) = alice.deploy_loadnext_tx(); + + let deploy_res = executor.execute_tx(deploy_tx); + assert_executed(deploy_res.clone()); + gas_remaining -= deploy_res.tx_metrics.unwrap().execution_metrics.gas_used as u32; + + let consume_gas_tx = + alice.loadnext_custom_gas_call(loadnext_address, gas_remaining, gas_remaining); + let res = executor.execute_tx(consume_gas_tx); + + assert!(res.tx_result.is_ok()); + assert_matches!( + res.bootloader_dry_run_result, + Some(Err(TxRevertReason::BootloaderOutOfGas)) + ); + executor.finish_batch(); +} diff --git a/core/bin/zksync_core/src/state_keeper/batch_executor/tests/tester.rs b/core/bin/zksync_core/src/state_keeper/batch_executor/tests/tester.rs new file mode 100644 index 000000000000..a3176bf90fb4 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/batch_executor/tests/tester.rs @@ -0,0 +1,335 @@ +//! Testing harness for the batch executor. +//! Contains helper functionality to initialize test context and perform tests without too much boilerplate. 
+ +use crate::genesis::chain_schema_genesis; +use crate::state_keeper::batch_executor::BatchExecutorHandle; +use tempfile::TempDir; +use vm::{ + test_utils::{ + get_create_zksync_address, get_deploy_tx, mock_loadnext_gas_burn_call, + mock_loadnext_test_call, + }, + utils::default_block_properties, + vm_with_bootloader::{BlockContext, BlockContextMode, DerivedBlockContext}, + zk_evm::{ + block_properties::BlockProperties, + zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES, + }, +}; +use zksync_config::ZkSyncConfig; +use zksync_contracts::{get_loadnext_contract, TestContract}; +use zksync_dal::ConnectionPool; +use zksync_storage::{db::Database, RocksDB}; +use zksync_types::{ + ethabi::{encode, Token}, + fee::Fee, + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + l2::L2Tx, + utils::storage_key_for_standard_token_balance, + AccountTreeId, Address, Execute, L1BatchNumber, L1TxCommonData, L2ChainId, MiniblockNumber, + Nonce, PackedEthSignature, PriorityOpId, StorageLog, Transaction, H256, L2_ETH_TOKEN_ADDRESS, + SYSTEM_CONTEXT_MINIMAL_BASE_FEE, U256, +}; +use zksync_utils::{test_utils::LoadnextContractExecutionParams, u256_to_h256}; + +const DEFAULT_GAS_PER_PUBDATA: u32 = 100; +const CHAIN_ID: L2ChainId = L2ChainId(270); + +/// Representation of configuration parameters used by the state keeper. +/// Has sensible defaults for most tests, each of which can be overridden. +#[derive(Debug)] +pub(super) struct TestConfig { + pub(super) reexecute_each_tx: bool, + pub(super) max_allowed_tx_gas_limit: u32, +} + +impl TestConfig { + pub(super) fn new() -> Self { + // It's OK to use env config here, since we would load the postgres URL from there anyway. + let config = ZkSyncConfig::from_env(); + + Self { + reexecute_each_tx: true, + max_allowed_tx_gas_limit: config.chain.state_keeper.max_allowed_l2_tx_gas_limit, + } + } +} + +/// Tester represents an entity that can initialize the state and create batch executors over this storage. 
+/// Important: `Tester` must be a *sole* owner of the `ConnectionPool`, since the test pool cannot be shared. +#[derive(Debug)] +pub(super) struct Tester { + fee_account: Address, + db_dir: TempDir, + pool: ConnectionPool, + config: TestConfig, +} + +impl Tester { + pub(super) fn new(pool: ConnectionPool) -> Self { + Self::with_config(pool, TestConfig::new()) + } + + pub(super) fn with_config(pool: ConnectionPool, config: TestConfig) -> Self { + Self { + fee_account: Address::repeat_byte(0x01), + db_dir: TempDir::new().unwrap(), + pool, + config, + } + } + + /// Creates a batch executor instance. + /// This function intentionally uses sensible defaults to not introduce boilerplate. + pub(super) fn create_batch_executor(&self) -> BatchExecutorHandle { + // Not really important for the batch executor - it operates over a single batch. + let (block_context, block_properties) = self.batch_params(L1BatchNumber(1), 100); + + let secondary_storage = self + .pool + .access_storage_blocking() + .storage_load_dal() + .load_secondary_storage(RocksDB::new( + Database::StateKeeper, + self.db_dir.path().to_str().unwrap(), + true, + )); + + // We don't use the builder because it would require us to clone the `ConnectionPool`, which is forbidden + // for the test pool (see the doc-comment on `TestPool` for detauls). + BatchExecutorHandle::new( + self.config.reexecute_each_tx, + self.config.max_allowed_tx_gas_limit.into(), + block_context, + block_properties, + secondary_storage, + ) + } + + /// Creates test batch params that can be fed into the VM. 
+ fn batch_params( + &self, + l1_batch_number: L1BatchNumber, + timestamp: u64, + ) -> (BlockContextMode, BlockProperties) { + let block_properties = default_block_properties(); + + let context = BlockContext { + block_number: l1_batch_number.0, + block_timestamp: timestamp, + l1_gas_price: 1, + fair_l2_gas_price: 1, + operator_address: self.fee_account, + }; + let derived_context = DerivedBlockContext { + context, + base_fee: 1, + }; + + let previous_block_hash = U256::zero(); // Not important in this context. + ( + BlockContextMode::NewBlock(derived_context, previous_block_hash), + block_properties, + ) + } + + /// Performs the genesis in the storage. + pub(super) async fn genesis(&self) { + let mut storage = self.pool.access_storage_blocking(); + if storage.blocks_dal().is_genesis_needed() { + let chain_id = H256::from_low_u64_be(CHAIN_ID.0 as u64); + chain_schema_genesis(&mut storage, self.fee_account, chain_id).await; + } + } + + /// Adds funds for specified account list. + /// Expects genesis to be performed (i.e. `setup_storage` called beforehand). + pub(super) fn fund(&self, addresses: &[Address]) { + let mut storage = self.pool.access_storage_blocking(); + + let eth_amount = U256::from(10u32).pow(U256::from(32)); //10^32 wei + + for address in addresses { + let key = storage_key_for_standard_token_balance( + AccountTreeId::new(L2_ETH_TOKEN_ADDRESS), + address, + ); + let value = u256_to_h256(eth_amount); + let storage_logs = vec![StorageLog::new_write_log(key, value)]; + + storage + .storage_logs_dal() + .append_storage_logs(MiniblockNumber(0), &[(H256::zero(), storage_logs.clone())]); + storage + .storage_dal() + .apply_storage_logs(&[(H256::zero(), storage_logs)]); + } + } +} + +/// Test account that maintains its own nonce and is able to encode common transaction types useful for tests. 
+#[derive(Debug)] +pub(super) struct Account { + pub pk: H256, + pub nonce: Nonce, +} + +impl Account { + pub(super) fn random() -> Self { + Self { + pk: H256::random(), + nonce: Nonce(0), + } + } + + /// Returns the address of the account. + pub(super) fn address(&self) -> Address { + PackedEthSignature::address_from_private_key(&self.pk).unwrap() + } + + /// Returns a valid `execute` transaction. + /// Automatically increments nonce of the account. + pub(super) fn execute(&mut self) -> Transaction { + self.execute_with_gas_limit(1_000_000) + } + + /// Returns a valid `execute` transaction. + /// Automatically increments nonce of the account. + pub(super) fn execute_with_gas_limit(&mut self, gas_limit: u32) -> Transaction { + let fee = fee(gas_limit); + let mut l2_tx = L2Tx::new_signed( + Address::random(), + vec![], + self.nonce, + fee, + Default::default(), + CHAIN_ID, + &self.pk, + None, + Default::default(), + ) + .unwrap(); + // Input means all transaction data (NOT calldata, but all tx fields) that came from the API. + // This input will be used for the derivation of the tx hash, so put some random to it to be sure + // that the transaction hash is unique. + l2_tx.set_input(H256::random().0.to_vec(), H256::random()); + + // Increment the account nonce. + self.nonce += 1; + + l2_tx.into() + } + + /// Returns a valid `execute` transaction initiated from L1. + /// Does not increment nonce. 
+ pub(super) fn l1_execute(&mut self, serial_id: PriorityOpId) -> Transaction { + let priority_op_data = L1TxCommonData { + sender: self.address(), + canonical_tx_hash: H256::from_low_u64_be(serial_id.0), + serial_id, + deadline_block: 100000, + layer_2_tip_fee: U256::zero(), + full_fee: U256::zero(), + gas_limit: U256::from(100_100), + op_processing_type: OpProcessingType::Common, + priority_queue_type: PriorityQueueType::Deque, + eth_hash: H256::random(), + eth_block: 1, + gas_per_pubdata_limit: U256::from(1_000_000), + to_mint: U256::zero(), + refund_recipient: self.address(), + }; + + let execute = Execute { + contract_address: Address::random(), + value: Default::default(), + calldata: vec![], + factory_deps: None, + }; + + let tx = L1Tx { + common_data: priority_op_data, + execute, + received_timestamp_ms: 0, + }; + tx.into() + } + + /// Returns the transaction to deploy the loadnext contract and address of this contract (after deployment). + /// Increments the account nonce. + pub(super) fn deploy_loadnext_tx(&mut self) -> (Transaction, Address) { + let TestContract { + bytecode, + factory_deps, + .. + } = get_loadnext_contract(); + let loadnext_deploy_tx = get_deploy_tx( + self.pk, + self.nonce, + &bytecode, + factory_deps, + &encode(&[Token::Uint(U256::from(1000))]), + fee(500_000_000), + ); + let test_contract_address = + get_create_zksync_address(loadnext_deploy_tx.initiator_account(), self.nonce); + self.nonce += 1; + + (loadnext_deploy_tx.into(), test_contract_address) + } + + /// Returns a transaction to the loadnext contract with custom amount of write requests. + /// Increments the account nonce. + pub(super) fn loadnext_custom_writes_call( + &mut self, + address: Address, + writes: u32, + ) -> Transaction { + // For each iteration of the expensive contract, there are two slots that are updated: + // the length of the vector and the new slot with the element itself. 
+ let minimal_fee = + 2 * DEFAULT_GAS_PER_PUBDATA * writes * INITIAL_STORAGE_WRITE_PUBDATA_BYTES as u32; + + let fee = fee(minimal_fee + 500_000_000); + + let tx = mock_loadnext_test_call( + self.pk, + self.nonce, + address, + fee, + LoadnextContractExecutionParams { + reads: 100, + writes: writes as usize, + events: 100, + hashes: 100, + recursive_calls: 0, + deploys: 100, + }, + ); + self.nonce += 1; + tx.into() + } + + /// Returns a transaction to the loadnext contract with custom gas limit and expected burned gas amount. + /// Increments the account nonce. + pub(super) fn loadnext_custom_gas_call( + &mut self, + address: Address, + gas_to_burn: u32, + gas_limit: u32, + ) -> Transaction { + let fee = fee(gas_limit); + let tx = mock_loadnext_gas_burn_call(self.pk, self.nonce, address, fee, gas_to_burn); + self.nonce += 1; + tx.into() + } +} + +fn fee(gas_limit: u32) -> Fee { + Fee { + gas_limit: U256::from(gas_limit), + max_fee_per_gas: SYSTEM_CONTEXT_MINIMAL_BASE_FEE.into(), + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(DEFAULT_GAS_PER_PUBDATA), + } +} diff --git a/core/bin/zksync_core/src/state_keeper/extractors.rs b/core/bin/zksync_core/src/state_keeper/extractors.rs new file mode 100644 index 000000000000..91d838a986ef --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/extractors.rs @@ -0,0 +1,239 @@ +//! Pure functions that convert blocks/transactions data as required by the state keeper. 
+ +use itertools::Itertools; +use std::collections::HashMap; +use std::time::{Duration, Instant}; + +use vm::vm_with_bootloader::{get_bootloader_memory, BlockContextMode, TxExecutionMode}; +use zksync_dal::StorageProcessor; +use zksync_types::block::DeployedContract; +use zksync_types::tx::{IncludedTxLocation, TransactionExecutionResult}; +use zksync_types::{ + l2_to_l1_log::L2ToL1Log, log::StorageLogKind, AccountTreeId, Address, ExecuteTransactionCommon, + L1BatchNumber, StorageKey, StorageLog, StorageLogQuery, StorageValue, VmEvent, + ACCOUNT_CODE_STORAGE_ADDRESS, H256, U256, +}; +use zksync_utils::{h256_to_account_address, h256_to_u256}; + +use super::updates::{L1BatchUpdates, UpdatesManager}; + +/// Storage logs grouped by transaction hash +type StorageLogs = Vec<(H256, Vec)>; + +pub(crate) fn log_queries_to_storage_logs( + log_queries: &[StorageLogQuery], + updates_manager: &UpdatesManager, + is_fictive_miniblock: bool, +) -> StorageLogs { + log_queries + .iter() + .group_by(|log| log.log_query.tx_number_in_block) + .into_iter() + .map(|(tx_index, logs)| { + let tx_hash = if is_fictive_miniblock { + assert_eq!( + tx_index as usize, + updates_manager.pending_executed_transactions_len() + ); + H256::zero() + } else { + updates_manager.get_tx_by_index(tx_index as usize).hash() + }; + + ( + tx_hash, + logs.map(StorageLog::from_log_query) + .collect::>(), + ) + }) + .collect() +} + +pub(crate) fn write_logs_from_storage_logs(storage_logs: StorageLogs) -> StorageLogs { + storage_logs + .into_iter() + .map(|(hash, mut logs)| { + logs.retain(|log| log.kind == StorageLogKind::Write); + (hash, logs) + }) + .collect() +} + +pub(crate) fn extract_events_this_block( + vm_events: &[VmEvent], + updates_manager: &UpdatesManager, + is_fictive_miniblock: bool, +) -> Vec<(IncludedTxLocation, Vec)> { + vm_events + .iter() + .group_by(|event| event.location.1) + .into_iter() + .map(|(tx_index, events)| { + let (tx_hash, tx_initiator_address) = if is_fictive_miniblock { + 
assert_eq!( + tx_index as usize, + updates_manager.pending_executed_transactions_len() + ); + (H256::zero(), Address::zero()) + } else { + let tx = updates_manager.get_tx_by_index(tx_index as usize); + (tx.hash(), tx.initiator_account()) + }; + + ( + IncludedTxLocation { + tx_hash, + tx_index_in_miniblock: tx_index + - updates_manager.l1_batch.executed_transactions.len() as u32, + tx_initiator_address, + }, + events.cloned().collect::>(), + ) + }) + .collect() +} + +pub(crate) fn extract_l2_to_l1_logs_this_block( + l2_to_l1_logs: &[L2ToL1Log], + updates_manager: &UpdatesManager, + is_fictive_miniblock: bool, +) -> Vec<(IncludedTxLocation, Vec)> { + l2_to_l1_logs + .iter() + .group_by(|log| log.tx_number_in_block) + .into_iter() + .map(|(tx_index, l2_to_l1_logs)| { + let (tx_hash, tx_initiator_address) = if is_fictive_miniblock { + assert_eq!( + tx_index as usize, + updates_manager.pending_executed_transactions_len() + ); + (H256::zero(), Address::zero()) + } else { + let tx = updates_manager.get_tx_by_index(tx_index as usize); + (tx.hash(), tx.initiator_account()) + }; + + ( + IncludedTxLocation { + tx_hash, + tx_index_in_miniblock: tx_index as u32 + - updates_manager.l1_batch.executed_transactions.len() as u32, + tx_initiator_address, + }, + l2_to_l1_logs.cloned().collect::>(), + ) + }) + .collect() +} + +pub(crate) fn l1_l2_tx_count( + executed_transactions: &[TransactionExecutionResult], +) -> (usize, usize) { + let (l1_txs, l2_txs): ( + Vec<&TransactionExecutionResult>, + Vec<&TransactionExecutionResult>, + ) = executed_transactions + .iter() + .partition(|t| matches!(t.transaction.common_data, ExecuteTransactionCommon::L1(_))); + (l1_txs.len(), l2_txs.len()) +} + +pub(crate) fn get_initial_bootloader_memory( + updates_accumulator: &L1BatchUpdates, + block_context: BlockContextMode, +) -> Vec<(usize, U256)> { + let transactions_data = updates_accumulator + .executed_transactions + .iter() + .map(|res| res.transaction.clone().into()) + .collect(); + + let 
refunds = updates_accumulator
        .executed_transactions
        .iter()
        .map(|res| res.operator_suggested_refund)
        .collect();

    get_bootloader_memory(
        transactions_data,
        refunds,
        TxExecutionMode::VerifyExecute,
        block_context,
    )
}

/// Returns `(write_count, read_count)` for the given storage log queries.
///
/// FIX: in zk_evm's `LogQuery`, `rw_flag == true` marks a *write*; the previous
/// bindings named the partitions the other way around (`reads` held the writes).
/// Both the function name (`write_read_counts`) and the call sites — which label
/// the tuple `(writes, reads)` when logging — confirm the first element is the
/// write count, so only the misleading local names are corrected; the returned
/// values and their order are unchanged.
pub(crate) fn log_query_write_read_counts(logs: &[StorageLogQuery]) -> (usize, usize) {
    let (writes, reads): (Vec<&StorageLogQuery>, Vec<&StorageLogQuery>) =
        logs.iter().partition(|l| l.log_query.rw_flag);
    (writes.len(), reads.len())
}

/// Collects the contracts deployed within the current miniblock, grouped by the
/// hash of the transaction that deployed them.
///
/// A deployment is recognized as a non-zero bytecode-hash write into the
/// `AccountCodeStorage` system contract; the actual bytecode is loaded from the
/// factory deps storage (which must already contain it — see the `expect`).
pub(crate) fn contracts_deployed_this_miniblock(
    unique_storage_updates: Vec<(StorageKey, (H256, StorageValue))>,
    storage: &mut StorageProcessor<'_>,
) -> Vec<(H256, Vec<DeployedContract>)> {
    let mut result: HashMap<H256, Vec<DeployedContract>> = Default::default();

    // Each storage update in the AccountCodeStorage denotes the fact
    // some contract bytecode has been deployed
    unique_storage_updates
        .into_iter()
        .filter(|(key, _)| *key.account().address() == ACCOUNT_CODE_STORAGE_ADDRESS)
        .for_each(|(code_key, (tx_hash, bytecode_hash))| {
            // A zero hash means the slot was cleared, not a deployment.
            if bytecode_hash == H256::zero() {
                return;
            }

            let contract_bytecode = storage
                .storage_dal()
                .get_factory_dep(bytecode_hash)
                .expect("Missing factory dep for deployed contract");

            // `or_default()` replaces the more verbose `or_insert_with(Default::default)`.
            let contracts_in_tx = result.entry(tx_hash).or_default();
            contracts_in_tx.push(DeployedContract {
                account_id: AccountTreeId::new(h256_to_account_address(code_key.key())),
                bytecode: contract_bytecode,
            });
        });

    result.into_iter().collect()
}

/// Returns the state root of the batch *preceding* `number`, blocking until it
/// is available. Batch 0 has no predecessor, so a default (zero) root is returned.
pub(crate) fn wait_for_prev_l1_batch_state_root_unchecked(
    storage: &mut StorageProcessor<'_>,
    number: L1BatchNumber,
) -> U256 {
    if number == L1BatchNumber(0) {
        return U256::default();
    }
    wait_for_l1_batch_state_root_unchecked(storage, number - 1)
}

// warning: if invoked for a `L1BatchNumber` of a non-existent l1 batch, will block current thread indefinitely
pub(crate) fn wait_for_l1_batch_state_root_unchecked(
    storage: &mut StorageProcessor<'_>,
    number: L1BatchNumber,
) -> U256 {
    // If 
the state root is not known yet, this duration will be used to back off in the while loops + const SAFE_STATE_ROOT_INTERVAL: Duration = Duration::from_millis(100); + + let stage_started_at: Instant = Instant::now(); + loop { + let root_hash = storage.blocks_dal().get_block_state_root(number); + if let Some(root) = root_hash { + vlog::trace!( + "Waited for hash of block #{:?} took {:?}", + number.0, + stage_started_at.elapsed() + ); + metrics::histogram!( + "server.state_keeper.wait_for_prev_hash_time", + stage_started_at.elapsed() + ); + return h256_to_u256(root); + } + + std::thread::sleep(SAFE_STATE_ROOT_INTERVAL); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/io/mempool.rs b/core/bin/zksync_core/src/state_keeper/io/mempool.rs new file mode 100644 index 000000000000..b45f5c1f3586 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/io/mempool.rs @@ -0,0 +1,622 @@ +use std::sync::Arc; +use std::time::Duration; +use std::time::Instant; + +use vm::utils::default_block_properties; +use vm::vm_with_bootloader::BlockContext; +use vm::vm_with_bootloader::BlockContextMode; +use vm::vm_with_bootloader::DerivedBlockContext; +use vm::zk_evm::block_properties::BlockProperties; +use vm::VmBlockResult; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_eth_client::EthInterface; +use zksync_mempool::L2TxFilter; +use zksync_types::block::MiniblockHeader; +use zksync_types::event::{extract_added_tokens, extract_long_l2_to_l1_messages}; +use zksync_types::log_query_sorter::sort_storage_access_queries; +use zksync_types::FAIR_L2_GAS_PRICE; +use zksync_types::{ + block::L1BatchHeader, Address, L1BatchNumber, MiniblockNumber, StorageLogQueryType, + Transaction, U256, +}; +use zksync_utils::{miniblock_hash, time::millis_since_epoch}; + +use crate::gas_adjuster::GasAdjuster; +use crate::state_keeper::extractors; +use crate::state_keeper::updates::UpdatesManager; +use crate::state_keeper::MempoolGuard; + +use super::PendingBatchData; +use 
super::StateKeeperIO; + +#[derive(Debug)] +struct StateKeeperStats { + num_contracts: u64, +} + +/// Mempool-based IO for the state keeper. +/// Receives transactions from the database through the mempool filtering logic. +/// Decides which batch parameters should be used for the new batch. +/// This is an IO for the main server application. +#[derive(Debug)] +pub(crate) struct MempoolIO { + mempool: MempoolGuard, + pool: ConnectionPool, + filter: L2TxFilter, + current_miniblock_number: MiniblockNumber, + current_l1_batch_number: L1BatchNumber, + fee_account: Address, + delay_interval: Duration, + + // Grafana metrics + statistics: StateKeeperStats, + + // Used to keep track of gas prices to set accepted price per pubdata byte in blocks. + gas_adjuster: Arc>, +} + +impl StateKeeperIO for MempoolIO { + fn current_l1_batch_number(&self) -> L1BatchNumber { + self.current_l1_batch_number + } + + fn current_miniblock_number(&self) -> MiniblockNumber { + self.current_miniblock_number + } + + fn load_pending_batch(&mut self) -> Option { + let mut storage = self.pool.access_storage_blocking(); + + // If pending miniblock doesn't exist, it means that there is no unsynced state (i.e. no transaction + // were executed after the last sealed batch). 
+ let pending_miniblock_number = self.pending_miniblock_number(&mut storage); + let pending_miniblock_header = storage + .blocks_dal() + .get_miniblock_header(pending_miniblock_number)?; + + vlog::info!("getting previous block hash"); + let previous_l1_batch_hash = extractors::wait_for_prev_l1_batch_state_root_unchecked( + &mut storage, + self.current_l1_batch_number, + ); + vlog::info!("previous_l1_batch_hash: {}", previous_l1_batch_hash); + let params = self.default_block_params( + pending_miniblock_header.timestamp, + previous_l1_batch_hash, + pending_miniblock_header.l1_gas_price, + pending_miniblock_header.l2_fair_gas_price, + ); + + let txs = storage.transactions_dal().get_transactions_to_reexecute(); + + Some(PendingBatchData { params, txs }) + } + + fn wait_for_new_batch_params( + &mut self, + max_wait: Duration, + ) -> Option<(BlockContextMode, BlockProperties)> { + // Block until at least one transaction in the mempool can match the filter (or timeout happens). + // This is needed to ensure that block timestamp is not too old. + poll_until(self.delay_interval, max_wait, || { + // We create a new filter each time, since parameters may change and a previously + // ignored transaction in the mempool may be scheduled for the execution. + self.filter = self.gas_adjuster.l2_tx_filter(); + self.mempool.has_next(&self.filter).then(|| { + // We only need to get the root hash when we're certain that we have a new transaction. 
+ vlog::info!("getting previous block hash"); + let previous_l1_batch_hash = { + let mut storage = self.pool.access_storage_blocking(); + extractors::wait_for_prev_l1_batch_state_root_unchecked( + &mut storage, + self.current_l1_batch_number, + ) + }; + vlog::info!("previous_l1_batch_hash: {}", previous_l1_batch_hash); + + self.default_block_params( + (millis_since_epoch() / 1000) as u64, + previous_l1_batch_hash, + self.filter.l1_gas_price, + FAIR_L2_GAS_PRICE, + ) + }) + }) + } + + fn wait_for_next_tx(&mut self, max_wait: Duration) -> Option { + poll_until(self.delay_interval, max_wait, || { + let started_at = Instant::now(); + let res = self.mempool.next_transaction(&self.filter); + metrics::histogram!( + "server.state_keeper.get_tx_from_mempool", + started_at.elapsed(), + ); + res + }) + } + + fn rollback(&mut self, tx: &Transaction) { + // Reset nonces in the mempool. + self.mempool.rollback(tx); + // Insert the transaction back. + self.mempool.insert(vec![tx.clone()], Default::default()); + } + + fn reject(&mut self, rejected: &Transaction, error: &str) { + assert!( + !rejected.is_l1(), + "L1 transactions should not be rejected: {}", + error + ); + + // Reset the nonces in the mempool, but don't insert the transaction back. + self.mempool.rollback(rejected); + + // Mark tx as rejected in the storage. 
+ let mut storage = self.pool.access_storage_blocking(); + metrics::increment_counter!("server.state_keeper.rejected_transactions"); + vlog::warn!( + "transaction {} is rejected with error {}", + rejected.hash(), + error + ); + storage + .transactions_dal() + .mark_tx_as_rejected(rejected.hash(), &format!("rejected: {}", error)); + } + + fn seal_miniblock(&mut self, updates_manager: &UpdatesManager) -> u64 { + let new_miniblock_timestamp = (millis_since_epoch() / 1000) as u64; + let pool = self.pool.clone(); + let mut storage = pool.access_storage_blocking(); + self.seal_miniblock_impl(&mut storage, updates_manager, false); + new_miniblock_timestamp + } + + fn seal_l1_batch( + &mut self, + block_result: VmBlockResult, + updates_manager: UpdatesManager, + block_context: DerivedBlockContext, + ) { + assert_eq!( + updates_manager.batch_timestamp(), + block_context.context.block_timestamp, + "Batch timestamps don't match, batch number {}", + self.current_l1_batch_number() + ); + let pool = self.pool.clone(); + let mut storage = pool.access_storage_blocking(); + self.seal_l1_batch_impl(&mut storage, block_result, updates_manager, block_context); + } +} + +impl MempoolIO { + pub(crate) fn new( + mempool: MempoolGuard, + pool: ConnectionPool, + fee_account: Address, + delay_interval: Duration, + gas_adjuster: Arc>, + ) -> Self { + let mut storage = pool.access_storage_blocking(); + let last_sealed_block_header = storage.blocks_dal().get_newest_block_header(); + let last_miniblock_number = storage.blocks_dal().get_sealed_miniblock_number(); + let num_contracts = storage.storage_load_dal().load_number_of_contracts(); + let filter = L2TxFilter::default(); // Will be initialized properly on the first newly opened batch. 
+ drop(storage); + + Self { + mempool, + pool, + filter, + current_l1_batch_number: last_sealed_block_header.number + 1, + current_miniblock_number: last_miniblock_number + 1, + fee_account, + delay_interval, + statistics: StateKeeperStats { num_contracts }, + gas_adjuster, + } + } + + fn pending_miniblock_number(&self, storage: &mut StorageProcessor<'_>) -> MiniblockNumber { + let (_, last_miniblock_number_included_in_l1_batch) = storage + .blocks_dal() + .get_miniblock_range_of_l1_batch(self.current_l1_batch_number - 1) + .unwrap(); + last_miniblock_number_included_in_l1_batch + 1 + } + + fn miniblock_assertions(&self, updates_manager: &UpdatesManager, is_fictive: bool) { + if is_fictive { + assert!(updates_manager.miniblock.executed_transactions.is_empty()); + } else { + assert!(!updates_manager.miniblock.executed_transactions.is_empty()); + } + + let first_tx_index_in_miniblock = updates_manager.l1_batch.executed_transactions.len(); + let next_tx_index = updates_manager.pending_executed_transactions_len(); + let miniblock_tx_index_range = if is_fictive { + next_tx_index..(next_tx_index + 1) + } else { + first_tx_index_in_miniblock..next_tx_index + }; + + for event in updates_manager.miniblock.events.iter() { + assert!(miniblock_tx_index_range.contains(&(event.location.1 as usize))) + } + for storage_log in updates_manager.miniblock.storage_logs.iter() { + assert!(miniblock_tx_index_range + .contains(&(storage_log.log_query.tx_number_in_block as usize))) + } + } + + fn track_l1_batch_execution_stage(stage: &'static str, stage_started_at: &mut Instant) { + metrics::histogram!( + "server.state_keeper.l1_batch.sealed_time_stage", + stage_started_at.elapsed(), + "stage" => stage + ); + *stage_started_at = Instant::now(); + } + + fn track_miniblock_execution_stage(stage: &'static str, stage_started_at: &mut Instant) { + metrics::histogram!( + "server.state_keeper.miniblock.sealed_time_stage", + stage_started_at.elapsed(), + "stage" => stage + ); + *stage_started_at = 
Instant::now(); + } + + // If `is_fictive` flag is set to true, then it is assumed that + // we should seal a fictive miniblock with no transactions in it. It is needed because + // there might be some storage logs/events that are created + // after the last processed tx in l1 batch. + // For now, there is only one event for sending the fee to the operator.. + fn seal_miniblock_impl( + &mut self, + storage: &mut StorageProcessor<'_>, + updates_manager: &UpdatesManager, + is_fictive: bool, + ) { + self.miniblock_assertions(updates_manager, is_fictive); + + let started_at = Instant::now(); + let mut stage_started_at: Instant = Instant::now(); + + let (l1_tx_count, l2_tx_count) = + extractors::l1_l2_tx_count(&updates_manager.miniblock.executed_transactions); + vlog::info!( + "sealing miniblock {} (l1 batch {}) with {} ({} l2 + {} l1) txs, {} events, (writes, reads): {:?}", + self.current_miniblock_number, + self.current_l1_batch_number, + l1_tx_count + l2_tx_count, + l2_tx_count, + l1_tx_count, + updates_manager.miniblock.events.len(), + extractors::log_query_write_read_counts(&updates_manager.miniblock.storage_logs), + ); + + let mut transaction = storage.start_transaction_blocking(); + let miniblock_header = MiniblockHeader { + number: self.current_miniblock_number, + timestamp: updates_manager.miniblock.timestamp, + hash: miniblock_hash(self.current_miniblock_number), + l1_tx_count: l1_tx_count as u16, + l2_tx_count: l2_tx_count as u16, + base_fee_per_gas: updates_manager.base_fee_per_gas(), + l1_gas_price: updates_manager.l1_gas_price(), + l2_fair_gas_price: updates_manager.fair_l2_gas_price(), + }; + + transaction.blocks_dal().insert_miniblock(miniblock_header); + Self::track_miniblock_execution_stage("insert_miniblock_header", &mut stage_started_at); + + transaction + .transactions_dal() + .mark_txs_as_executed_in_miniblock( + self.current_miniblock_number, + &updates_manager.miniblock.executed_transactions, + updates_manager.base_fee_per_gas().into(), + ); + 
Self::track_miniblock_execution_stage( + "mark_transactions_in_miniblock", + &mut stage_started_at, + ); + + let storage_logs = extractors::log_queries_to_storage_logs( + &updates_manager.miniblock.storage_logs, + updates_manager, + is_fictive, + ); + let write_logs = extractors::write_logs_from_storage_logs(storage_logs); + + transaction + .storage_logs_dal() + .insert_storage_logs(self.current_miniblock_number, &write_logs); + Self::track_miniblock_execution_stage("insert_storage_logs", &mut stage_started_at); + + let unique_updates = transaction.storage_dal().apply_storage_logs(&write_logs); + Self::track_miniblock_execution_stage("apply_storage_logs", &mut stage_started_at); + + let new_factory_deps = updates_manager.miniblock.new_factory_deps.clone(); + if !new_factory_deps.is_empty() { + transaction + .storage_dal() + .insert_factory_deps(self.current_miniblock_number, new_factory_deps); + } + Self::track_miniblock_execution_stage("insert_factory_deps", &mut stage_started_at); + + // Factory deps should be inserted before using `contracts_deployed_this_miniblock`. 
+ let deployed_contracts = + extractors::contracts_deployed_this_miniblock(unique_updates, &mut transaction); + if !deployed_contracts.is_empty() { + self.statistics.num_contracts += deployed_contracts.len() as u64; + } + + let added_tokens = extract_added_tokens(&updates_manager.miniblock.events); + if !added_tokens.is_empty() { + transaction.tokens_dal().add_tokens(added_tokens); + } + Self::track_miniblock_execution_stage("insert_tokens", &mut stage_started_at); + + let events_this_miniblock = extractors::extract_events_this_block( + &updates_manager.miniblock.events, + updates_manager, + is_fictive, + ); + transaction + .events_dal() + .save_events(self.current_miniblock_number, events_this_miniblock); + Self::track_miniblock_execution_stage("insert_events", &mut stage_started_at); + + let l2_to_l1_logs_this_miniblock = extractors::extract_l2_to_l1_logs_this_block( + &updates_manager.miniblock.l2_to_l1_logs, + updates_manager, + is_fictive, + ); + transaction + .events_dal() + .save_l2_to_l1_logs(self.current_miniblock_number, l2_to_l1_logs_this_miniblock); + Self::track_miniblock_execution_stage("insert_l2_to_l1_logs", &mut stage_started_at); + + transaction.commit_blocking(); + Self::track_miniblock_execution_stage("commit_miniblock", &mut stage_started_at); + + metrics::histogram!( + "server.state_keeper.miniblock.transactions_in_miniblock", + updates_manager.miniblock.executed_transactions.len() as f64 + ); + metrics::histogram!( + "server.miniblock.latency", + ((millis_since_epoch() - updates_manager.miniblock.timestamp as u128 * 1000) as f64) / 1000f64, + "stage" => "sealed" + ); + metrics::histogram!( + "server.state_keeper.miniblock.sealed_time", + started_at.elapsed(), + ); + metrics::gauge!( + "server.miniblock.number", + self.current_miniblock_number.0 as f64, + "stage" => "sealed" + ); + + metrics::gauge!( + "server.state_keeper.storage_contracts_size", + self.statistics.num_contracts as f64 + ); + vlog::debug!( + "sealed miniblock {} in {:?}", + 
self.current_miniblock_number, + started_at.elapsed() + ); + + Self::track_miniblock_execution_stage( + "apply_miniblock_updates_to_l1_batch_updates_accumulator", + &mut stage_started_at, + ); + self.current_miniblock_number += 1; + } + + fn seal_l1_batch_impl( + &mut self, + storage: &mut StorageProcessor<'_>, + block_result: VmBlockResult, + mut updates_manager: UpdatesManager, + block_context: DerivedBlockContext, + ) { + let started_at = Instant::now(); + let mut stage_started_at: Instant = Instant::now(); + + let mut transaction = storage.start_transaction_blocking(); + + // The vm execution was paused right after the last transaction was executed. + // There is some post-processing work that the VM needs to do before the block is fully processed. + let VmBlockResult { + full_result, + block_tip_result, + } = block_result; + assert!( + full_result.revert_reason.is_none(), + "VM must not revert when finalizing block. Revert reason: {:?}", + full_result.revert_reason + ); + Self::track_l1_batch_execution_stage("vm_finalization", &mut stage_started_at); + + updates_manager + .miniblock + .extend_from_fictive_transaction(block_tip_result.logs); + // Seal fictive miniblock with last events and storage logs. 
+ self.seal_miniblock_impl(&mut transaction, &updates_manager, true); + Self::track_l1_batch_execution_stage("fictive_miniblock", &mut stage_started_at); + + let (_, deduped_log_queries) = + sort_storage_access_queries(&full_result.storage_log_queries); + Self::track_l1_batch_execution_stage("log_deduplication", &mut stage_started_at); + + let (l1_tx_count, l2_tx_count) = + extractors::l1_l2_tx_count(&updates_manager.l1_batch.executed_transactions); + vlog::info!( + "sealing l1 batch {:?} with {:?} ({:?} l2 + {:?} l1) txs, {:?} l2_l1_logs, {:?} events, (writes, reads): {:?} , (writes_dedup, reads_dedup): {:?} ", + self.current_l1_batch_number, + l1_tx_count + l2_tx_count, + l2_tx_count, + l1_tx_count, + full_result.l2_to_l1_logs.len(), + full_result.events.len(), + extractors::log_query_write_read_counts(&full_result.storage_log_queries), + extractors::log_query_write_read_counts(&deduped_log_queries), + ); + + let hash = extractors::wait_for_prev_l1_batch_state_root_unchecked( + &mut transaction, + self.current_l1_batch_number, + ); + let block_context_properties = BlockContextMode::NewBlock(block_context, hash); + + let l1_batch = L1BatchHeader { + number: self.current_l1_batch_number, + is_finished: true, + timestamp: block_context.context.block_timestamp, + fee_account_address: self.fee_account, + priority_ops_onchain_data: updates_manager.l1_batch.priority_ops_onchain_data.clone(), + l1_tx_count: l1_tx_count as u16, + l2_tx_count: l2_tx_count as u16, + l2_to_l1_logs: full_result.l2_to_l1_logs, + l2_to_l1_messages: extract_long_l2_to_l1_messages(&full_result.events), + bloom: Default::default(), + initial_bootloader_contents: extractors::get_initial_bootloader_memory( + &updates_manager.l1_batch, + block_context_properties, + ), + used_contract_hashes: full_result.used_contract_hashes, + base_fee_per_gas: block_context.base_fee, + l1_gas_price: updates_manager.l1_gas_price(), + l2_fair_gas_price: updates_manager.fair_l2_gas_price(), + }; + + transaction + 
.blocks_dal() + .insert_l1_batch(l1_batch, updates_manager.l1_batch.l1_gas_count); + Self::track_l1_batch_execution_stage("insert_l1_batch_header", &mut stage_started_at); + + transaction + .blocks_dal() + .mark_miniblocks_as_executed_in_l1_batch(self.current_l1_batch_number); + Self::track_l1_batch_execution_stage( + "set_l1_batch_number_for_miniblocks", + &mut stage_started_at, + ); + + transaction + .transactions_dal() + .mark_txs_as_executed_in_l1_batch( + self.current_l1_batch_number, + &updates_manager.l1_batch.executed_transactions, + ); + Self::track_l1_batch_execution_stage( + "mark_txs_as_executed_in_l1_batch", + &mut stage_started_at, + ); + + transaction + .storage_logs_dedup_dal() + .insert_storage_logs(self.current_l1_batch_number, &deduped_log_queries); + Self::track_l1_batch_execution_stage("insert_storage_dedup_logs", &mut stage_started_at); + + let (protective_reads, deduplicated_writes): (Vec<_>, Vec<_>) = deduped_log_queries + .into_iter() + .partition(|log_query| log_query.log_type == StorageLogQueryType::Read); + transaction + .storage_logs_dedup_dal() + .insert_protective_reads(self.current_l1_batch_number, &protective_reads); + Self::track_l1_batch_execution_stage("insert_protective_reads", &mut stage_started_at); + + transaction + .storage_logs_dedup_dal() + .insert_initial_writes(self.current_l1_batch_number, &deduplicated_writes); + Self::track_l1_batch_execution_stage("insert_initial_writes", &mut stage_started_at); + + transaction.commit_blocking(); + Self::track_l1_batch_execution_stage("commit_l1_batch", &mut stage_started_at); + + metrics::histogram!( + "server.state_keeper.l1_batch.updated_storage_keys_len", + updates_manager.l1_batch.modified_storage_keys_number as f64 + ); + metrics::histogram!( + "server.state_keeper.l1_batch.transactions_in_l1_batch", + updates_manager.l1_batch.executed_transactions.len() as f64 + ); + metrics::histogram!( + "server.l1_batch.latency", + ((millis_since_epoch() - 
block_context.context.block_timestamp as u128 * 1000) as f64) / 1000f64,
            "stage" => "sealed"
        );
        metrics::gauge!(
            "server.block_number",
            self.current_l1_batch_number.0 as f64,
            "stage" => "sealed"
        );

        metrics::histogram!(
            "server.state_keeper.l1_batch.sealed_time",
            started_at.elapsed(),
        );
        vlog::debug!(
            "sealed l1 batch {} in {:?}",
            self.current_l1_batch_number,
            started_at.elapsed()
        );

        self.current_l1_batch_number += 1;
    }

    /// Assembles the VM block context and default block properties for a new
    /// L1 batch from the current batch number, fee account, timestamp, the
    /// previous batch's root hash, and the supplied gas prices.
    fn default_block_params(
        &self,
        l1_batch_timestamp: u64,
        previous_block_hash: U256,
        l1_gas_price: u64,
        fair_l2_gas_price: u64,
    ) -> (BlockContextMode, BlockProperties) {
        // FIX: the format string previously read `...{fair_l2_gas_price}` with no
        // closing `)`, leaving the opening parenthesis unbalanced in the log line.
        vlog::info!(
            "(l1_gas_price,fair_l2_gas_price) for block {} is ({l1_gas_price},{fair_l2_gas_price})",
            self.current_l1_batch_number.0
        );

        let block_properties = default_block_properties();

        let context = BlockContext {
            block_number: self.current_l1_batch_number.0,
            block_timestamp: l1_batch_timestamp,
            l1_gas_price,
            fair_l2_gas_price,
            operator_address: self.fee_account,
        };

        (
            BlockContextMode::NewBlock(context.into(), previous_block_hash),
            block_properties,
        )
    }
}

/// Polls `f` every `delay_interval` (capped by `max_wait`) until it returns
/// `Some`, or until `max_wait` has elapsed, in which case `None` is returned.
/// Blocking: sleeps the current thread between attempts.
fn poll_until<T, F: FnMut() -> Option<T>>(
    delay_interval: Duration,
    max_wait: Duration,
    mut f: F,
) -> Option<T> {
    let wait_interval = delay_interval.min(max_wait);
    let start = Instant::now();
    while start.elapsed() <= max_wait {
        let res = f();
        if res.is_some() {
            return res;
        }
        std::thread::sleep(wait_interval);
    }
    None
}
diff --git a/core/bin/zksync_core/src/state_keeper/io/mod.rs b/core/bin/zksync_core/src/state_keeper/io/mod.rs
new file mode 100644
index 000000000000..88c809c23e51
--- /dev/null
+++ b/core/bin/zksync_core/src/state_keeper/io/mod.rs
@@ -0,0 +1,66 @@
use std::time::Duration;

use vm::vm_with_bootloader::BlockContextMode;
use vm::vm_with_bootloader::DerivedBlockContext;
use vm::zk_evm::block_properties::BlockProperties;
use vm::VmBlockResult;
use zksync_types::{L1BatchNumber, MiniblockNumber, Transaction};

use crate::state_keeper::updates::UpdatesManager;

pub(crate) use mempool::MempoolIO;

mod mempool;

/// Contains information about the un-synced execution state:
/// Batch data and transactions that were executed before and are marked as so in the DB,
/// but aren't a part of a sealed batch.
///
/// Upon a restart, we must re-execute the pending state to continue progressing from the
/// place where we stopped.
///
/// Invariant: there is at most one pending batch, and it is always the latest batch.
#[derive(Debug)]
pub(crate) struct PendingBatchData {
    /// Data used to initialize the pending batch. We have to make sure that all the parameters
    /// (e.g. timestamp) are the same, so transaction would have the same result after re-execution.
    pub(crate) params: (BlockContextMode, BlockProperties),
    /// List of miniblocks and corresponding transactions that were executed within batch.
    pub(crate) txs: Vec<(MiniblockNumber, Vec<Transaction>)>,
}

/// `StateKeeperIO` provides the interactive layer for the state keeper:
/// it's used to receive volatile parameters (such as batch parameters), and also it's used to perform
/// mutable operations on the persistent state (e.g. persist executed batches).
pub(crate) trait StateKeeperIO: 'static + std::fmt::Debug + Send {
    /// Returns the number of the currently processed L1 batch.
    fn current_l1_batch_number(&self) -> L1BatchNumber;
    /// Returns the number of the currently processed miniblock (aka L2 block).
    fn current_miniblock_number(&self) -> MiniblockNumber;
    /// Returns the data on the batch that was not sealed before the server restart.
    /// See `PendingBatchData` doc-comment for details.
    /// Returns `None` when there is no unsynced state to resume.
    fn load_pending_batch(&mut self) -> Option<PendingBatchData>;
    /// Blocks for up to `max_wait` until the parameters for the next L1 batch are available.
    /// Returns the data required to initialize the VM for the next batch.
+ fn wait_for_new_batch_params( + &mut self, + max_wait: Duration, + ) -> Option<(BlockContextMode, BlockProperties)>; + /// Blocks for up to `max_wait` until the next transaction is available for execution. + /// Returns `None` if no transaction became available until the timeout. + fn wait_for_next_tx(&mut self, max_wait: Duration) -> Option; + /// Marks the transaction as "not executed", so it can be retrieved from the IO again. + fn rollback(&mut self, tx: &Transaction); + /// Marks the transaction as "rejected", e.g. one that is not correct and can't be executed. + fn reject(&mut self, tx: &Transaction, error: &str); + /// Marks the miniblock (aka L2 block) as sealed. + /// Returns the timestamp for the next miniblock. + fn seal_miniblock(&mut self, updates_manager: &UpdatesManager) -> u64; + /// Marks the L1 batch as sealed. + fn seal_l1_batch( + &mut self, + block_result: VmBlockResult, + updates_manager: UpdatesManager, + block_context: DerivedBlockContext, + ); +} diff --git a/core/bin/zksync_core/src/state_keeper/keeper.rs b/core/bin/zksync_core/src/state_keeper/keeper.rs new file mode 100644 index 000000000000..911c42fea899 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/keeper.rs @@ -0,0 +1,390 @@ +use std::time::Duration; + +use tokio::sync::watch::Receiver; + +use vm::{ + vm_with_bootloader::BlockContextMode, zk_evm::block_properties::BlockProperties, TxRevertReason, +}; +use zksync_types::{MiniblockNumber, Transaction}; + +use crate::state_keeper::{ + batch_executor::{BatchExecutorHandle, L1BatchExecutorBuilder, TxExecutionResult}, + io::{PendingBatchData, StateKeeperIO}, + seal_criteria::{SealManager, SealResolution}, + types::ExecutionMetricsForCriteria, + updates::UpdatesManager, +}; + +/// Amount of time to block on waiting for some resource. The exact value is not really important, +/// we only need it to not block on waiting indefinitely and be able to process cancellation requests. 
+const POLL_WAIT_DURATION: Duration = Duration::from_secs(1); + +/// Structure used to indicate that task cancellation was requested. +#[derive(Debug)] +struct Canceled; + +/// State keeper represents a logic layer of batch/miniblock processing flow. +/// It's responsible for taking all the data from the `StateKeeperIO`, feeding it into `BatchExecutor` objects +/// and calling `SealManager` to decide whether miniblock or batch should be sealed. +/// +/// State keeper maintains the batch execution state in the `UpdatesManager` until batch is sealed and these changes +/// are persisted by the `StateKeeperIO` implementation. +/// +/// You can think of it as a state machine that runs over a sequence of incoming transactions, turning them into +/// a sequence of executed miniblocks and batches. +#[derive(Debug)] +pub(crate) struct ZkSyncStateKeeper { + stop_receiver: Receiver, + io: Box, + batch_executor_base: Box, + sealer: SealManager, +} + +impl ZkSyncStateKeeper { + pub(crate) fn new( + stop_receiver: Receiver, + io: Box, + batch_executor_base: Box, + sealer: SealManager, + ) -> Self { + ZkSyncStateKeeper { + stop_receiver, + io, + batch_executor_base, + sealer, + } + } + + pub fn run(mut self) { + match self.run_inner() { + Ok(()) => { + // Normally, state keeper can only exit its routine if the task was cancelled. + panic!("State keeper exited the main loop") + } + Err(Canceled) => { + vlog::info!("Stop signal received, state keeper is shutting down"); + } + } + } + + /// Fallible version of `run` routine that allows to easily exit upon cancellation. + fn run_inner(&mut self) -> Result<(), Canceled> { + vlog::info!( + "Starting state keeper. Next l1 batch to seal: {}, Next miniblock to seal: {}", + self.io.current_l1_batch_number(), + self.io.current_miniblock_number() + ); + + // Re-execute pending batch if it exists. Otherwise, initialize a new batch. 
+ let PendingBatchData { + params, + txs: txs_to_reexecute, + } = match self.io.load_pending_batch() { + Some(params) => { + vlog::info!( + "There exists a pending batch consisting of {} miniblocks, the first one is {}", + params.txs.len(), + params + .txs + .first() + .map(|(number, _)| number) + .expect("Empty pending block represented as Some") + ); + params + } + None => { + vlog::info!("There is no open pending batch, starting a new empty batch"); + PendingBatchData { + params: self.wait_for_new_batch_params()?, + txs: Vec::new(), + } + } + }; + + let (mut block_context, mut block_properties) = params; + + let mut updates_manager = UpdatesManager::new(&block_context); + + let mut batch_executor = self + .batch_executor_base + .init_batch(block_context, block_properties); + self.restore_state(&batch_executor, &mut updates_manager, txs_to_reexecute); + + loop { + self.check_if_cancelled()?; + + // This function will run until the batch can be sealed. + self.process_l1_batch(&batch_executor, &mut updates_manager)?; + + // Finish current batch. + if !updates_manager.miniblock.executed_transactions.is_empty() { + let new_timestamp = self.io.seal_miniblock(&updates_manager); + updates_manager.seal_miniblock(new_timestamp); + } + let block_result = batch_executor.finish_batch(); + self.io.seal_l1_batch( + block_result, + updates_manager, + block_context.inner_block_context(), + ); + + // Start the new batch. 
+ (block_context, block_properties) = self.wait_for_new_batch_params()?; + updates_manager = UpdatesManager::new(&block_context); + batch_executor = self + .batch_executor_base + .init_batch(block_context, block_properties); + } + } + + fn check_if_cancelled(&self) -> Result<(), Canceled> { + if *self.stop_receiver.borrow() { + return Err(Canceled); + } + Ok(()) + } + + fn wait_for_new_batch_params( + &mut self, + ) -> Result<(BlockContextMode, BlockProperties), Canceled> { + let params = loop { + if let Some(params) = self.io.wait_for_new_batch_params(POLL_WAIT_DURATION) { + break params; + } + self.check_if_cancelled()?; + }; + Ok(params) + } + + /// Applies the "pending state" on the `UpdatesManager`. + /// Pending state means transactions that were executed before the server restart. Before we continue processing the + /// batch, we need to restore the state. We must ensure that every transaction is executed successfully. + fn restore_state( + &mut self, + batch_executor: &BatchExecutorHandle, + updates_manager: &mut UpdatesManager, + txs_to_reexecute: Vec<(MiniblockNumber, Vec)>, + ) { + let mut total_batch_updated_slots = 0; + + for (miniblock_number, txs) in txs_to_reexecute { + vlog::info!( + "Starting to reexecute transactions from sealed miniblock {}", + miniblock_number + ); + for tx in txs { + let result = batch_executor.execute_tx(tx.clone()); + + if !result.success() { + let err = result.err().unwrap(); + panic!( + "Re-executing stored tx failed. Tx: {:?}. 
Err: {:?}", + tx, err + ) + }; + let tx_execution_result = result.tx_result.unwrap(); + let tx_execution_status = tx_execution_result.status; + + let ExecutionMetricsForCriteria { + storage_updates: storage_updates_this_tx, + l1_gas: tx_l1_gas_this_tx, + execution_metrics: tx_execution_metrics, + } = result.tx_metrics.unwrap(); + total_batch_updated_slots += storage_updates_this_tx; + + updates_manager.extend_from_executed_transaction( + &tx, + tx_execution_result, + tx_l1_gas_this_tx, + tx_execution_metrics, + ); + vlog::debug!( + "finished reexecuting tx {} by {} (is_l1: {}) (#{} in l1 batch {}) \ + (#{} in miniblock {}) status: {:?}. New modified storage slots: {}, \ + total in l1 batch: {}, L1 gas spent: {:?}, total in l1 batch: {:?}, \ + tx execution metrics: {:?}, block execution metrics: {:?}", + tx.hash(), + tx.initiator_account(), + tx.is_l1(), + updates_manager.pending_executed_transactions_len(), + self.io.current_l1_batch_number().0, + updates_manager.miniblock.executed_transactions.len(), + miniblock_number, + tx_execution_status, + storage_updates_this_tx, + total_batch_updated_slots, + tx_l1_gas_this_tx, + updates_manager.pending_l1_gas_count(), + &tx_execution_metrics, + updates_manager.pending_execution_metrics(), + ); + } + + // It's OK to use substitute values here even though we're re-executing the old blocks, + // since the correct values are already persisted in the DB and won't be overwritten. + updates_manager.seal_miniblock(updates_manager.batch_timestamp()); + } + } + + fn process_l1_batch( + &mut self, + batch_executor: &BatchExecutorHandle, + updates_manager: &mut UpdatesManager, + ) -> Result<(), Canceled> { + loop { + self.check_if_cancelled()?; + if self + .sealer + .should_seal_l1_batch_unconditionally(updates_manager) + { + return Ok(()); + } + let Some(tx) = self.io.wait_for_next_tx(POLL_WAIT_DURATION) else { + vlog::trace!("No new transactions. 
Waiting!"); + continue; + }; + + let (seal_resolution, exec_result) = + self.process_one_tx(batch_executor, updates_manager, &tx); + + match &seal_resolution { + SealResolution::NoSeal => { + let ExecutionMetricsForCriteria { + l1_gas: tx_l1_gas_this_tx, + execution_metrics: tx_execution_metrics, + .. + } = exec_result.tx_metrics.unwrap(); + updates_manager.extend_from_executed_transaction( + &tx, + exec_result.tx_result.unwrap(), + tx_l1_gas_this_tx, + tx_execution_metrics, + ); + if self.sealer.should_seal_miniblock(updates_manager) { + let new_timestamp = self.io.seal_miniblock(updates_manager); + updates_manager.seal_miniblock(new_timestamp); + } + } + SealResolution::IncludeAndSeal => { + let ExecutionMetricsForCriteria { + l1_gas: tx_l1_gas_this_tx, + execution_metrics: tx_execution_metrics, + .. + } = exec_result.tx_metrics.unwrap(); + updates_manager.extend_from_executed_transaction( + &tx, + exec_result.tx_result.unwrap(), + tx_l1_gas_this_tx, + tx_execution_metrics, + ); + } + SealResolution::ExcludeAndSeal => { + batch_executor.rollback_last_tx(); + self.io.rollback(&tx); + } + SealResolution::Unexecutable(reason) => { + batch_executor.rollback_last_tx(); + self.io.reject(&tx, reason); + } + }; + + if seal_resolution.should_seal() { + return Ok(()); + } + } + } + + /// Executes one transaction in the batch executor, and then decides whether the batch should be sealed. + /// Batch may be sealed because of one of the following reasons: + /// 1. The VM entered an incorrect state (e.g. out of gas). In that case, we must revert the transaction and seal + /// the blcok. + /// 2. Seal manager decided that batch is ready to be sealed. 
+ fn process_one_tx( + &mut self, + batch_executor: &BatchExecutorHandle, + updates_manager: &UpdatesManager, + tx: &Transaction, + ) -> (SealResolution, TxExecutionResult) { + let exec_result = batch_executor.execute_tx(tx.clone()); + let TxExecutionResult { + tx_result, + bootloader_dry_run_result, + tx_metrics, + bootloader_dry_run_metrics, + } = exec_result.clone(); + + match tx_result { + Err(TxRevertReason::BootloaderOutOfGas) => { + metrics::increment_counter!( + "server.tx_aggregation.reason", + "criterion" => "bootloader_tx_out_of_gas", + "seal_resolution" => "exclude_and_seal", + ); + (SealResolution::ExcludeAndSeal, exec_result) + } + Err(rejection) => ( + SealResolution::Unexecutable(rejection.to_string()), + exec_result, + ), + Ok(tx_execution_result) => { + let tx_execution_status = tx_execution_result.status; + let ExecutionMetricsForCriteria { + storage_updates: storage_updates_this_tx, + l1_gas: tx_l1_gas_this_tx, + execution_metrics: tx_execution_metrics, + } = tx_metrics.unwrap(); + + vlog::debug!( + "finished tx {:?} by {:?} (is_l1: {}) (#{} in l1 batch {}) (#{} in miniblock {}) \ + status: {:?}. New modified storage slots: {}, L1 gas spent: {:?}, total in l1 batch: {:?}, \ + tx execution metrics: {:?}, block execution metrics: {:?}", + tx.hash(), + tx.initiator_account(), + tx.is_l1(), + updates_manager.pending_executed_transactions_len() + 1, + self.io.current_l1_batch_number().0, + updates_manager.miniblock.executed_transactions.len() + 1, + self.io.current_miniblock_number().0, + tx_execution_status, + storage_updates_this_tx, + tx_l1_gas_this_tx, + updates_manager.pending_l1_gas_count() + tx_l1_gas_this_tx, + &tx_execution_metrics, + updates_manager.pending_execution_metrics() + tx_execution_metrics, + ); + + if bootloader_dry_run_result.unwrap().is_err() { + // Exclude and seal. 
+ metrics::increment_counter!( + "server.tx_aggregation.reason", + "criterion" => "bootloader_block_tip_failed", + "seal_resolution" => "exclude_and_seal", + ); + return (SealResolution::ExcludeAndSeal, exec_result); + } + + let ExecutionMetricsForCriteria { + l1_gas: finish_block_l1_gas, + execution_metrics: finish_block_execution_metrics, + .. + } = bootloader_dry_run_metrics.unwrap(); + + let resolution = self.sealer.should_seal_l1_batch( + self.io.current_l1_batch_number().0, + updates_manager.batch_timestamp() as u128 * 1000, + updates_manager.pending_executed_transactions_len() + 1, + updates_manager.pending_execution_metrics() + + tx_execution_metrics + + finish_block_execution_metrics, + tx_execution_metrics + finish_block_execution_metrics, + updates_manager.pending_l1_gas_count() + + tx_l1_gas_this_tx + + finish_block_l1_gas, + tx_l1_gas_this_tx + finish_block_l1_gas, + ); + + (resolution, exec_result) + } + } + } +} diff --git a/core/bin/zksync_core/src/state_keeper/mempool_actor.rs b/core/bin/zksync_core/src/state_keeper/mempool_actor.rs new file mode 100644 index 000000000000..73ae4e280a1a --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/mempool_actor.rs @@ -0,0 +1,76 @@ +use super::types::MempoolGuard; +use crate::GasAdjuster; +use std::sync::Arc; +use std::time::Duration; +use std::time::Instant; +use tokio::sync::watch; +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_eth_client::clients::http_client::EthereumClient; + +#[derive(Debug)] +pub struct MempoolFetcher { + mempool: MempoolGuard, + gas_adjuster: Arc>, + sync_interval: Duration, + sync_batch_size: usize, +} + +impl MempoolFetcher { + pub fn new( + mempool: MempoolGuard, + gas_adjuster: Arc>, + config: &ZkSyncConfig, + ) -> Self { + Self { + mempool, + gas_adjuster, + sync_interval: config.chain.mempool.sync_interval(), + sync_batch_size: config.chain.mempool.sync_batch_size, + } + } + + pub async fn run( + mut self, + pool: ConnectionPool, + 
remove_stuck_txs: bool, + stuck_tx_timeout: Duration, + stop_receiver: watch::Receiver, + ) { + { + let mut storage = pool.access_storage().await; + if remove_stuck_txs { + let removed_txs = storage + .transactions_dal() + .remove_stuck_txs(stuck_tx_timeout); + vlog::info!("Number of stuck txs was removed: {}", removed_txs); + } + storage.transactions_dal().reset_mempool(); + } + + loop { + if *stop_receiver.borrow() { + vlog::info!("Stop signal received, mempool is shutting down"); + break; + } + let started_at = Instant::now(); + let mut storage = pool.access_storage().await; + let mempool_info = self.mempool.get_mempool_info(); + let l2_tx_filter = self.gas_adjuster.l2_tx_filter(); + + let (transactions, nonces) = storage.transactions_dal().sync_mempool( + mempool_info.stashed_accounts, + mempool_info.purged_accounts, + l2_tx_filter.gas_per_pubdata, + l2_tx_filter.fee_per_gas, + self.sync_batch_size, + ); + let all_transactions_loaded = transactions.len() < self.sync_batch_size; + self.mempool.insert(transactions, nonces); + metrics::histogram!("server.state_keeper.mempool_sync", started_at.elapsed()); + if all_transactions_loaded { + tokio::time::sleep(self.sync_interval).await; + } + } + } +} diff --git a/core/bin/zksync_core/src/state_keeper/mod.rs b/core/bin/zksync_core/src/state_keeper/mod.rs new file mode 100644 index 000000000000..6d3214ae7390 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/mod.rs @@ -0,0 +1,66 @@ +use std::sync::Arc; + +use tokio::sync::watch::Receiver; + +use zksync_config::constants::MAX_TXS_IN_BLOCK; +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_eth_client::EthInterface; + +use self::batch_executor::MainBatchExecutorBuilder; +use self::io::MempoolIO; +use crate::gas_adjuster::GasAdjuster; +use crate::state_keeper::seal_criteria::SealManager; + +pub(crate) use self::{ + keeper::ZkSyncStateKeeper, mempool_actor::MempoolFetcher, types::MempoolGuard, +}; + +mod batch_executor; +mod extractors; 
+mod io; +mod keeper; +pub(crate) mod mempool_actor; +pub(crate) mod seal_criteria; +#[cfg(test)] +mod tests; +mod types; +mod updates; + +pub(crate) fn start_state_keeper( + config: &ZkSyncConfig, + pool: &ConnectionPool, + mempool: MempoolGuard, + gas_adjuster: Arc>, + stop_receiver: Receiver, +) -> ZkSyncStateKeeper +where + E: EthInterface + 'static + std::fmt::Debug + Send + Sync, +{ + assert!( + config.chain.state_keeper.transaction_slots <= MAX_TXS_IN_BLOCK, + "Configured transaction_slots must be lower than the bootloader constant MAX_TXS_IN_BLOCK" + ); + + let batch_executor_base = MainBatchExecutorBuilder::new( + config.db.state_keeper_db_path.clone(), + pool.clone(), + config.chain.state_keeper.reexecute_each_tx, + config.chain.state_keeper.max_allowed_l2_tx_gas_limit.into(), + ); + let io = MempoolIO::new( + mempool, + pool.clone(), + config.chain.state_keeper.fee_account_addr, + config.chain.operations_manager.delay_interval(), + gas_adjuster, + ); + + let sealer = SealManager::new(config.chain.state_keeper.clone()); + ZkSyncStateKeeper::new( + stop_receiver, + Box::new(io), + Box::new(batch_executor_base), + sealer, + ) +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/function.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/function.rs new file mode 100644 index 000000000000..41fd4310d714 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/function.rs @@ -0,0 +1,55 @@ +pub(self) use zksync_config::configs::chain::StateKeeperConfig; +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; + +use super::{SealCriterion, SealResolution}; + +/// Represents a thread-safe function pointer. +type CustomSealerFn = dyn Fn( + &StateKeeperConfig, + u128, + usize, + ExecutionMetrics, + ExecutionMetrics, + BlockGasCount, + BlockGasCount, + ) -> SealResolution + + Send + + 'static; + +/// Custom criterion made from a user-provided function. Allows to turn your closure into a seal criterion. 
+/// Mostly useful for tests. +pub(crate) struct FnCriterion(Box); + +impl std::fmt::Debug for FnCriterion { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_tuple("FnCriterion").finish() + } +} + +impl SealCriterion for FnCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + block_open_timestamp_ms: u128, + tx_count: usize, + block_execution_metrics: ExecutionMetrics, + tx_execution_metrics: ExecutionMetrics, + block_gas_count: BlockGasCount, + tx_gas_count: BlockGasCount, + ) -> SealResolution { + self.0( + config, + block_open_timestamp_ms, + tx_count, + block_execution_metrics, + tx_execution_metrics, + block_gas_count, + tx_gas_count, + ) + } + + fn prom_criterion_name(&self) -> &'static str { + "function_sealer" + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/gas.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/gas.rs new file mode 100644 index 000000000000..80e19d8d39a2 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/gas.rs @@ -0,0 +1,173 @@ +use super::{SealCriterion, SealResolution, StateKeeperConfig}; +use crate::gas_tracker::new_block_gas_count; +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; + +/// This is a temporary solution +/// Instead of checking for gas it simply checks that the contracts' +/// bytecode is large enough. 
+/// Among all the data which will be published on-chain the contracts' +/// bytecode is by far the largest one and with high probability +/// the slots will run out before the other pubdata becomes too big +#[derive(Debug)] +pub(crate) struct GasCriterion; + +impl SealCriterion for GasCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + _tx_count: usize, + _block_execution_metrics: ExecutionMetrics, + _tx_execution_metrics: ExecutionMetrics, + block_gas_count: BlockGasCount, + tx_gas_count: BlockGasCount, + ) -> SealResolution { + if (tx_gas_count + new_block_gas_count()).has_greater_than( + (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() as u32, + ) { + SealResolution::Unexecutable("Transaction requires too much gas".into()) + } else if block_gas_count.has_greater_than(config.max_single_tx_gas) { + SealResolution::ExcludeAndSeal + } else if block_gas_count.has_greater_than( + (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() as u32, + ) { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "gas" + } +} + +#[cfg(test)] +mod tests { + + use super::{new_block_gas_count, BlockGasCount, GasCriterion, SealCriterion, SealResolution}; + use zksync_config::ZkSyncConfig; + + #[test] + fn test_gas_seal_criterion() { + let config = ZkSyncConfig::from_env().chain.state_keeper; + let criterion = GasCriterion; + + // Empty block should fit into gas criterion. 
+ let empty_block_gas = new_block_gas_count(); + let empty_block_resolution = criterion.should_seal( + &config, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + empty_block_gas, + Default::default(), + ); + assert_eq!(empty_block_resolution, SealResolution::NoSeal); + let tx_gas = BlockGasCount { + commit: config.max_single_tx_gas + 1, + prove: 0, + execute: 0, + }; + // Transaction that needs more gas than a block limit should be unexecutable. + let huge_transaction_resolution = criterion.should_seal( + &config, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + empty_block_gas + tx_gas, + tx_gas, + ); + assert_eq!( + huge_transaction_resolution, + SealResolution::Unexecutable("Transaction requires too much gas".into()) + ); + + // Check criterion workflow + let tx_gas = BlockGasCount { + commit: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.commit, + prove: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.prove, + execute: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.execute, + }; + let resolution_after_first_tx = criterion.should_seal( + &config, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + empty_block_gas + tx_gas, + tx_gas, + ); + assert_eq!(resolution_after_first_tx, SealResolution::NoSeal); + + // Check criterion workflow + let tx_gas = BlockGasCount { + commit: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.commit + - 1, + prove: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.prove + - 1, + execute: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.execute + - 1, + }; + + let 
block_gas = BlockGasCount { + commit: (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() + as u32 + + 1, + prove: (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage).round() + as u32 + + 1, + execute: (config.max_single_tx_gas as f64 * config.close_block_at_gas_percentage) + .round() as u32 + + 1, + }; + let resolution_after_first_tx = criterion.should_seal( + &config, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + block_gas, + tx_gas, + ); + assert_eq!(resolution_after_first_tx, SealResolution::IncludeAndSeal); + + // Check criterion workflow + let tx_gas = BlockGasCount { + commit: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.commit, + prove: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.prove, + execute: (config.max_single_tx_gas as f64 * config.reject_tx_at_gas_percentage).round() + as u32 + - empty_block_gas.execute, + }; + let resolution_after_first_tx = criterion.should_seal( + &config, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + empty_block_gas + tx_gas + tx_gas, + tx_gas, + ); + assert_eq!(resolution_after_first_tx, SealResolution::ExcludeAndSeal); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/geometry_seal_criteria.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/geometry_seal_criteria.rs new file mode 100644 index 000000000000..63a28c8f129b --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/geometry_seal_criteria.rs @@ -0,0 +1,257 @@ +use std::fmt::Debug; +use vm::MAX_CYCLES_FOR_TX; +use zksync_config::configs::chain::StateKeeperConfig; +use zksync_types::circuit::GEOMETRY_CONFIG; +use zksync_types::{block::BlockGasCount, tx::ExecutionMetrics}; +// Local uses +use crate::state_keeper::seal_criteria::{SealCriterion, SealResolution}; + +// 
Collected vm execution metrics should fit into geometry limits. +// Otherwise witness generation will fail and proof won't be generated. + +#[derive(Debug, Default)] +pub struct BytecodeHashesCriterion; +#[derive(Debug, Default)] +pub struct RepeatedWritesCriterion; +#[derive(Debug, Default)] +pub struct InitialWritesCriterion; +#[derive(Debug, Default)] +pub struct MaxCyclesCriterion; + +trait MetricExtractor { + const PROM_METRIC_CRITERION_NAME: &'static str; + fn limit_per_block() -> usize; + fn extract(metric: &ExecutionMetrics) -> usize; +} + +impl SealCriterion for T +where + T: MetricExtractor + Debug + Send + Sync + 'static, +{ + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + _tx_count: usize, + block_execution_metrics: ExecutionMetrics, + tx_execution_metrics: ExecutionMetrics, + _block_gas_count: BlockGasCount, + _tx_gas_count: BlockGasCount, + ) -> SealResolution { + if T::extract(&tx_execution_metrics) + > (T::limit_per_block() as f64 * config.reject_tx_at_geometry_percentage).round() + as usize + { + SealResolution::Unexecutable("ZK proof cannot be generated for a transaction".into()) + } else if T::extract(&block_execution_metrics) >= T::limit_per_block() { + SealResolution::ExcludeAndSeal + } else if T::extract(&block_execution_metrics) + > (T::limit_per_block() as f64 * config.close_block_at_geometry_percentage).round() + as usize + { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + T::PROM_METRIC_CRITERION_NAME + } +} + +impl MetricExtractor for BytecodeHashesCriterion { + const PROM_METRIC_CRITERION_NAME: &'static str = "used_contract_hashes"; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_code_decommitter_sorter as usize + } + + fn extract(metrics: &ExecutionMetrics) -> usize { + metrics.contracts_used + } +} + +impl MetricExtractor for RepeatedWritesCriterion { + const PROM_METRIC_CRITERION_NAME: &'static 
str = "repeated_storage_writes"; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_repeated_writes_pubdata_hasher as usize + } + + fn extract(metrics: &ExecutionMetrics) -> usize { + metrics.repeated_storage_writes + } +} + +impl MetricExtractor for InitialWritesCriterion { + const PROM_METRIC_CRITERION_NAME: &'static str = "initial_storage_writes"; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_initial_writes_pubdata_hasher as usize + } + + fn extract(metrics: &ExecutionMetrics) -> usize { + metrics.initial_storage_writes + } +} + +impl MetricExtractor for MaxCyclesCriterion { + const PROM_METRIC_CRITERION_NAME: &'static str = "max_cycles"; + + fn limit_per_block() -> usize { + MAX_CYCLES_FOR_TX as usize + } + + fn extract(metrics: &ExecutionMetrics) -> usize { + metrics.cycles_used as usize + } +} + +#[cfg(test)] +mod tests { + use zksync_config::configs::chain::StateKeeperConfig; + use zksync_types::tx::ExecutionMetrics; + + use crate::state_keeper::seal_criteria::geometry_seal_criteria::MaxCyclesCriterion; + use crate::state_keeper::seal_criteria::{SealCriterion, SealResolution}; + + use super::{ + BytecodeHashesCriterion, InitialWritesCriterion, MetricExtractor, RepeatedWritesCriterion, + }; + + fn get_config() -> StateKeeperConfig { + StateKeeperConfig { + close_block_at_geometry_percentage: 0.9, + reject_tx_at_geometry_percentage: 0.9, + ..Default::default() + } + } + + fn test_no_seal_block_resolution( + block_execution_metrics: ExecutionMetrics, + criterion: &dyn SealCriterion, + ) { + let config = get_config(); + let block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(block_resolution, SealResolution::NoSeal); + } + + fn test_include_and_seal_block_resolution( + block_execution_metrics: ExecutionMetrics, + criterion: &dyn SealCriterion, + ) { + let config = get_config(); + let 
block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(block_resolution, SealResolution::IncludeAndSeal); + } + + fn test_exclude_and_seal_block_resolution( + block_execution_metrics: ExecutionMetrics, + criterion: &dyn SealCriterion, + ) { + let config = get_config(); + let block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(block_resolution, SealResolution::ExcludeAndSeal); + } + + fn test_unexecutable_tx_resolution( + tx_execution_metrics: ExecutionMetrics, + criterion: &dyn SealCriterion, + ) { + let config = get_config(); + let block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + Default::default(), + tx_execution_metrics, + Default::default(), + Default::default(), + ); + + assert_eq!( + block_resolution, + SealResolution::Unexecutable("ZK proof cannot be generated for a transaction".into()) + ); + } + + macro_rules! 
test_scenario { + ($criterion: tt, $metric_name: ident, $metric_type: ty) => { + let config = get_config(); + let block_execution_metrics = ExecutionMetrics { + $metric_name: ($criterion::limit_per_block() / 2) as $metric_type, + ..Default::default() + }; + test_no_seal_block_resolution(block_execution_metrics, &$criterion); + + let block_execution_metrics = ExecutionMetrics { + $metric_name: ($criterion::limit_per_block() - 1) as $metric_type, + ..Default::default() + }; + + test_include_and_seal_block_resolution(block_execution_metrics, &$criterion); + + let block_execution_metrics = ExecutionMetrics { + $metric_name: ($criterion::limit_per_block()) as $metric_type, + ..Default::default() + }; + + test_exclude_and_seal_block_resolution(block_execution_metrics, &$criterion); + + let tx_execution_metrics = ExecutionMetrics { + $metric_name: ($criterion::limit_per_block() as f64 + * config.reject_tx_at_geometry_percentage + + 1f64) + .round() as $metric_type, + ..Default::default() + }; + + test_unexecutable_tx_resolution(tx_execution_metrics, &$criterion); + }; + } + + #[test] + fn bytecode_hashes_criterion() { + test_scenario!(BytecodeHashesCriterion, contracts_used, usize); + } + + #[test] + fn repeated_writes_seal_criterion() { + test_scenario!(RepeatedWritesCriterion, repeated_storage_writes, usize); + } + + #[test] + fn initial_writes_seal_criterion() { + test_scenario!(InitialWritesCriterion, initial_storage_writes, usize); + } + + #[test] + fn initial_max_cycles_seal_criterion() { + test_scenario!(MaxCyclesCriterion, cycles_used, u32); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/mod.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/mod.rs new file mode 100644 index 000000000000..d626db6c780c --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/mod.rs @@ -0,0 +1,268 @@ +//! Seal criteria is a module system for checking whether a currently open block must be sealed. +//! +//! 
Criteria for sealing may vary, for example: +//! +//! - No transactions slots left. +//! - We've reached timeout for sealing block. +//! - We've reached timeout for sealing *aggregated* block. +//! - We won't fit into the acceptable gas limit with any more transactions. +//! +//! Maintaining all the criteria in one place has proven itself to be very error-prone, +//! thus now every criterion is independent of the others. + +use std::fmt::Debug; +pub(self) use zksync_config::configs::chain::StateKeeperConfig; +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; +use zksync_utils::time::{millis_since, millis_since_epoch}; + +use super::updates::UpdatesManager; + +pub(crate) mod function; +mod gas; +mod geometry_seal_criteria; +mod pubdata_bytes; +pub(crate) mod slots; +mod timeout; + +/// Reported decision regarding block sealing. +#[derive(Debug, Clone, PartialEq)] +pub enum SealResolution { + /// Block should not be sealed right now. + NoSeal, + /// Latest transaction should be included into the block and sealed after that. + IncludeAndSeal, + /// Latest transaction should be excluded from the block and become the first + /// tx in the next block. + /// While it may be kinda counter-intuitive that we first execute transaction and just then + /// decided whether we should include it into the block or not, it is required by the architecture of + /// zkSync Era. We may not know, for example, how much gas block will consume, because 1) smart contract + /// execution is hard to predict and 2) we may have writes to the same storage slots, which will save us + /// gas. + ExcludeAndSeal, + /// Unexecutable means that the last transaction of the block cannot be executed even + /// if the block will consist of it solely. Such a transaction must be rejected. + /// + /// Contains a reason for why transaction was considered unexecutable. 
+ Unexecutable(String), +} + +impl SealResolution { + /// Compares two seal resolutions and chooses the one that is stricter. + /// `Unexecutable` is stricter than `ExcludeAndSeal`. + /// `ExcludeAndSeal` is stricter than `IncludeAndSeal`. + /// `IncludeAndSeal` is stricter than `NoSeal`. + pub fn stricter(self, other: SealResolution) -> SealResolution { + match (self, other) { + (SealResolution::Unexecutable(reason), _) + | (_, SealResolution::Unexecutable(reason)) => SealResolution::Unexecutable(reason), + (SealResolution::ExcludeAndSeal, _) | (_, SealResolution::ExcludeAndSeal) => { + SealResolution::ExcludeAndSeal + } + (SealResolution::IncludeAndSeal, _) | (_, SealResolution::IncludeAndSeal) => { + SealResolution::IncludeAndSeal + } + _ => SealResolution::NoSeal, + } + } + + /// Returns `true` if L1 batch should be sealed according to this resolution. + pub fn should_seal(self) -> bool { + matches!( + self, + SealResolution::IncludeAndSeal | SealResolution::ExcludeAndSeal + ) + } +} + +pub(crate) trait SealCriterion: Debug + Send + 'static { + #[allow(clippy::too_many_arguments)] + fn should_seal( + &self, + config: &StateKeeperConfig, + block_open_timestamp_ms: u128, + tx_count: usize, + block_execution_metrics: ExecutionMetrics, + tx_execution_metrics: ExecutionMetrics, + block_gas_count: BlockGasCount, + tx_gas_count: BlockGasCount, + ) -> SealResolution; + // We need self here only for rust restrictions for creating an object from trait + // https://doc.rust-lang.org/reference/items/traits.html#object-safety + fn prom_criterion_name(&self) -> &'static str; +} + +/// Sealer function that returns a boolean. +type SealerFn = dyn Fn(&UpdatesManager) -> bool + Send; + +pub(crate) struct SealManager { + config: StateKeeperConfig, + /// Primary sealers set that is used to check if batch should be sealed after executing a transaction. + sealers: Vec>, + /// Unconditional batch sealer, i.e. one that can be used if we should seal the batch *without* executing a tx. 
+ unconditional_sealer: Box, + /// Miniblock sealer function used to determine if we should seal the miniblock. + miniblock_sealer: Box, +} + +impl Debug for SealManager { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("SealManager") + .field("config", &self.config) + .field("sealers", &self.sealers) + .finish() + } +} + +impl SealManager { + /// Creates a default pre-configured seal manager. + pub(crate) fn new(config: StateKeeperConfig) -> Self { + let sealers: Vec> = Self::get_default_sealers(); + let unconditional_sealer = Self::timeout_batch_sealer(config.block_commit_deadline_ms); + let miniblock_sealer = Self::timeout_miniblock_sealer(config.miniblock_commit_deadline_ms); + + Self::custom(config, sealers, unconditional_sealer, miniblock_sealer) + } + + /// Allows to create a seal manager object from externally-defined sealers. + /// Mostly useful for test configuration. + pub(crate) fn custom( + config: StateKeeperConfig, + sealers: Vec>, + unconditional_sealer: Box, + miniblock_sealer: Box, + ) -> Self { + Self { + config, + sealers, + unconditional_sealer, + miniblock_sealer, + } + } + + /// Creates a sealer function that would seal the batch because of the timeout. + fn timeout_batch_sealer(block_commit_deadline_ms: u64) -> Box { + Box::new(move |manager| { + let should_seal = millis_since(manager.batch_timestamp()) > block_commit_deadline_ms; + if should_seal { + metrics::increment_counter!( + "server.tx_aggregation.reason", + "criterion" => "no_txs_timeout" + ); + vlog::info!( + "l1_batch_timeout_triggered without new txs: {:?} {:?} {:?}", + manager.batch_timestamp(), + block_commit_deadline_ms, + millis_since_epoch() + ); + } + should_seal + }) + } + + /// Creates a sealer function that would seal the miniblock because of the timeout. 
+ fn timeout_miniblock_sealer(miniblock_commit_deadline_ms: u64) -> Box { + Box::new(move |manager| { + millis_since(manager.miniblock.timestamp) > miniblock_commit_deadline_ms + }) + } + + #[allow(clippy::too_many_arguments)] + pub(crate) fn should_seal_l1_batch( + &self, + l1_batch_number: u32, + block_open_timestamp_ms: u128, + tx_count: usize, + block_execution_metrics: ExecutionMetrics, + tx_execution_metrics: ExecutionMetrics, + block_gas_count: BlockGasCount, + tx_gas_count: BlockGasCount, + ) -> SealResolution { + let mut final_seal_resolution = SealResolution::NoSeal; + for sealer in &self.sealers { + let seal_resolution = sealer.should_seal( + &self.config, + block_open_timestamp_ms, + tx_count, + block_execution_metrics, + tx_execution_metrics, + block_gas_count, + tx_gas_count, + ); + match seal_resolution { + SealResolution::IncludeAndSeal => { + vlog::debug!( + "Seal block with resolution: IncludeAndSeal {} {} block: {:?}", + l1_batch_number, + sealer.prom_criterion_name(), + block_execution_metrics + ); + metrics::counter!( + "server.tx_aggregation.reason", + 1, + "criterion" => sealer.prom_criterion_name(), + "seal_resolution" => "include_and_seal", + ); + } + SealResolution::ExcludeAndSeal => { + vlog::debug!( + "Seal block with resolution: ExcludeAndSeal {} {} block: {:?}", + l1_batch_number, + sealer.prom_criterion_name(), + block_execution_metrics + ); + metrics::counter!( + "server.tx_aggregation.reason", + 1, + "criterion" => sealer.prom_criterion_name(), + "seal_resolution" => "exclude_and_seal", + ); + } + SealResolution::Unexecutable(_) => { + vlog::debug!( + "Unexecutable {} {} block: {:?}", + l1_batch_number, + sealer.prom_criterion_name(), + block_execution_metrics + ); + metrics::counter!( + "server.tx_aggregation.reason", + 1, + "criterion" => sealer.prom_criterion_name(), + "seal_resolution" => "unexecutable", + ); + } + _ => {} + } + + final_seal_resolution = final_seal_resolution.stricter(seal_resolution); + } + 
final_seal_resolution + } + + pub(crate) fn should_seal_l1_batch_unconditionally( + &self, + updates_manager: &UpdatesManager, + ) -> bool { + updates_manager.pending_executed_transactions_len() != 0 + && (self.unconditional_sealer)(updates_manager) + } + + pub(crate) fn should_seal_miniblock(&self, updates_manager: &UpdatesManager) -> bool { + !updates_manager.miniblock.executed_transactions.is_empty() + && (self.miniblock_sealer)(updates_manager) + } + + pub(crate) fn get_default_sealers() -> Vec> { + let sealers: Vec> = vec![ + Box::new(slots::SlotsCriterion), + Box::new(gas::GasCriterion), + Box::new(pubdata_bytes::PubDataBytesCriterion), + Box::new(geometry_seal_criteria::BytecodeHashesCriterion), + Box::new(geometry_seal_criteria::InitialWritesCriterion), + Box::new(geometry_seal_criteria::RepeatedWritesCriterion), + Box::new(geometry_seal_criteria::MaxCyclesCriterion), + ]; + sealers + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/pubdata_bytes.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/pubdata_bytes.rs new file mode 100644 index 000000000000..e690efdb97a5 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/pubdata_bytes.rs @@ -0,0 +1,143 @@ +use zksync_types::tx::ExecutionMetrics; +use zksync_types::{block::BlockGasCount, MAX_PUBDATA_PER_L1_BATCH}; + +use super::{SealCriterion, SealResolution, StateKeeperConfig}; + +#[derive(Debug)] +pub struct PubDataBytesCriterion; + +impl SealCriterion for PubDataBytesCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + _tx_count: usize, + block_execution_metrics: ExecutionMetrics, + tx_execution_metrics: ExecutionMetrics, + _block_gas_count: BlockGasCount, + _tx_gas_count: BlockGasCount, + ) -> SealResolution { + let max_pubdata_per_l1_batch = MAX_PUBDATA_PER_L1_BATCH as usize; + + let block_size = block_execution_metrics.size(); + if tx_execution_metrics.size() + > (max_pubdata_per_l1_batch as f64 * 
config.reject_tx_at_eth_params_percentage).round() + as usize + { + SealResolution::Unexecutable( + "Transaction cannot be sent to L1 due to pubdata limits".into(), + ) + } else if block_size > max_pubdata_per_l1_batch { + SealResolution::ExcludeAndSeal + } else if block_size + > (max_pubdata_per_l1_batch as f64 * config.close_block_at_eth_params_percentage) + .round() as usize + && block_size < max_pubdata_per_l1_batch + { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "pub_data_size" + } +} + +#[cfg(test)] +mod tests { + use super::{PubDataBytesCriterion, SealCriterion, SealResolution}; + use crate::state_keeper::seal_criteria::pubdata_bytes::MAX_PUBDATA_PER_L1_BATCH; + use zksync_config::ZkSyncConfig; + use zksync_types::tx::ExecutionMetrics; + + #[test] + fn seal_criterion() { + let config = ZkSyncConfig::from_env().chain.state_keeper; + let criterion = PubDataBytesCriterion; + + let block_execution_metrics = ExecutionMetrics { + initial_storage_writes: 0, + repeated_storage_writes: 0, + contracts_deployed: 0, + contracts_used: 0, + gas_used: 0, + l2_l1_long_messages: (MAX_PUBDATA_PER_L1_BATCH as f64 + * config.close_block_at_eth_params_percentage + - 1.0) + .round() as usize, + published_bytecode_bytes: 0, + l2_l1_logs: 0, + vm_events: 0, + storage_logs: 0, + total_log_queries: 0, + cycles_used: 0, + }; + + let empty_block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(empty_block_resolution, SealResolution::NoSeal); + + let block_execution_metrics = ExecutionMetrics { + initial_storage_writes: 0, + repeated_storage_writes: 0, + contracts_deployed: 0, + contracts_used: 0, + gas_used: 0, + l2_l1_long_messages: (MAX_PUBDATA_PER_L1_BATCH as f64 + * config.close_block_at_eth_params_percentage + + 1f64) + .round() as usize, + 
published_bytecode_bytes: 0, + l2_l1_logs: 0, + vm_events: 0, + storage_logs: 0, + total_log_queries: 0, + cycles_used: 0, + }; + + let full_block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(full_block_resolution, SealResolution::IncludeAndSeal); + + let block_execution_metrics = ExecutionMetrics { + initial_storage_writes: 0, + repeated_storage_writes: 0, + contracts_deployed: 0, + contracts_used: 0, + gas_used: 0, + l2_l1_long_messages: MAX_PUBDATA_PER_L1_BATCH as usize + 1, + published_bytecode_bytes: 0, + l2_l1_logs: 0, + vm_events: 0, + storage_logs: 0, + total_log_queries: 0, + cycles_used: 0, + }; + let full_block_resolution = criterion.should_seal( + &config, + Default::default(), + 0, + block_execution_metrics, + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(full_block_resolution, SealResolution::ExcludeAndSeal); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/slots.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/slots.rs new file mode 100644 index 000000000000..eb9bb000479a --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/slots.rs @@ -0,0 +1,65 @@ +use super::{SealCriterion, SealResolution, StateKeeperConfig}; +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; + +/// Checks whether we should seal the block because we've run out of transaction slots. 
+#[derive(Debug)] +pub struct SlotsCriterion; + +impl SealCriterion for SlotsCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + _block_open_timestamp_ms: u128, + tx_count: usize, + _block_execution_metrics: ExecutionMetrics, + _tx_execution_metrics: ExecutionMetrics, + _block_gas_count: BlockGasCount, + _tx_gas_count: BlockGasCount, + ) -> SealResolution { + if tx_count >= config.transaction_slots { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "slots" + } +} + +#[cfg(test)] +mod tests { + + use super::{SealCriterion, SealResolution, SlotsCriterion}; + use zksync_config::ZkSyncConfig; + + #[test] + fn test_slots_seal_criterion() { + let config = ZkSyncConfig::from_env().chain.state_keeper; + let criterion = SlotsCriterion; + + let almost_full_block_resolution = criterion.should_seal( + &config, + Default::default(), + config.transaction_slots - 1, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(almost_full_block_resolution, SealResolution::NoSeal); + + let full_block_resolution = criterion.should_seal( + &config, + Default::default(), + config.transaction_slots, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(full_block_resolution, SealResolution::IncludeAndSeal); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/seal_criteria/timeout.rs b/core/bin/zksync_core/src/state_keeper/seal_criteria/timeout.rs new file mode 100644 index 000000000000..59e537428aa5 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/seal_criteria/timeout.rs @@ -0,0 +1,91 @@ +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; +use zksync_utils::time::millis_since_epoch; + +use super::{SealCriterion, SealResolution, StateKeeperConfig}; + +/// Checks whether we should seal the block because we've reached the block commit timeout. 
+#[derive(Debug)] +pub struct TimeoutCriterion; + +impl SealCriterion for TimeoutCriterion { + fn should_seal( + &self, + config: &StateKeeperConfig, + block_open_timestamp_ms: u128, + tx_count: usize, + _block_execution_metrics: ExecutionMetrics, + _tx_execution_metrics: ExecutionMetrics, + _block_gas_count: BlockGasCount, + _tx_gas_count: BlockGasCount, + ) -> SealResolution { + if tx_count == 0 { + return SealResolution::NoSeal; + } + + let current_timestamp = millis_since_epoch(); + + debug_assert!( + current_timestamp >= block_open_timestamp_ms, + "We can't go backwards in time" + ); + + if (current_timestamp - block_open_timestamp_ms) as u64 > config.block_commit_deadline_ms { + SealResolution::IncludeAndSeal + } else { + SealResolution::NoSeal + } + } + + fn prom_criterion_name(&self) -> &'static str { + "seal_criteria_timeout" + } +} + +#[cfg(test)] +mod tests { + + use super::{millis_since_epoch, SealCriterion, SealResolution, TimeoutCriterion}; + use zksync_config::ZkSyncConfig; + + #[test] + fn test_timeout_seal_criterion() { + let config = ZkSyncConfig::from_env().chain.state_keeper; + let criterion = TimeoutCriterion; + + // Empty block shouldn't be sealed by timeout + let empty_block_resolution = criterion.should_seal( + &config, + 0, + 0, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(empty_block_resolution, SealResolution::NoSeal); + + // Check criterion workflow + let no_timeout_resolution = criterion.should_seal( + &config, + millis_since_epoch(), + 1, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(no_timeout_resolution, SealResolution::NoSeal); + + let timeout_resolution = criterion.should_seal( + &config, + millis_since_epoch() - config.block_commit_deadline_ms as u128 - 1, + 1, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + assert_eq!(timeout_resolution, 
SealResolution::IncludeAndSeal); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/tests/mod.rs b/core/bin/zksync_core/src/state_keeper/tests/mod.rs new file mode 100644 index 000000000000..ea3bd16511a9 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/tests/mod.rs @@ -0,0 +1,137 @@ +use zksync_config::configs::chain::StateKeeperConfig; +use zksync_types::MiniblockNumber; + +use crate::state_keeper::seal_criteria::{slots::SlotsCriterion, SealManager}; + +use self::tester::{ + bootloader_tip_out_of_gas, pending_batch_data, random_tx, rejected_exec, successful_exec, + TestScenario, +}; + +mod tester; + +#[test] +fn basic_flow() { + let config = StateKeeperConfig { + transaction_slots: 2, + ..Default::default() + }; + let sealer = SealManager::custom( + config, + vec![Box::new(SlotsCriterion)], + Box::new(|_| false), + Box::new(|updates| updates.miniblock.executed_transactions.len() == 1), + ); + + TestScenario::new() + .next_tx("First tx", random_tx(1), successful_exec()) + .miniblock_sealed("Miniblock 1") + .next_tx("Second tx", random_tx(2), successful_exec()) + .miniblock_sealed("Miniblock 2") + .batch_sealed("Batch 1") + .run(sealer); +} + +#[test] +fn rejected_tx() { + let config = StateKeeperConfig { + transaction_slots: 2, + ..Default::default() + }; + let sealer = SealManager::custom( + config, + vec![Box::new(SlotsCriterion)], + Box::new(|_| false), + Box::new(|updates| updates.miniblock.executed_transactions.len() == 1), + ); + + let rejected_tx = random_tx(1); + TestScenario::new() + .next_tx("Rejected tx", rejected_tx.clone(), rejected_exec()) + .tx_rejected("Tx got rejected", rejected_tx, None) + .next_tx("Successful tx", random_tx(2), successful_exec()) + .miniblock_sealed("Miniblock with successful tx") + .next_tx("Second successful tx", random_tx(3), successful_exec()) + .miniblock_sealed("Second miniblock") + .batch_sealed("Batch with 2 successful txs") + .run(sealer); +} + +#[test] +fn bootloader_tip_out_of_gas_flow() { + let config 
= StateKeeperConfig { + transaction_slots: 2, + ..Default::default() + }; + let sealer = SealManager::custom( + config, + vec![Box::new(SlotsCriterion)], + Box::new(|_| false), + Box::new(|updates| updates.miniblock.executed_transactions.len() == 1), + ); + + let first_tx = random_tx(1); + let bootloader_out_of_gas_tx = random_tx(2); + let third_tx = random_tx(3); + TestScenario::new() + .next_tx("First tx", first_tx, successful_exec()) + .miniblock_sealed("Miniblock with 1st tx") + .next_tx( + "Tx -> Bootloader tip out of gas", + bootloader_out_of_gas_tx.clone(), + bootloader_tip_out_of_gas(), + ) + .tx_rollback( + "Last tx rolled back to seal the block", + bootloader_out_of_gas_tx.clone(), + ) + .batch_sealed("Batch sealed with 1 tx") + .next_tx( + "Same tx now succeeds", + bootloader_out_of_gas_tx, + successful_exec(), + ) + .miniblock_sealed("Miniblock with this tx sealed") + .next_tx("Second tx of the 2nd batch", third_tx, successful_exec()) + .miniblock_sealed("Miniblock with 2nd tx") + .batch_sealed("2nd batch sealed") + .run(sealer); +} + +#[test] +fn pending_batch_is_applied() { + let config = StateKeeperConfig { + transaction_slots: 3, + ..Default::default() + }; + let sealer = SealManager::custom( + config, + vec![Box::new(SlotsCriterion)], + Box::new(|_| false), + Box::new(|updates| updates.miniblock.executed_transactions.len() == 1), + ); + + let pending_batch = pending_batch_data(vec![ + (MiniblockNumber(1), vec![random_tx(1)]), + (MiniblockNumber(2), vec![random_tx(2)]), + ]); + + TestScenario::new() + .load_pending_batch(pending_batch) + .next_tx("Final tx of batch", random_tx(3), successful_exec()) + .miniblock_sealed_with("Miniblock with a single tx", |updates| { + assert_eq!( + updates.miniblock.executed_transactions.len(), + 1, + "Only one transaction should be in miniblock" + ); + }) + .batch_sealed_with("Batch sealed with all 3 txs", |_, updates, _| { + assert_eq!( + updates.l1_batch.executed_transactions.len(), + 3, + "There should be 3 
transactions in the batch" + ); + }) + .run(sealer); +} diff --git a/core/bin/zksync_core/src/state_keeper/tests/tester.rs b/core/bin/zksync_core/src/state_keeper/tests/tester.rs new file mode 100644 index 000000000000..faed1f947c47 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/tests/tester.rs @@ -0,0 +1,657 @@ +use std::{ + cell::RefCell, + collections::{HashMap, HashSet, VecDeque}, + sync::mpsc, + time::{Duration, Instant}, +}; + +use assert_matches::assert_matches; +use tokio::sync::watch; + +use vm::{ + utils::default_block_properties, + vm::{VmPartialExecutionResult, VmTxExecutionResult}, + vm_with_bootloader::{BlockContext, BlockContextMode, DerivedBlockContext}, + zk_evm::block_properties::BlockProperties, + VmBlockResult, VmExecutionResult, +}; +use zksync_types::{ + l2::L2Tx, tx::tx_execution_info::TxExecutionStatus, Address, L1BatchNumber, MiniblockNumber, + Nonce, Transaction, H256, U256, +}; + +use crate::state_keeper::{ + batch_executor::{BatchExecutorHandle, Command, L1BatchExecutorBuilder, TxExecutionResult}, + io::{PendingBatchData, StateKeeperIO}, + seal_criteria::SealManager, + types::ExecutionMetricsForCriteria, + updates::UpdatesManager, + ZkSyncStateKeeper, +}; + +const FEE_ACCOUNT: Address = Address::repeat_byte(0x11); + +/// Main entry for writing tests for the state keeper. +/// Represents a planned sequence of actions that would happen with the state keeper. +/// We defined a scenario by telling *exactly* what we expect to happen, and then launch the state keeper. +/// While state keeper progresses over the planned transactions, `TestScenario` makes sure that every action happens +/// according to the scenario. +/// +/// Every action requires a description: since in most scenarios there will be a lot of similar actions (e.g. `next_tx` +/// or `seal_miniblock`) it helps to see which action *exactly* caused a test failure. 
It's recommended to write +/// descriptions that are not only unique, but also will explain *why* we expected this action to happen. This way, +/// it would be easier for developer to find the problem. +/// +/// See any test in the `mod.rs` file to get a visual example. +#[derive(Debug)] +pub(crate) struct TestScenario { + actions: VecDeque, + pending_batch: Option, +} + +impl TestScenario { + pub(crate) fn new() -> Self { + Self { + actions: VecDeque::new(), + pending_batch: None, + } + } + + /// Adds a pending batch data that would be fed into the state keeper. + /// Note that during processing pending batch, state keeper do *not* call `seal_miniblock` method on the IO (since + /// it only recovers the temporary state). + pub(crate) fn load_pending_batch(mut self, pending_batch: PendingBatchData) -> Self { + self.pending_batch = Some(pending_batch); + self + } + + /// Expect the state keeper to request a transaction from IO. + /// Adds both a transaction and an outcome of this transaction (that would be returned to the state keeper from the + /// batch executor). + pub(crate) fn next_tx( + mut self, + description: &'static str, + tx: Transaction, + result: TxExecutionResult, + ) -> Self { + self.actions + .push_back(ScenarioItem::Tx(description, tx, result)); + self + } + + /// Expect the state keeper to rollback the transaction (i.e. return to the mempool). + pub(crate) fn tx_rollback(mut self, description: &'static str, tx: Transaction) -> Self { + self.actions + .push_back(ScenarioItem::Rollback(description, tx)); + self + } + + /// Expect the state keeper to reject the transaction. + /// `err` argument is an optional substring of the expected error message. If `None` is provided, any rejection + /// would work. If `Some` is provided, rejection reason would be checked against the provided substring. 
+ pub(crate) fn tx_rejected( + mut self, + description: &'static str, + tx: Transaction, + err: Option, + ) -> Self { + self.actions + .push_back(ScenarioItem::Reject(description, tx, err)); + self + } + + /// Expects the miniblock to be sealed. + pub(crate) fn miniblock_sealed(mut self, description: &'static str) -> Self { + self.actions + .push_back(ScenarioItem::MiniblockSeal(description, None)); + self + } + + /// Expects the miniblock to be sealed. + /// Accepts a function that would be given access to the received miniblock seal params, which can implement + /// additional assertions on the sealed miniblock. + pub(crate) fn miniblock_sealed_with( + mut self, + description: &'static str, + f: F, + ) -> Self { + self.actions + .push_back(ScenarioItem::MiniblockSeal(description, Some(Box::new(f)))); + self + } + + /// Expects the batch to be sealed. + pub(crate) fn batch_sealed(mut self, description: &'static str) -> Self { + self.actions + .push_back(ScenarioItem::BatchSeal(description, None)); + self + } + + /// Expects the batch to be sealed. + /// Accepts a function that would be given access to the received batch seal params, which can implement + /// additional assertions on the sealed batch. + pub(crate) fn batch_sealed_with< + F: FnOnce(&VmBlockResult, &UpdatesManager, &BlockContext) + Send + 'static, + >( + mut self, + description: &'static str, + f: F, + ) -> Self { + self.actions + .push_back(ScenarioItem::BatchSeal(description, Some(Box::new(f)))); + self + } + + /// Launches the test. + /// Provided `SealManager` is expected to be externally configured to adhere the written scenario logic. 
+ pub(crate) fn run(self, sealer: SealManager) { + assert!(!self.actions.is_empty(), "Test scenario can't be empty"); + + let batch_executor_base = TestBatchExecutorBuilder::new(&self); + + let (stop_sender, stop_receiver) = watch::channel(false); + let io = TestIO::new(stop_sender, self); + + let sk = ZkSyncStateKeeper::new( + stop_receiver, + Box::new(io), + Box::new(batch_executor_base), + sealer, + ); + + let sk_thread = std::thread::spawn(move || sk.run()); + + // We must assume that *theoretically* state keeper may ignore the stop signal from IO once scenario is + // completed, so we spawn it in a separate thread to not get test stuck. + let hard_timeout = Duration::from_secs(60); + let poll_interval = Duration::from_millis(50); + let start = Instant::now(); + while start.elapsed() <= hard_timeout { + if sk_thread.is_finished() { + sk_thread + .join() + .unwrap_or_else(|_| panic!("State keeper thread panicked")); + return; + } + std::thread::sleep(poll_interval); + } + panic!("State keeper test did not exit until the hard timeout, probably it got stuck"); + } +} + +/// Creates a random transaction. Provided tx number would be used as a transaction hash, +/// so it's easier to understand which transaction caused test to fail. +pub(crate) fn random_tx(tx_number: u64) -> Transaction { + let mut tx = L2Tx::new( + Default::default(), + Default::default(), + Nonce(0), + Default::default(), + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ); + // Set the `tx_number` as tx hash so if transaction causes problems, + // it'll be easier to understand which one. 
+ tx.set_input(H256::random().0.to_vec(), H256::from_low_u64_be(tx_number)); + tx.into() +} + +fn partial_execution_result() -> VmPartialExecutionResult { + VmPartialExecutionResult { + logs: Default::default(), + revert_reason: Default::default(), + contracts_used: Default::default(), + cycles_used: Default::default(), + } +} + +/// Creates a `TxExecutionResult` object denoting a successful tx execution. +pub(crate) fn successful_exec() -> TxExecutionResult { + let mut result = TxExecutionResult::new(Ok(VmTxExecutionResult { + status: TxExecutionStatus::Success, + result: partial_execution_result(), + gas_refunded: 0, + operator_suggested_refund: 0, + })); + result.add_tx_metrics(ExecutionMetricsForCriteria { + storage_updates: Default::default(), + l1_gas: Default::default(), + execution_metrics: Default::default(), + }); + result.add_bootloader_result(Ok(partial_execution_result())); + result.add_bootloader_metrics(ExecutionMetricsForCriteria { + storage_updates: Default::default(), + l1_gas: Default::default(), + execution_metrics: Default::default(), + }); + result +} + +/// Creates a `TxExecutionResult` object denoting a tx that was rejected. +pub(crate) fn rejected_exec() -> TxExecutionResult { + TxExecutionResult::new(Err(vm::TxRevertReason::InnerTxError)) +} + +/// Creates a `TxExecutionResult` object denoting a transaction that was executed, but caused a bootloader tip out of +/// gas error. 
+pub(crate) fn bootloader_tip_out_of_gas() -> TxExecutionResult { + let mut result = TxExecutionResult::new(Ok(VmTxExecutionResult { + status: TxExecutionStatus::Success, + result: partial_execution_result(), + gas_refunded: 0, + operator_suggested_refund: 0, + })); + result.add_tx_metrics(ExecutionMetricsForCriteria { + storage_updates: Default::default(), + l1_gas: Default::default(), + execution_metrics: Default::default(), + }); + result.add_bootloader_result(Err(vm::TxRevertReason::BootloaderOutOfGas)); + result +} + +/// Creates a mock `PendingBatchData` object containing the provided sequence of miniblocks. +pub(crate) fn pending_batch_data( + txs: Vec<(MiniblockNumber, Vec)>, +) -> PendingBatchData { + let block_properties = default_block_properties(); + + let context = BlockContext { + block_number: 1, + block_timestamp: 1, + l1_gas_price: 1, + fair_l2_gas_price: 1, + operator_address: FEE_ACCOUNT, + }; + let derived_context = DerivedBlockContext { + context, + base_fee: 1, + }; + + let params = ( + BlockContextMode::NewBlock(derived_context, Default::default()), + block_properties, + ); + + PendingBatchData { params, txs } +} + +#[allow(clippy::type_complexity, clippy::large_enum_variant)] // It's OK for tests. 
+enum ScenarioItem { + Tx(&'static str, Transaction, TxExecutionResult), + Rollback(&'static str, Transaction), + Reject(&'static str, Transaction, Option), + MiniblockSeal( + &'static str, + Option>, + ), + BatchSeal( + &'static str, + Option>, + ), +} + +impl std::fmt::Debug for ScenarioItem { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Tx(descr, tx, result) => f + .debug_tuple("Tx") + .field(descr) + .field(tx) + .field(result) + .finish(), + Self::Rollback(descr, tx) => f.debug_tuple("Rollback").field(descr).field(tx).finish(), + Self::Reject(descr, tx, err) => f + .debug_tuple("Reject") + .field(descr) + .field(tx) + .field(err) + .finish(), + Self::MiniblockSeal(descr, _) => f.debug_tuple("MiniblockSeal").field(descr).finish(), + Self::BatchSeal(descr, _) => f.debug_tuple("BatchSeal").field(descr).finish(), + } + } +} + +#[derive(Debug)] +pub(crate) struct TestBatchExecutorBuilder { + /// Sequence of known transaction execution results per batch. + /// We need to store txs for each batch separately, since the same transaction + /// can be executed in several batches (e.g. after an `ExcludeAndSeal` rollback). + /// When initializing each batch, we will `pop_front` known txs for the corresponding executor. + txs: RefCell>>>, + /// Set of transactions that would be rolled back at least once. + rollback_set: HashSet, +} + +impl TestBatchExecutorBuilder { + fn new(scenario: &TestScenario) -> Self { + let mut txs = VecDeque::new(); + let mut batch_txs = HashMap::new(); + let mut rollback_set = HashSet::new(); + + // Insert data about the pending batch, if it exists. + // All the txs from the pending batch must succeed. + if let Some(pending_batch) = &scenario.pending_batch { + for tx in pending_batch.txs.iter().flat_map(|(_, txs)| txs) { + batch_txs.insert(tx.hash(), vec![successful_exec()].into()); + } + } + + // Go through scenario and collect per-batch transactions and the overall rollback set. 
+ for item in &scenario.actions { + match item { + ScenarioItem::Tx(_, tx, result) => { + batch_txs + .entry(tx.hash()) + .and_modify(|txs: &mut VecDeque| { + txs.push_back(result.clone()) + }) + .or_insert_with(|| { + let mut txs = VecDeque::with_capacity(1); + txs.push_back(result.clone()); + txs + }); + } + ScenarioItem::Rollback(_, tx) => { + rollback_set.insert(tx.hash()); + } + ScenarioItem::Reject(_, tx, _) => { + rollback_set.insert(tx.hash()); + } + ScenarioItem::BatchSeal(_, _) => txs.push_back(std::mem::take(&mut batch_txs)), + _ => {} + } + } + + // Some batch seal may not be included into scenario, dump such txs if they exist. + if !batch_txs.is_empty() { + txs.push_back(std::mem::take(&mut batch_txs)); + } + // After sealing the batch, state keeper initialized a new one, so we need to create an empty set + // for the initialization of the "next-to-last" batch. + txs.push_back(HashMap::default()); + + Self { + txs: RefCell::new(txs), + rollback_set, + } + } +} + +impl L1BatchExecutorBuilder for TestBatchExecutorBuilder { + fn init_batch( + &self, + _block_context: BlockContextMode, + _block_properties: BlockProperties, + ) -> BatchExecutorHandle { + let (commands_sender, commands_receiver) = mpsc::channel(); + + let executor = TestBatchExecutor::new( + commands_receiver, + self.txs.borrow_mut().pop_front().unwrap(), + self.rollback_set.clone(), + ); + + let handle = std::thread::spawn(move || executor.run()); + + BatchExecutorHandle::from_raw(handle, commands_sender) + } +} + +#[derive(Debug)] +pub(super) struct TestBatchExecutor { + commands: mpsc::Receiver, + /// Mapping tx -> response. + /// The same transaction can be executed several times, so we use a sequence of responses and consume them by one. + txs: HashMap>, + /// Set of transactions that are expected to be rolled back. + rollback_set: HashSet, + /// Last executed tx hash. 
+ last_tx: H256, +} + +impl TestBatchExecutor { + pub(super) fn new( + commands: mpsc::Receiver, + txs: HashMap>, + rollback_set: HashSet, + ) -> Self { + Self { + commands, + txs, + rollback_set, + last_tx: H256::default(), // We don't expect rollbacks until the first tx is executed. + } + } + + pub(super) fn run(mut self) { + while let Ok(cmd) = self.commands.recv() { + match cmd { + Command::ExecuteTx(tx, resp) => { + let result = self + .txs + .get_mut(&tx.hash()) + .unwrap() + .pop_front() + .unwrap_or_else(|| { + panic!( + "Received a request to execute an unknown transaction: {:?}", + tx + ) + }); + resp.send(result).unwrap(); + self.last_tx = tx.hash(); + } + Command::RollbackLastTx(resp) => { + // This is an additional safety check: IO would check that every rollback is included in the + // test scenario, but here we want to additionally check that each such request goes to the + // the batch executor as well. + if !self.rollback_set.contains(&self.last_tx) { + // Request to rollback an unexpected tx. + panic!( + "Received a request to rollback an unexpected tx. Last executed tx: {:?}", + self.last_tx + ) + } + resp.send(()).unwrap(); + // It's OK to not update `last_executed_tx`, since state keeper never should rollback more than 1 + // tx in a row, and it's going to cause a panic anyway. + } + Command::FinishBatch(resp) => { + // Blanket result, it doesn't really matter. 
+ let result = VmBlockResult { + full_result: VmExecutionResult { + events: Default::default(), + storage_log_queries: Default::default(), + used_contract_hashes: Default::default(), + l2_to_l1_logs: Default::default(), + return_data: Default::default(), + gas_used: Default::default(), + contracts_used: Default::default(), + revert_reason: Default::default(), + trace: Default::default(), + total_log_queries: Default::default(), + cycles_used: Default::default(), + }, + block_tip_result: VmPartialExecutionResult { + logs: Default::default(), + revert_reason: Default::default(), + contracts_used: Default::default(), + cycles_used: Default::default(), + }, + }; + + resp.send(result).unwrap(); + return; + } + } + } + } +} + +#[derive(Debug)] +pub(crate) struct TestIO { + stop_sender: watch::Sender, + batch_number: L1BatchNumber, + timestamp: u64, + l1_gas_price: u64, + fair_l2_gas_price: u64, + miniblock_number: MiniblockNumber, + fee_account: Address, + scenario: TestScenario, +} + +impl TestIO { + fn new(stop_sender: watch::Sender, scenario: TestScenario) -> Self { + Self { + stop_sender, + batch_number: L1BatchNumber(1), + timestamp: 1, + l1_gas_price: 1, + fair_l2_gas_price: 1, + miniblock_number: MiniblockNumber(1), + fee_account: FEE_ACCOUNT, + scenario, + } + } + + fn pop_next_item(&mut self, request: &str) -> ScenarioItem { + if self.scenario.actions.is_empty() { + panic!( + "Test scenario is empty, but the following action was done by the state keeper: {}", + request + ); + } + + let action = self.scenario.actions.pop_front().unwrap(); + // If that was a last action, tell the state keeper to stop after that. 
+ if self.scenario.actions.is_empty() { + self.stop_sender.send(true).unwrap(); + } + action + } +} + +impl StateKeeperIO for TestIO { + fn current_l1_batch_number(&self) -> L1BatchNumber { + self.batch_number + } + + fn current_miniblock_number(&self) -> MiniblockNumber { + self.miniblock_number + } + + fn load_pending_batch(&mut self) -> Option { + self.scenario.pending_batch.take() + } + + fn wait_for_new_batch_params( + &mut self, + _max_wait: Duration, + ) -> Option<(BlockContextMode, BlockProperties)> { + let block_properties = default_block_properties(); + + let previous_block_hash = U256::zero(); + let context = BlockContext { + block_number: self.batch_number.0, + block_timestamp: self.timestamp, + l1_gas_price: self.l1_gas_price, + fair_l2_gas_price: self.fair_l2_gas_price, + operator_address: self.fee_account, + }; + let derived_context = DerivedBlockContext { + context, + base_fee: 1, + }; + + Some(( + BlockContextMode::NewBlock(derived_context, previous_block_hash), + block_properties, + )) + } + + fn wait_for_next_tx(&mut self, _max_wait: Duration) -> Option { + let action = self.pop_next_item("wait_for_next_tx"); + assert_matches!( + action, + ScenarioItem::Tx(_, _, _), + "Expected action from scenario (first), instead got another action (second)" + ); + let ScenarioItem::Tx(_, tx, _) = action else { unreachable!() }; + Some(tx) + } + + fn rollback(&mut self, tx: &Transaction) { + let action = self.pop_next_item("rollback"); + assert_matches!( + action, + ScenarioItem::Rollback(_, _), + "Expected action from scenario (first), instead got another action (second)" + ); + let ScenarioItem::Rollback(_, expected_tx) = action else { unreachable!() }; + assert_eq!( + tx, &expected_tx, + "Incorrect transaction has been rolled back" + ); + } + + fn reject(&mut self, tx: &Transaction, error: &str) { + let action = self.pop_next_item("reject"); + assert_matches!( + action, + ScenarioItem::Reject(_, _, _), + "Expected action from scenario (first), instead got 
another action (second)" + ); + let ScenarioItem::Reject(_, expected_tx, expected_err) = action else { unreachable!() }; + assert_eq!(tx, &expected_tx, "Incorrect transaction has been rejected"); + if let Some(expected_err) = expected_err { + assert!( + error.contains(&expected_err), + "Transaction was rejected with an unexpected error. Expected part was {}, but the actual error was {}", + expected_err, + error + ); + } + } + + fn seal_miniblock(&mut self, updates_manager: &UpdatesManager) -> u64 { + let action = self.pop_next_item("seal_miniblock"); + assert_matches!( + action, + ScenarioItem::MiniblockSeal(_, _), + "Expected action from scenario (first), instead got another action (second)" + ); + let ScenarioItem::MiniblockSeal(_, check_fn) = action else { unreachable!() }; + if let Some(check_fn) = check_fn { + check_fn(updates_manager); + } + self.miniblock_number += 1; + self.timestamp += 1; + self.timestamp + } + + fn seal_l1_batch( + &mut self, + block_result: VmBlockResult, + updates_manager: UpdatesManager, + block_context: DerivedBlockContext, + ) { + let action = self.pop_next_item("seal_l1_batch"); + assert_matches!( + action, + ScenarioItem::BatchSeal(_, _), + "Expected action from scenario (first), instead got another action (second)" + ); + let ScenarioItem::BatchSeal(_, check_fn) = action else { unreachable!() }; + if let Some(check_fn) = check_fn { + check_fn(&block_result, &updates_manager, &block_context.context); + } + + self.miniblock_number += 1; // Seal the fictive miniblock. 
+ self.batch_number += 1; + self.timestamp += 1; + } +} diff --git a/core/bin/zksync_core/src/state_keeper/types.rs b/core/bin/zksync_core/src/state_keeper/types.rs new file mode 100644 index 000000000000..c0e84497d2d2 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/types.rs @@ -0,0 +1,51 @@ +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use zksync_mempool::{L2TxFilter, MempoolInfo, MempoolStore}; +use zksync_types::{block::BlockGasCount, tx::ExecutionMetrics, Address, Nonce, Transaction}; + +#[derive(Debug, Clone)] +pub struct MempoolGuard(pub Arc>); + +impl MempoolGuard { + pub fn insert(&mut self, transactions: Vec, nonces: HashMap) { + self.0 + .lock() + .expect("failed to acquire mempool lock") + .insert(transactions, nonces); + } + + pub fn has_next(&self, filter: &L2TxFilter) -> bool { + self.0 + .lock() + .expect("failed to acquire mempool lock") + .has_next(filter) + } + + pub fn next_transaction(&mut self, filter: &L2TxFilter) -> Option { + self.0 + .lock() + .expect("failed to acquire mempool lock") + .next_transaction(filter) + } + + pub fn rollback(&mut self, rejected: &Transaction) { + self.0 + .lock() + .expect("failed to acquire mempool lock") + .rollback(rejected); + } + + pub fn get_mempool_info(&mut self) -> MempoolInfo { + self.0 + .lock() + .expect("failed to acquire mempool lock") + .get_mempool_info() + } +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct ExecutionMetricsForCriteria { + pub storage_updates: usize, + pub l1_gas: BlockGasCount, + pub execution_metrics: ExecutionMetrics, +} diff --git a/core/bin/zksync_core/src/state_keeper/updates/l1_batch_updates.rs b/core/bin/zksync_core/src/state_keeper/updates/l1_batch_updates.rs new file mode 100644 index 000000000000..d16b56f337ae --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/updates/l1_batch_updates.rs @@ -0,0 +1,109 @@ +use super::miniblock_updates::MiniblockUpdates; +use crate::gas_tracker::new_block_gas_count; +use 
zksync_types::block::BlockGasCount; +use zksync_types::priority_op_onchain_data::PriorityOpOnchainData; +use zksync_types::tx::tx_execution_info::ExecutionMetrics; +use zksync_types::{tx::TransactionExecutionResult, ExecuteTransactionCommon}; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct L1BatchUpdates { + pub executed_transactions: Vec, + pub priority_ops_onchain_data: Vec, + pub block_execution_metrics: ExecutionMetrics, + // how much L1 gas will it take to submit this block? + pub l1_gas_count: BlockGasCount, + // We keep track on the number of modified storage keys to close the block by L1 gas + // Later on, we'll replace it with closing L2 blocks by gas. + pub modified_storage_keys_number: usize, +} + +impl L1BatchUpdates { + pub(crate) fn new() -> Self { + Self { + executed_transactions: Default::default(), + priority_ops_onchain_data: Default::default(), + block_execution_metrics: Default::default(), + l1_gas_count: new_block_gas_count(), + modified_storage_keys_number: 0, + } + } + + pub(crate) fn extend_from_sealed_miniblock(&mut self, miniblock_updates: MiniblockUpdates) { + for tx in miniblock_updates.executed_transactions.iter() { + if let ExecuteTransactionCommon::L1(data) = &tx.transaction.common_data { + let onchain_metadata = data.onchain_metadata().onchain_data; + self.priority_ops_onchain_data.push(onchain_metadata); + } + } + self.executed_transactions + .extend(miniblock_updates.executed_transactions); + + self.modified_storage_keys_number += miniblock_updates.modified_storage_keys_number; + self.l1_gas_count += miniblock_updates.l1_gas_count; + self.block_execution_metrics += miniblock_updates.block_execution_metrics; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::gas_tracker::new_block_gas_count; + use vm::vm::{VmPartialExecutionResult, VmTxExecutionResult}; + use zksync_types::tx::tx_execution_info::TxExecutionStatus; + use zksync_types::{l2::L2Tx, Address, Nonce, H256, U256}; + + #[test] + fn 
apply_miniblock_with_empty_tx() { + let mut miniblock_accumulator = MiniblockUpdates::new(0); + let mut tx = L2Tx::new( + Default::default(), + Default::default(), + Nonce(0), + Default::default(), + Address::default(), + U256::zero(), + None, + Default::default(), + ); + + tx.set_input(H256::random().0.to_vec(), H256::random()); + + miniblock_accumulator.extend_from_executed_transaction( + &tx.into(), + VmTxExecutionResult { + status: TxExecutionStatus::Success, + result: VmPartialExecutionResult { + logs: Default::default(), + revert_reason: None, + contracts_used: 0, + cycles_used: 0, + }, + gas_refunded: 0, + operator_suggested_refund: 0, + }, + Default::default(), + Default::default(), + ); + + let mut l1_batch_accumulator = L1BatchUpdates::new(); + l1_batch_accumulator.extend_from_sealed_miniblock(miniblock_accumulator); + + assert_eq!(l1_batch_accumulator.executed_transactions.len(), 1); + assert_eq!(l1_batch_accumulator.l1_gas_count, new_block_gas_count()); + assert_eq!(l1_batch_accumulator.modified_storage_keys_number, 0); + assert_eq!(l1_batch_accumulator.priority_ops_onchain_data.len(), 0); + assert_eq!(l1_batch_accumulator.block_execution_metrics.l2_l1_logs, 0); + assert_eq!( + l1_batch_accumulator + .block_execution_metrics + .initial_storage_writes, + 0 + ); + assert_eq!( + l1_batch_accumulator + .block_execution_metrics + .repeated_storage_writes, + 0 + ); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/updates/miniblock_updates.rs b/core/bin/zksync_core/src/state_keeper/updates/miniblock_updates.rs new file mode 100644 index 000000000000..17a17651c179 --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/updates/miniblock_updates.rs @@ -0,0 +1,164 @@ +use std::collections::HashMap; +use vm::vm::VmTxExecutionResult; +use zksync_types::block::BlockGasCount; +use zksync_types::event::extract_bytecodes_marked_as_known; +use zksync_types::l2_to_l1_log::L2ToL1Log; +use zksync_types::tx::tx_execution_info::VmExecutionLogs; +use 
zksync_types::tx::ExecutionMetrics; +use zksync_types::{tx::TransactionExecutionResult, StorageLogQuery, Transaction, VmEvent, H256}; +use zksync_utils::bytecode::hash_bytecode; + +#[derive(Debug, Clone, PartialEq)] +pub(crate) struct MiniblockUpdates { + pub executed_transactions: Vec, + pub events: Vec, + pub storage_logs: Vec, + pub l2_to_l1_logs: Vec, + pub new_factory_deps: HashMap>, + // how much L1 gas will it take to submit this block? + pub l1_gas_count: BlockGasCount, + pub block_execution_metrics: ExecutionMetrics, + // We keep track on the number of modified storage keys to close the block by L1 gas + // Later on, we'll replace it with closing L2 blocks by gas. + pub modified_storage_keys_number: usize, + + pub timestamp: u64, +} + +impl MiniblockUpdates { + pub(crate) fn new(timestamp: u64) -> Self { + Self { + executed_transactions: Default::default(), + events: Default::default(), + storage_logs: Default::default(), + l2_to_l1_logs: Default::default(), + new_factory_deps: Default::default(), + l1_gas_count: Default::default(), + block_execution_metrics: Default::default(), + modified_storage_keys_number: 0, + timestamp, + } + } + + pub(crate) fn extend_from_fictive_transaction(&mut self, vm_execution_logs: VmExecutionLogs) { + self.events.extend(vm_execution_logs.events); + self.storage_logs.extend(vm_execution_logs.storage_logs); + self.l2_to_l1_logs.extend(vm_execution_logs.l2_to_l1_logs); + } + + pub(crate) fn extend_from_executed_transaction( + &mut self, + tx: &Transaction, + tx_execution_result: VmTxExecutionResult, + tx_l1_gas_this_tx: BlockGasCount, + execution_metrics: ExecutionMetrics, + ) { + // Get bytecode hashes that were marked as known + let saved_factory_deps = + extract_bytecodes_marked_as_known(&tx_execution_result.result.logs.events); + + // Get transaction factory deps + let tx_factory_deps: HashMap<_, _> = tx + .execute + .factory_deps + .clone() + .unwrap_or_default() + .iter() + .map(|bytecode| (hash_bytecode(bytecode), 
bytecode.clone())) + .collect(); + + // Save all bytecodes that were marked as known on the bootloader + saved_factory_deps.into_iter().for_each(|bytecodehash| { + let bytecode = tx_factory_deps + .get(&bytecodehash) + .unwrap_or_else(|| { + panic!( + "Failed to get factory deps on tx: bytecode hash: {:?}, tx hash: {}", + bytecodehash, + tx.hash() + ) + }) + .clone(); + + self.new_factory_deps.insert(bytecodehash, bytecode); + }); + + self.executed_transactions.push(TransactionExecutionResult { + transaction: tx.clone(), + hash: tx.hash(), + execution_info: execution_metrics, + execution_status: tx_execution_result.status, + refunded_gas: tx_execution_result.gas_refunded, + operator_suggested_refund: tx_execution_result.operator_suggested_refund, + }); + + self.events.extend(tx_execution_result.result.logs.events); + self.storage_logs + .extend(tx_execution_result.result.logs.storage_logs); + self.l2_to_l1_logs + .extend(tx_execution_result.result.logs.l2_to_l1_logs); + + self.modified_storage_keys_number += execution_metrics.storage_writes(); + self.l1_gas_count += tx_l1_gas_this_tx; + self.block_execution_metrics += execution_metrics; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use vm::vm::{VmPartialExecutionResult, VmTxExecutionResult}; + use zksync_types::{l2::L2Tx, tx::tx_execution_info::TxExecutionStatus, Address, Nonce, U256}; + + #[test] + fn apply_empty_l2_tx() { + let mut accumulator = MiniblockUpdates::new(0); + + let mut tx = L2Tx::new( + Default::default(), + Default::default(), + Nonce(0), + Default::default(), + Address::default(), + U256::zero(), + None, + Default::default(), + ); + + tx.set_input(H256::random().0.to_vec(), H256::random()); + + accumulator.extend_from_executed_transaction( + &tx.into(), + VmTxExecutionResult { + status: TxExecutionStatus::Success, + result: VmPartialExecutionResult { + logs: Default::default(), + revert_reason: None, + contracts_used: 0, + cycles_used: 0, + }, + gas_refunded: 0, + 
operator_suggested_refund: 0, + }, + Default::default(), + Default::default(), + ); + + assert_eq!(accumulator.executed_transactions.len(), 1); + assert_eq!(accumulator.events.len(), 0); + assert_eq!(accumulator.storage_logs.len(), 0); + assert_eq!(accumulator.l2_to_l1_logs.len(), 0); + assert_eq!(accumulator.l1_gas_count, Default::default()); + assert_eq!(accumulator.modified_storage_keys_number, 0); + assert_eq!(accumulator.new_factory_deps.len(), 0); + assert_eq!( + accumulator.block_execution_metrics.initial_storage_writes, + 0 + ); + assert_eq!( + accumulator.block_execution_metrics.repeated_storage_writes, + 0 + ); + assert_eq!(accumulator.block_execution_metrics.l2_l1_logs, 0); + } +} diff --git a/core/bin/zksync_core/src/state_keeper/updates/mod.rs b/core/bin/zksync_core/src/state_keeper/updates/mod.rs new file mode 100644 index 000000000000..6287a9af6f2d --- /dev/null +++ b/core/bin/zksync_core/src/state_keeper/updates/mod.rs @@ -0,0 +1,175 @@ +use vm::{vm::VmTxExecutionResult, vm_with_bootloader::BlockContextMode}; +use zksync_types::block::BlockGasCount; +use zksync_types::tx::ExecutionMetrics; +use zksync_types::Transaction; + +mod l1_batch_updates; +mod miniblock_updates; + +pub(crate) use self::{l1_batch_updates::L1BatchUpdates, miniblock_updates::MiniblockUpdates}; + +/// Most of the information needed to seal the l1 batch/mini-block is contained within the VM, +/// things that are not captured there are accumulated externally. +/// `MiniblockUpdates` keeps updates for the pending mini-block. +/// `L1BatchUpdates` keeps updates for the already sealed mini-blocks of the pending L1 batch. +/// `UpdatesManager` manages the state of both of these accumulators to be consistent +/// and provides information about the pending state of the current L1 batch. 
+#[derive(Debug, Clone, PartialEq)] +pub(crate) struct UpdatesManager { + batch_timestamp: u64, + l1_gas_price: u64, + fair_l2_gas_price: u64, + base_fee_per_gas: u64, + pub l1_batch: L1BatchUpdates, + pub miniblock: MiniblockUpdates, +} + +impl UpdatesManager { + pub(crate) fn new(block_context: &BlockContextMode) -> Self { + let batch_timestamp = block_context.timestamp(); + let context = block_context.inner_block_context().context; + Self { + batch_timestamp, + l1_gas_price: context.l1_gas_price, + fair_l2_gas_price: context.fair_l2_gas_price, + base_fee_per_gas: block_context.inner_block_context().base_fee, + l1_batch: L1BatchUpdates::new(), + miniblock: MiniblockUpdates::new(batch_timestamp), + } + } + + pub(crate) fn batch_timestamp(&self) -> u64 { + self.batch_timestamp + } + + pub(crate) fn l1_gas_price(&self) -> u64 { + self.l1_gas_price + } + + pub(crate) fn fair_l2_gas_price(&self) -> u64 { + self.fair_l2_gas_price + } + + pub(crate) fn base_fee_per_gas(&self) -> u64 { + self.base_fee_per_gas + } + + pub(crate) fn extend_from_executed_transaction( + &mut self, + tx: &Transaction, + tx_execution_result: VmTxExecutionResult, + tx_l1_gas_this_tx: BlockGasCount, + execution_metrics: ExecutionMetrics, + ) { + self.miniblock.extend_from_executed_transaction( + tx, + tx_execution_result, + tx_l1_gas_this_tx, + execution_metrics, + ) + } + + pub(crate) fn seal_miniblock(&mut self, new_miniblock_timestamp: u64) { + let new_miniblock_updates = MiniblockUpdates::new(new_miniblock_timestamp); + let old_miniblock_updates = std::mem::replace(&mut self.miniblock, new_miniblock_updates); + + self.l1_batch + .extend_from_sealed_miniblock(old_miniblock_updates); + } + + pub(crate) fn pending_executed_transactions_len(&self) -> usize { + self.l1_batch.executed_transactions.len() + self.miniblock.executed_transactions.len() + } + + pub(crate) fn pending_l1_gas_count(&self) -> BlockGasCount { + self.l1_batch.l1_gas_count + self.miniblock.l1_gas_count + } + + pub(crate) fn 
pending_execution_metrics(&self) -> ExecutionMetrics { + self.l1_batch.block_execution_metrics + self.miniblock.block_execution_metrics + } + + pub(crate) fn get_tx_by_index(&self, index: usize) -> &Transaction { + if index < self.l1_batch.executed_transactions.len() { + &self.l1_batch.executed_transactions[index].transaction + } else if index < self.pending_executed_transactions_len() { + &self.miniblock.executed_transactions[index - self.l1_batch.executed_transactions.len()] + .transaction + } else { + panic!("Incorrect index provided"); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::gas_tracker::new_block_gas_count; + use vm::vm::VmPartialExecutionResult; + use vm::vm_with_bootloader::{BlockContext, DerivedBlockContext}; + use zksync_types::tx::tx_execution_info::{TxExecutionStatus, VmExecutionLogs}; + use zksync_types::{l2::L2Tx, Address, Nonce, H256, U256}; + + #[test] + fn apply_miniblock() { + // Init accumulators. + let block_context = BlockContextMode::NewBlock( + DerivedBlockContext { + context: BlockContext { + block_number: 0, + block_timestamp: 0, + l1_gas_price: 0, + fair_l2_gas_price: 0, + operator_address: Default::default(), + }, + base_fee: 0, + }, + 0.into(), + ); + let mut updates_manager = UpdatesManager::new(&block_context); + assert_eq!(updates_manager.pending_executed_transactions_len(), 0); + + // Apply tx. 
+ let mut tx = L2Tx::new( + Default::default(), + Default::default(), + Nonce(0), + Default::default(), + Address::default(), + U256::zero(), + None, + Default::default(), + ); + tx.set_input(H256::random().0.to_vec(), H256::random()); + updates_manager.extend_from_executed_transaction( + &tx.into(), + VmTxExecutionResult { + status: TxExecutionStatus::Success, + result: VmPartialExecutionResult { + logs: VmExecutionLogs::default(), + revert_reason: None, + contracts_used: 0, + cycles_used: 0, + }, + gas_refunded: 0, + operator_suggested_refund: 0, + }, + new_block_gas_count(), + Default::default(), + ); + + // Check that only pending state is updated. + assert_eq!(updates_manager.pending_executed_transactions_len(), 1); + assert_eq!(updates_manager.miniblock.executed_transactions.len(), 1); + assert_eq!(updates_manager.l1_batch.executed_transactions.len(), 0); + + // Seal miniblock. + updates_manager.seal_miniblock(2); + + // Check that L1 batch updates are the same with the pending state + // and miniblock updates are empty. 
+ assert_eq!(updates_manager.pending_executed_transactions_len(), 1); + assert_eq!(updates_manager.miniblock.executed_transactions.len(), 0); + assert_eq!(updates_manager.l1_batch.executed_transactions.len(), 1); + } +} diff --git a/core/bin/zksync_core/src/witness_generator/basic_circuits.rs b/core/bin/zksync_core/src/witness_generator/basic_circuits.rs new file mode 100644 index 000000000000..bf8efc02685e --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/basic_circuits.rs @@ -0,0 +1,367 @@ +use serde::{Deserialize, Serialize}; +use std::collections::hash_map::DefaultHasher; +use std::collections::{HashMap, HashSet}; +use std::hash::{Hash, Hasher}; +use std::rc::Rc; +use std::time::Instant; +use vm::zk_evm::bitflags::_core::cell::RefCell; +use vm::zk_evm::ethereum_types::H256; +use vm::{StorageOracle, MAX_CYCLES_FOR_TX}; +use zksync_config::configs::WitnessGeneratorConfig; +use zksync_config::constants::BOOTLOADER_ADDRESS; +use zksync_contracts::{read_proved_block_bootloader_bytecode, read_sys_contract_bytecode}; +use zksync_dal::ConnectionPool; +use zksync_object_store::gcs_utils::{ + basic_circuits_blob_url, basic_circuits_inputs_blob_url, merkle_tree_paths_blob_url, + scheduler_witness_blob_url, +}; +use zksync_object_store::object_store::create_object_store_from_env; +use zksync_object_store::object_store::{ + DynamicObjectStore, LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, WITNESS_INPUT_BUCKET_PATH, +}; +use zksync_state::storage_view::StorageView; +use zksync_types::zkevm_test_harness::toolset::GeometryConfig; +use zksync_types::{ + circuit::GEOMETRY_CONFIG, + proofs::{ + AggregationRound, BasicCircuitWitnessGeneratorInput, PrepareBasicCircuitsJob, + StorageLogMetadata, WitnessGeneratorJob, WitnessGeneratorJobInput, + }, + zkevm_test_harness::{ + bellman::bn256::Bn256, + witness::full_block_artifact::{BlockBasicCircuits, BlockBasicCircuitsPublicInputs}, + SchedulerCircuitInstanceWitness, + }, + Address, 
L1BatchNumber, U256, +}; +use zksync_utils::bytecode::hash_bytecode; +use zksync_utils::{bytes_to_chunks, h256_to_u256, u256_to_h256}; + +use crate::db_storage_provider::DbStorageProvider; +use crate::witness_generator; +use crate::witness_generator::precalculated_merkle_paths_provider::PrecalculatedMerklePathsProvider; +use crate::witness_generator::track_witness_generation_stage; +use crate::witness_generator::utils::{expand_bootloader_contents, save_prover_input_artifacts}; + +pub struct BasicCircuitArtifacts { + pub basic_circuits: BlockBasicCircuits, + pub basic_circuits_inputs: BlockBasicCircuitsPublicInputs, + pub scheduler_witness: SchedulerCircuitInstanceWitness, + pub serialized_circuits: Vec<(String, Vec)>, +} + +pub fn process_basic_circuits_job( + config: WitnessGeneratorConfig, + connection_pool: ConnectionPool, + started_at: Instant, + block_number: L1BatchNumber, + job: PrepareBasicCircuitsJob, +) -> BasicCircuitArtifacts { + let witness_gen_input = + build_basic_circuits_witness_generator_input(connection_pool.clone(), job, block_number); + + let (basic_circuits, basic_circuits_inputs, scheduler_witness) = + generate_witness(config, connection_pool, witness_gen_input); + + let individual_circuits = basic_circuits.clone().into_flattened_set(); + + let serialized_circuits: Vec<(String, Vec)> = + witness_generator::serialize_circuits(&individual_circuits); + + let total_size_bytes: usize = serialized_circuits + .iter() + .map(|(_, bytes)| bytes.len()) + .sum(); + vlog::info!( + "Witness generation for block {} is complete in {:?}. 
Number of circuits: {}, total size: {}KB", + block_number.0, + started_at.elapsed(), + serialized_circuits.len(), + total_size_bytes >> 10 + ); + + BasicCircuitArtifacts { + basic_circuits, + basic_circuits_inputs, + scheduler_witness, + serialized_circuits, + } +} + +pub fn update_database( + connection_pool: ConnectionPool, + started_at: Instant, + block_number: L1BatchNumber, + circuits: Vec, +) { + let mut connection = connection_pool.access_storage_blocking(); + let mut transaction = connection.start_transaction_blocking(); + + transaction + .witness_generator_dal() + .create_aggregation_jobs(block_number, circuits.len()); + transaction.prover_dal().insert_prover_jobs( + block_number, + circuits, + AggregationRound::BasicCircuits, + ); + transaction + .witness_generator_dal() + .mark_witness_job_as_successful( + block_number, + AggregationRound::BasicCircuits, + started_at.elapsed(), + ); + + transaction.commit_blocking(); + track_witness_generation_stage(block_number, started_at, AggregationRound::BasicCircuits); +} + +pub async fn get_artifacts( + block_number: L1BatchNumber, + object_store: &DynamicObjectStore, +) -> WitnessGeneratorJob { + let merkle_tree_paths = object_store + .get( + WITNESS_INPUT_BUCKET_PATH, + merkle_tree_paths_blob_url(block_number), + ) + .unwrap(); + + let (merkle_paths, next_enumeration_index) = + bincode::deserialize::<(Vec, u64)>(&merkle_tree_paths) + .expect("witness deserialization failed"); + + WitnessGeneratorJob { + block_number, + job: WitnessGeneratorJobInput::BasicCircuits(Box::new(PrepareBasicCircuitsJob { + merkle_paths, + next_enumeration_index, + })), + } +} + +pub async fn save_artifacts( + block_number: L1BatchNumber, + artifacts: BasicCircuitArtifacts, + object_store: &mut DynamicObjectStore, +) { + let basic_circuits_serialized = + bincode::serialize(&artifacts.basic_circuits).expect("cannot serialize basic_circuits"); + object_store + .put( + LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + 
basic_circuits_blob_url(block_number), + basic_circuits_serialized, + ) + .unwrap(); + + let basic_circuits_inputs_serialized = bincode::serialize(&artifacts.basic_circuits_inputs) + .expect("cannot serialize basic_circuits_inputs"); + object_store + .put( + LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + basic_circuits_inputs_blob_url(block_number), + basic_circuits_inputs_serialized, + ) + .unwrap(); + + let scheduler_witness_serialized = bincode::serialize(&artifacts.scheduler_witness) + .expect("cannot serialize scheduler_witness"); + + object_store + .put( + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, + scheduler_witness_blob_url(block_number), + scheduler_witness_serialized, + ) + .unwrap(); + save_prover_input_artifacts( + block_number, + artifacts.serialized_circuits, + object_store, + AggregationRound::BasicCircuits, + ) + .await; +} + +// If making changes to this method, consider moving this logic to the DAL layer and make +// `PrepareBasicCircuitsJob` have all fields of `BasicCircuitWitnessGeneratorInput`. 
+pub fn build_basic_circuits_witness_generator_input( + connection_pool: ConnectionPool, + witness_merkle_input: PrepareBasicCircuitsJob, + block_number: L1BatchNumber, +) -> BasicCircuitWitnessGeneratorInput { + let mut connection = connection_pool.access_storage_blocking(); + let block_header = connection + .blocks_dal() + .get_block_header(block_number) + .unwrap(); + let previous_block_header = connection + .blocks_dal() + .get_block_header(block_number - 1) + .unwrap(); + BasicCircuitWitnessGeneratorInput { + block_number, + previous_block_timestamp: previous_block_header.timestamp, + block_timestamp: block_header.timestamp, + used_bytecodes_hashes: block_header.used_contract_hashes, + initial_heap_content: block_header.initial_bootloader_contents, + merkle_paths_input: witness_merkle_input, + } +} + +pub fn generate_witness( + config: WitnessGeneratorConfig, + connection_pool: ConnectionPool, + input: BasicCircuitWitnessGeneratorInput, +) -> ( + BlockBasicCircuits, + BlockBasicCircuitsPublicInputs, + SchedulerCircuitInstanceWitness, +) { + let mut connection = connection_pool.access_storage_blocking(); + + let account_bytecode = read_sys_contract_bytecode("", "DefaultAccount"); + let account_code_hash = h256_to_u256(hash_bytecode(&account_bytecode)); + let bootloader_code_bytes = read_proved_block_bootloader_bytecode(); + let bootloader_code_hash = h256_to_u256(hash_bytecode(&bootloader_code_bytes)); + let bootloader_code = bytes_to_chunks(&bootloader_code_bytes); + let bootloader_contents = expand_bootloader_contents(input.initial_heap_content); + + let hashes: HashSet = input + .used_bytecodes_hashes + .iter() + // SMA-1555: remove this hack once updated to the latest version of zkevm_test_harness + .filter(|&&hash| hash != bootloader_code_hash) + .map(|hash| u256_to_h256(*hash)) + .collect(); + + let mut used_bytecodes = connection.storage_dal().get_factory_deps(&hashes); + if input.used_bytecodes_hashes.contains(&account_code_hash) { + 
used_bytecodes.insert(account_code_hash, bytes_to_chunks(&account_bytecode)); + } + + assert_eq!( + hashes.len(), + used_bytecodes.len(), + "{} factory deps are not found in DB", + hashes.len() - used_bytecodes.len() + ); + + // `DbStorageProvider` was designed to be used in API, so it accepts miniblock numbers. + // Probably, we should make it work with L1 batch numbers too. + let (_, last_miniblock_number) = connection + .blocks_dal() + .get_miniblock_range_of_l1_batch(input.block_number - 1) + .expect("L1 batch should contain at least one miniblock"); + let db_storage_provider = DbStorageProvider::new(connection, last_miniblock_number, true); + let mut tree = PrecalculatedMerklePathsProvider::new(input.merkle_paths_input); + + let storage_ptr: &mut dyn vm::storage::Storage = &mut StorageView::new(db_storage_provider); + let storage_oracle = StorageOracle::new(Rc::new(RefCell::new(storage_ptr))); + let mut hasher = DefaultHasher::new(); + GEOMETRY_CONFIG.hash(&mut hasher); + vlog::info!( + "generating witness for block {} using geometry config hash: {}", + input.block_number.0, + hasher.finish() + ); + if config + .dump_arguments_for_blocks + .contains(&input.block_number.0) + { + save_run_with_fixed_params_args_to_gcs( + input.block_number.0, + last_miniblock_number.0, + Address::zero(), + BOOTLOADER_ADDRESS, + bootloader_code.clone(), + bootloader_contents.clone(), + false, + account_code_hash, + used_bytecodes.clone(), + Vec::default(), + MAX_CYCLES_FOR_TX as usize, + GEOMETRY_CONFIG, + tree.clone(), + ); + } + + zksync_types::zkevm_test_harness::external_calls::run_with_fixed_params( + Address::zero(), + BOOTLOADER_ADDRESS, + bootloader_code, + bootloader_contents, + false, + account_code_hash, + used_bytecodes, + Vec::default(), + MAX_CYCLES_FOR_TX as usize, + GEOMETRY_CONFIG, + storage_oracle, + &mut tree, + ) +} + +#[allow(clippy::too_many_arguments)] +fn save_run_with_fixed_params_args_to_gcs( + l1_batch_number: u32, + last_miniblock_number: u32, + 
caller: Address, + entry_point_address: Address, + entry_point_code: Vec<[u8; 32]>, + initial_heap_content: Vec, + zk_porter_is_available: bool, + default_aa_code_hash: U256, + used_bytecodes: HashMap>, + ram_verification_queries: Vec<(u32, U256)>, + cycle_limit: usize, + geometry: GeometryConfig, + tree: PrecalculatedMerklePathsProvider, +) { + let run_with_fixed_params_input = RunWithFixedParamsInput { + l1_batch_number, + last_miniblock_number, + caller, + entry_point_address, + entry_point_code, + initial_heap_content, + zk_porter_is_available, + default_aa_code_hash, + used_bytecodes, + ram_verification_queries, + cycle_limit, + geometry, + tree, + }; + let run_with_fixed_params_input_serialized = bincode::serialize(&run_with_fixed_params_input) + .expect("cannot serialize run_with_fixed_params_input"); + let blob_url = format!("run_with_fixed_params_input_{}.bin", l1_batch_number); + let mut object_store = create_object_store_from_env(); + object_store + .put( + WITNESS_INPUT_BUCKET_PATH, + blob_url, + run_with_fixed_params_input_serialized, + ) + .unwrap(); +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct RunWithFixedParamsInput { + l1_batch_number: u32, + last_miniblock_number: u32, + caller: Address, + entry_point_address: Address, + entry_point_code: Vec<[u8; 32]>, + initial_heap_content: Vec, + zk_porter_is_available: bool, + default_aa_code_hash: U256, + used_bytecodes: HashMap>, + ram_verification_queries: Vec<(u32, U256)>, + cycle_limit: usize, + geometry: GeometryConfig, + tree: PrecalculatedMerklePathsProvider, +} diff --git a/core/bin/zksync_core/src/witness_generator/leaf_aggregation.rs b/core/bin/zksync_core/src/witness_generator/leaf_aggregation.rs new file mode 100644 index 000000000000..92a6aef15f20 --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/leaf_aggregation.rs @@ -0,0 +1,222 @@ +use std::collections::HashMap; +use std::time::Instant; + +use zksync_dal::ConnectionPool; +use 
zksync_object_store::gcs_utils::{ + aggregation_outputs_blob_url, basic_circuits_blob_url, basic_circuits_inputs_blob_url, + leaf_layer_subqueues_blob_url, +}; +use zksync_object_store::object_store::{ + DynamicObjectStore, LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, +}; +use zksync_types::{ + circuit::LEAF_SPLITTING_FACTOR, + proofs::{ + AggregationRound, PrepareLeafAggregationCircuitsJob, WitnessGeneratorJob, + WitnessGeneratorJobInput, WitnessGeneratorJobMetadata, + }, + zkevm_test_harness::{ + abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit, + bellman::bn256::Bn256, + bellman::plonk::better_better_cs::setup::VerificationKey, + encodings::recursion_request::RecursionRequest, + encodings::QueueSimulator, + sync_vm, witness, + witness::{ + full_block_artifact::{BlockBasicCircuits, BlockBasicCircuitsPublicInputs}, + oracle::VmWitnessOracle, + }, + }, + L1BatchNumber, +}; +use zksync_verification_key_server::{ + get_ordered_vks_for_basic_circuits, get_vks_for_basic_circuits, get_vks_for_commitment, +}; + +use crate::witness_generator; +use crate::witness_generator::track_witness_generation_stage; +use crate::witness_generator::utils::save_prover_input_artifacts; + +pub struct LeafAggregationArtifacts { + pub leaf_layer_subqueues: Vec, 2, 2>>, + pub aggregation_outputs: + Vec>, + pub serialized_circuits: Vec<(String, Vec)>, + pub leaf_circuits: Vec>>, +} + +pub fn process_leaf_aggregation_job( + started_at: Instant, + block_number: L1BatchNumber, + job: PrepareLeafAggregationCircuitsJob, +) -> LeafAggregationArtifacts { + let stage_started_at = Instant::now(); + + let verification_keys: HashMap< + u8, + VerificationKey>>, + > = get_vks_for_basic_circuits(); + + vlog::info!( + "Verification keys loaded in {:?}", + stage_started_at.elapsed() + ); + + // we need the list of vks that matches the list of job.basic_circuit_proofs + let vks_for_aggregation: Vec< + VerificationKey>>, + > = 
get_ordered_vks_for_basic_circuits(&job.basic_circuits, &verification_keys); + + let (all_vk_committments, set_committment, g2_points) = + witness::recursive_aggregation::form_base_circuits_committment(get_vks_for_commitment( + verification_keys, + )); + + vlog::info!("Commitments generated in {:?}", stage_started_at.elapsed()); + + // fs::write("basic_circuits.bincode", bincode::serialize(&job.basic_circuits).unwrap()).unwrap(); + // fs::write("basic_circuits_inputs.bincode", bincode::serialize(&job.basic_circuits_inputs).unwrap()).unwrap(); + // fs::write("basic_circuits_proofs.bincode", bincode::serialize(&job.basic_circuits_proofs).unwrap()).unwrap(); + // fs::write("vks_for_aggregation.bincode", bincode::serialize(&vks_for_aggregation).unwrap()).unwrap(); + // fs::write("all_vk_committments.bincode", bincode::serialize(&all_vk_committments).unwrap()).unwrap(); + // fs::write("set_committment.bincode", bincode::serialize(&set_committment).unwrap()).unwrap(); + // fs::write("g2_points.bincode", bincode::serialize(&g2_points).unwrap()).unwrap(); + + let stage_started_at = Instant::now(); + + let (leaf_layer_subqueues, aggregation_outputs, leaf_circuits) = + zksync_types::zkevm_test_harness::witness::recursive_aggregation::prepare_leaf_aggregations( + job.basic_circuits, + job.basic_circuits_inputs, + job.basic_circuits_proofs, + vks_for_aggregation, + LEAF_SPLITTING_FACTOR, + all_vk_committments, + set_committment, + g2_points, + ); + + let serialized_circuits: Vec<(String, Vec)> = + witness_generator::serialize_circuits(&leaf_circuits); + + vlog::info!( + "prepare_leaf_aggregations took {:?}", + stage_started_at.elapsed() + ); + vlog::info!( + "Leaf witness generation for block {} is complete in {:?}. 
Number of circuits: {}", + block_number.0, + started_at.elapsed(), + leaf_circuits.len() + ); + + LeafAggregationArtifacts { + leaf_layer_subqueues, + aggregation_outputs, + serialized_circuits, + leaf_circuits, + } +} + +pub fn update_database( + connection_pool: ConnectionPool, + started_at: Instant, + block_number: L1BatchNumber, + leaf_circuits_len: usize, + circuits: Vec, +) { + let mut connection = connection_pool.access_storage_blocking(); + let mut transaction = connection.start_transaction_blocking(); + + // inserts artifacts into the node_aggregation_witness_jobs table + // and advances it to waiting_for_proofs status + transaction + .witness_generator_dal() + .save_leaf_aggregation_artifacts(block_number, leaf_circuits_len); + transaction.prover_dal().insert_prover_jobs( + block_number, + circuits, + AggregationRound::LeafAggregation, + ); + transaction + .witness_generator_dal() + .mark_witness_job_as_successful( + block_number, + AggregationRound::LeafAggregation, + started_at.elapsed(), + ); + + transaction.commit_blocking(); + track_witness_generation_stage(block_number, started_at, AggregationRound::LeafAggregation); +} + +pub async fn get_artifacts( + metadata: WitnessGeneratorJobMetadata, + object_store: &DynamicObjectStore, +) -> WitnessGeneratorJob { + let basic_circuits_serialized = object_store + .get( + LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + basic_circuits_blob_url(metadata.block_number), + ) + .unwrap(); + let basic_circuits = + bincode::deserialize::>(&basic_circuits_serialized) + .expect("basic_circuits deserialization failed"); + + let basic_circuits_inputs_serialized = object_store + .get( + LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + basic_circuits_inputs_blob_url(metadata.block_number), + ) + .unwrap(); + let basic_circuits_inputs = bincode::deserialize::>( + &basic_circuits_inputs_serialized, + ) + .expect("basic_circuits_inputs deserialization failed"); + + WitnessGeneratorJob { + block_number: metadata.block_number, + job: 
WitnessGeneratorJobInput::LeafAggregation(Box::new( + PrepareLeafAggregationCircuitsJob { + basic_circuits_inputs, + basic_circuits_proofs: metadata.proofs, + basic_circuits, + }, + )), + } +} + +pub async fn save_artifacts( + block_number: L1BatchNumber, + artifacts: LeafAggregationArtifacts, + object_store: &mut DynamicObjectStore, +) { + let leaf_layer_subqueues_serialized = bincode::serialize(&artifacts.leaf_layer_subqueues) + .expect("cannot serialize leaf_layer_subqueues"); + object_store + .put( + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + leaf_layer_subqueues_blob_url(block_number), + leaf_layer_subqueues_serialized, + ) + .unwrap(); + + let aggregation_outputs_serialized = bincode::serialize(&artifacts.aggregation_outputs) + .expect("cannot serialize aggregation_outputs"); + object_store + .put( + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + aggregation_outputs_blob_url(block_number), + aggregation_outputs_serialized, + ) + .unwrap(); + save_prover_input_artifacts( + block_number, + artifacts.serialized_circuits, + object_store, + AggregationRound::LeafAggregation, + ) + .await; +} diff --git a/core/bin/zksync_core/src/witness_generator/mod.rs b/core/bin/zksync_core/src/witness_generator/mod.rs new file mode 100644 index 000000000000..25dbe18f4c1f --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/mod.rs @@ -0,0 +1,427 @@ +use std::fmt::Debug; +use std::time::Instant; + +use async_trait::async_trait; + +use zksync_config::configs::prover::ProverConfigs; +use zksync_config::configs::witness_generator::SamplingMode; +use zksync_config::configs::WitnessGeneratorConfig; +use zksync_config::ProverConfig; +use zksync_dal::ConnectionPool; +use zksync_object_store::object_store::create_object_store_from_env; +use zksync_queued_job_processor::JobProcessor; +use zksync_types::{ + proofs::{AggregationRound, WitnessGeneratorJob, WitnessGeneratorJobInput}, + zkevm_test_harness::{ + abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit, 
bellman::bn256::Bn256, + witness::oracle::VmWitnessOracle, + }, + L1BatchNumber, +}; + +// use crate::witness_generator::basic_circuits; +use crate::witness_generator::basic_circuits::BasicCircuitArtifacts; +use crate::witness_generator::leaf_aggregation::LeafAggregationArtifacts; +use crate::witness_generator::node_aggregation::NodeAggregationArtifacts; +use crate::witness_generator::scheduler::SchedulerArtifacts; + +mod precalculated_merkle_paths_provider; +mod utils; + +mod basic_circuits; +mod leaf_aggregation; +mod node_aggregation; +mod scheduler; +#[cfg(test)] +mod tests; + +/// `WitnessGenerator` component is responsible for generating prover jobs +/// and saving artifacts needed for the next round of proof aggregation. +/// +/// That is, every aggregation round needs two sets of input: +/// * computed proofs from the previous round +/// * some artifacts that the witness generator of previous round(s) returns. +/// +/// There are four rounds of proofs for every block, +/// each of them starts with an invocation of `WitnessGenerator` with a corresponding `WitnessGeneratorJobType`: +/// * `WitnessGeneratorJobType::BasicCircuits`: +/// generates basic circuits (circuits like `Main VM` - up to 50 * 48 = 2400 circuits): +/// input table: `basic_circuit_witness_jobs` +/// artifact/output table: `leaf_aggregation_jobs` (also creates job stubs in `node_aggregation_jobs` and `scheduler_aggregation_jobs`) +/// value in `aggregation_round` field of `prover_jobs` table: 0 +/// * `WitnessGeneratorJobType::LeafAggregation`: +/// generates leaf aggregation circuits (up to 48 circuits of type `LeafAggregation`) +/// input table: `leaf_aggregation_jobs` +/// artifact/output table: `node_aggregation_jobs` +/// value in `aggregation_round` field of `prover_jobs` table: 1 +/// * `WitnessGeneratorJobType::NodeAggregation` +/// generates one circuit of type `NodeAggregation` +/// input table: `leaf_aggregation_jobs` +/// value in `aggregation_round` field of `prover_jobs` table: 
2 +/// * scheduler circuit +/// generates one circuit of type `Scheduler` +/// input table: `scheduler_witness_jobs` +/// value in `aggregation_round` field of `prover_jobs` table: 3 +/// +/// One round of prover generation consists of: +/// * `WitnessGenerator` picks up the next `queued` job in its input table and processes it +/// (invoking the corresponding helper function in `zkevm_test_harness` repo) +/// * it saves the generated circuis to `prover_jobs` table and the other artifacts to its output table +/// * the individual proofs are picked up by the provers, processed, and marked as complete. +/// * when the last proof for this round is computed, the prover updates the row in the output table +/// setting its status to `queued` +/// * `WitnessGenerator` picks up such job and proceeds to the next round +/// +/// Note that the very first input table (`basic_circuit_witness_jobs`) +/// is populated by the tree (as the input artifact for the `WitnessGeneratorJobType::BasicCircuits` is the merkle proofs) +/// +#[derive(Debug)] +pub struct WitnessGenerator { + config: WitnessGeneratorConfig, +} + +pub enum WitnessGeneratorArtifacts { + BasicCircuits(Box), + LeafAggregation(Box), + NodeAggregation(Box), + Scheduler(Box), +} + +impl WitnessGenerator { + pub fn new(config: WitnessGeneratorConfig) -> Self { + Self { config } + } + + fn process_job_sync( + connection_pool: ConnectionPool, + job: WitnessGeneratorJob, + started_at: Instant, + ) -> Option { + let config: WitnessGeneratorConfig = WitnessGeneratorConfig::from_env(); + let prover_config: ProverConfig = ProverConfigs::from_env().non_gpu; + let WitnessGeneratorJob { block_number, job } = job; + + if let ( + SamplingMode::Enabled(sampling_params), + &WitnessGeneratorJobInput::BasicCircuits(_), + ) = (config.sampling_mode(), &job) + { + let mut storage = connection_pool.access_storage_blocking(); + + let last_sealed_l1_batch_number = storage.blocks_dal().get_sealed_block_number(); + let 
min_unproved_l1_batch_number = storage + .prover_dal() + .min_unproved_l1_batch_number(prover_config.max_attempts) + .unwrap_or(last_sealed_l1_batch_number); + let prover_lag = last_sealed_l1_batch_number.0 - min_unproved_l1_batch_number.0; + + let sampling_probability = + sampling_params.calculate_sampling_probability(prover_lag as usize); + + // Generate random number in [0; 1). + let rand_value = rand::random::(); + // We get value higher than `sampling_probability` with prob = `1 - sampling_probability`. + // In this case job should be skipped. + if rand_value > sampling_probability { + metrics::counter!("server.witness_generator.skipped_blocks", 1); + vlog::info!("Skipping witness generation for block {}, prover lag: {}, sampling probability: {}", block_number.0, prover_lag, sampling_probability); + storage + .witness_generator_dal() + .mark_witness_job_as_skipped(block_number, AggregationRound::BasicCircuits); + return None; + } + } + + if matches!(&job, &WitnessGeneratorJobInput::BasicCircuits(_)) { + metrics::counter!("server.witness_generator.sampled_blocks", 1); + } + vlog::info!( + "Starting witness generation of type {:?} for block {}", + job.aggregation_round(), + block_number.0 + ); + + match job { + WitnessGeneratorJobInput::BasicCircuits(job) => { + Some(WitnessGeneratorArtifacts::BasicCircuits(Box::new( + basic_circuits::process_basic_circuits_job( + config, + connection_pool, + started_at, + block_number, + *job, + ), + ))) + } + WitnessGeneratorJobInput::LeafAggregation(job) => { + Some(WitnessGeneratorArtifacts::LeafAggregation(Box::new( + leaf_aggregation::process_leaf_aggregation_job(started_at, block_number, *job), + ))) + } + WitnessGeneratorJobInput::NodeAggregation(job) => { + Some(WitnessGeneratorArtifacts::NodeAggregation(Box::new( + node_aggregation::process_node_aggregation_job( + config, + started_at, + block_number, + *job, + ), + ))) + } + + WitnessGeneratorJobInput::Scheduler(job) => { + 
Some(WitnessGeneratorArtifacts::Scheduler(Box::new( + scheduler::process_scheduler_job(started_at, block_number, *job), + ))) + } + } + } +} + +#[async_trait] +impl JobProcessor for WitnessGenerator { + type Job = WitnessGeneratorJob; + type JobId = (L1BatchNumber, AggregationRound); + type JobArtifacts = Option; + + const SERVICE_NAME: &'static str = "witness_generator"; + + async fn get_next_job( + &self, + connection_pool: ConnectionPool, + ) -> Option<(Self::JobId, Self::Job)> { + let mut connection = connection_pool.access_storage().await; + let object_store = create_object_store_from_env(); + + let optional_metadata = connection + .witness_generator_dal() + .get_next_scheduler_witness_job( + self.config.witness_generation_timeout(), + self.config.max_attempts, + ); + + if let Some(metadata) = optional_metadata { + let prev_metadata = connection + .blocks_dal() + .get_block_metadata(metadata.block_number - 1); + let previous_aux_hash = prev_metadata + .as_ref() + .map_or([0u8; 32], |e| e.metadata.aux_data_hash.0); + let previous_meta_hash = + prev_metadata.map_or([0u8; 32], |e| e.metadata.meta_parameters_hash.0); + let job = scheduler::get_artifacts( + metadata, + previous_aux_hash, + previous_meta_hash, + &object_store, + ) + .await; + return Some(((job.block_number, job.job.aggregation_round()), job)); + } + + let optional_metadata = connection + .witness_generator_dal() + .get_next_node_aggregation_witness_job( + self.config.witness_generation_timeout(), + self.config.max_attempts, + ); + + if let Some(metadata) = optional_metadata { + let job = node_aggregation::get_artifacts(metadata, &object_store).await; + return Some(((job.block_number, job.job.aggregation_round()), job)); + } + + let optional_metadata = connection + .witness_generator_dal() + .get_next_leaf_aggregation_witness_job( + self.config.witness_generation_timeout(), + self.config.max_attempts, + ); + + if let Some(metadata) = optional_metadata { + let job = 
leaf_aggregation::get_artifacts(metadata, &object_store).await; + return Some(((job.block_number, job.job.aggregation_round()), job)); + } + + let optional_metadata = connection + .witness_generator_dal() + .get_next_basic_circuit_witness_job( + self.config.witness_generation_timeout(), + self.config.max_attempts, + ); + + if let Some(metadata) = optional_metadata { + let job = basic_circuits::get_artifacts(metadata.block_number, &object_store).await; + return Some(((job.block_number, job.job.aggregation_round()), job)); + } + + None + } + + async fn save_failure( + connection_pool: ConnectionPool, + job_id: (L1BatchNumber, AggregationRound), + started_at: Instant, + error: String, + ) -> () { + let config: WitnessGeneratorConfig = WitnessGeneratorConfig::from_env(); + connection_pool + .access_storage_blocking() + .witness_generator_dal() + .mark_witness_job_as_failed( + job_id.0, + job_id.1, + started_at.elapsed(), + error, + config.max_attempts, + ); + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + connection_pool: ConnectionPool, + job: WitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle> { + tokio::task::spawn_blocking(move || { + Self::process_job_sync(connection_pool.clone(), job, started_at) + }) + } + + async fn save_result( + connection_pool: ConnectionPool, + job_id: (L1BatchNumber, AggregationRound), + started_at: Instant, + optional_artifacts: Option, + ) { + match optional_artifacts { + None => (), + Some(artifacts) => { + let mut object_store = create_object_store_from_env(); + let block_number = job_id.0; + match artifacts { + WitnessGeneratorArtifacts::BasicCircuits(boxed_basic_circuit_artifacts) => { + let basic_circuit_artifacts = *boxed_basic_circuit_artifacts; + let circuits = + get_circuit_types(&basic_circuit_artifacts.serialized_circuits); + basic_circuits::save_artifacts( + block_number, + basic_circuit_artifacts, + &mut object_store, + ) + .await; + basic_circuits::update_database( + 
connection_pool, + started_at, + block_number, + circuits, + ); + } + WitnessGeneratorArtifacts::LeafAggregation( + boxed_leaf_aggregation_artifacts, + ) => { + let leaf_aggregation_artifacts = *boxed_leaf_aggregation_artifacts; + let leaf_circuits_len = leaf_aggregation_artifacts.leaf_circuits.len(); + let circuits = + get_circuit_types(&leaf_aggregation_artifacts.serialized_circuits); + leaf_aggregation::save_artifacts( + block_number, + leaf_aggregation_artifacts, + &mut object_store, + ) + .await; + leaf_aggregation::update_database( + connection_pool, + started_at, + block_number, + leaf_circuits_len, + circuits, + ); + } + WitnessGeneratorArtifacts::NodeAggregation( + boxed_node_aggregation_artifacts, + ) => { + let node_aggregation_artifacts = *boxed_node_aggregation_artifacts; + let circuits = + get_circuit_types(&node_aggregation_artifacts.serialized_circuits); + node_aggregation::save_artifacts( + block_number, + node_aggregation_artifacts, + &mut object_store, + ) + .await; + node_aggregation::update_database( + connection_pool, + started_at, + block_number, + circuits, + ); + } + WitnessGeneratorArtifacts::Scheduler(boxed_scheduler_artifacts) => { + let scheduler_artifacts = *boxed_scheduler_artifacts; + let circuits = get_circuit_types(&scheduler_artifacts.serialized_circuits); + scheduler::save_artifacts( + block_number, + scheduler_artifacts.serialized_circuits, + &mut object_store, + ) + .await; + scheduler::update_database( + connection_pool, + started_at, + block_number, + scheduler_artifacts.final_aggregation_result, + circuits, + ); + } + }; + } + } + } +} + +fn get_circuit_types(serialized_circuits: &[(String, Vec)]) -> Vec { + serialized_circuits + .iter() + .map(|(circuit, _)| circuit.clone()) + .collect() +} + +fn track_witness_generation_stage( + block_number: L1BatchNumber, + started_at: Instant, + round: AggregationRound, +) { + let stage = match round { + AggregationRound::BasicCircuits => "basic_circuits", + 
AggregationRound::LeafAggregation => "leaf_aggregation", + AggregationRound::NodeAggregation => "node_aggregation", + AggregationRound::Scheduler => "scheduler", + }; + metrics::histogram!( + "server.witness_generator.processing_time", + started_at.elapsed(), + "stage" => format!("wit_gen_{}", stage) + ); + metrics::gauge!( + "server.block_number", + block_number.0 as f64, + "stage" => format!("wit_gen_{}", stage) + ); +} + +fn serialize_circuits( + individual_circuits: &[ZkSyncCircuit>], +) -> Vec<(String, Vec)> { + individual_circuits + .iter() + .map(|circuit| { + ( + circuit.short_description().to_owned(), + bincode::serialize(&circuit).expect("failed to serialize circuit"), + ) + }) + .collect() +} + +const _SCHEDULER_CIRCUIT_INDEX: u8 = 0; diff --git a/core/bin/zksync_core/src/witness_generator/node_aggregation.rs b/core/bin/zksync_core/src/witness_generator/node_aggregation.rs new file mode 100644 index 000000000000..e5095d967264 --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/node_aggregation.rs @@ -0,0 +1,263 @@ +use std::collections::HashMap; +use std::env; +use std::time::Instant; + +use zksync_config::configs::WitnessGeneratorConfig; +use zksync_dal::ConnectionPool; +use zksync_object_store::gcs_utils::{ + aggregation_outputs_blob_url, final_node_aggregations_blob_url, leaf_layer_subqueues_blob_url, +}; +use zksync_object_store::object_store::{ + DynamicObjectStore, NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, +}; +use zksync_types::{ + circuit::{ + LEAF_CIRCUIT_INDEX, LEAF_SPLITTING_FACTOR, NODE_CIRCUIT_INDEX, NODE_SPLITTING_FACTOR, + }, + proofs::{ + AggregationRound, PrepareNodeAggregationCircuitJob, WitnessGeneratorJob, + WitnessGeneratorJobInput, WitnessGeneratorJobMetadata, + }, + zkevm_test_harness::{ + abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit, + bellman::bn256::Bn256, + bellman::plonk::better_better_cs::setup::VerificationKey, + ff::to_hex, + witness::{ + self, + 
oracle::VmWitnessOracle, + recursive_aggregation::{erase_vk_type, padding_aggregations}, + }, + LeafAggregationOutputDataWitness, NodeAggregationOutputDataWitness, + }, + L1BatchNumber, +}; +use zksync_verification_key_server::{ + get_vk_for_circuit_type, get_vks_for_basic_circuits, get_vks_for_commitment, +}; + +use crate::witness_generator; +use crate::witness_generator::track_witness_generation_stage; +use crate::witness_generator::utils::save_prover_input_artifacts; + +pub struct NodeAggregationArtifacts { + pub final_node_aggregation: NodeAggregationOutputDataWitness, + pub serialized_circuits: Vec<(String, Vec)>, +} + +pub fn process_node_aggregation_job( + config: WitnessGeneratorConfig, + started_at: Instant, + block_number: L1BatchNumber, + job: PrepareNodeAggregationCircuitJob, +) -> NodeAggregationArtifacts { + let stage_started_at = Instant::now(); + zksync_prover_utils::ensure_initial_setup_keys_present( + &config.initial_setup_key_path, + &config.key_download_url, + ); + env::set_var("CRS_FILE", config.initial_setup_key_path); + vlog::info!("Keys loaded in {:?}", stage_started_at.elapsed()); + let stage_started_at = Instant::now(); + + let verification_keys: HashMap< + u8, + VerificationKey>>, + > = get_vks_for_basic_circuits(); + + let padding_aggregations = padding_aggregations(NODE_SPLITTING_FACTOR); + + let (_, set_committment, g2_points) = + witness::recursive_aggregation::form_base_circuits_committment(get_vks_for_commitment( + verification_keys, + )); + + let node_aggregation_vk = get_vk_for_circuit_type(NODE_CIRCUIT_INDEX); + + let leaf_aggregation_vk = get_vk_for_circuit_type(LEAF_CIRCUIT_INDEX); + + let (_, leaf_aggregation_vk_committment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + leaf_aggregation_vk.clone(), + )); + + let (_, node_aggregation_vk_committment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + node_aggregation_vk, + )); + + vlog::info!( + 
"commitments: basic set: {:?}, leaf: {:?}, node: {:?}", + to_hex(&set_committment), + to_hex(&leaf_aggregation_vk_committment), + to_hex(&node_aggregation_vk_committment) + ); + vlog::info!("Commitments generated in {:?}", stage_started_at.elapsed()); + + // fs::write("previous_level_proofs.bincode", bincode::serialize(&job.previous_level_proofs).unwrap()).unwrap(); + // fs::write("leaf_aggregation_vk.bincode", bincode::serialize(&leaf_aggregation_vk).unwrap()).unwrap(); + // fs::write("previous_level_leafs_aggregations.bincode", bincode::serialize(&job.previous_level_leafs_aggregations).unwrap()).unwrap(); + // fs::write("previous_sequence.bincode", bincode::serialize(&job.previous_sequence).unwrap()).unwrap(); + // fs::write("padding_aggregations.bincode", bincode::serialize(&padding_aggregations).unwrap()).unwrap(); + // fs::write("set_committment.bincode", bincode::serialize(&set_committment).unwrap()).unwrap(); + // fs::write("node_aggregation_vk_committment.bincode", bincode::serialize(&node_aggregation_vk_committment).unwrap()).unwrap(); + // fs::write("leaf_aggregation_vk_committment.bincode", bincode::serialize(&leaf_aggregation_vk_committment).unwrap()).unwrap(); + // fs::write("g2_points.bincode", bincode::serialize(&g2_points).unwrap()).unwrap(); + + let stage_started_at = Instant::now(); + let (_, final_node_aggregations, node_circuits) = + zksync_types::zkevm_test_harness::witness::recursive_aggregation::prepare_node_aggregations( + job.previous_level_proofs, + leaf_aggregation_vk, + true, + 0, + job.previous_level_leafs_aggregations, + Vec::default(), + job.previous_sequence, + LEAF_SPLITTING_FACTOR, + NODE_SPLITTING_FACTOR, + padding_aggregations, + set_committment, + node_aggregation_vk_committment, + leaf_aggregation_vk_committment, + g2_points, + ); + + vlog::info!( + "prepare_node_aggregations took {:?}", + stage_started_at.elapsed() + ); + + assert_eq!( + node_circuits.len(), + 1, + "prepare_node_aggregations returned more than one circuit" + 
); + assert_eq!( + final_node_aggregations.len(), + 1, + "prepare_node_aggregations returned more than one node aggregation" + ); + + let serialized_circuits: Vec<(String, Vec)> = + witness_generator::serialize_circuits(&node_circuits); + + vlog::info!( + "Node witness generation for block {} is complete in {:?}. Number of circuits: {}", + block_number.0, + started_at.elapsed(), + node_circuits.len() + ); + + NodeAggregationArtifacts { + final_node_aggregation: final_node_aggregations.into_iter().next().unwrap(), + serialized_circuits, + } +} + +pub fn update_database( + connection_pool: ConnectionPool, + started_at: Instant, + block_number: L1BatchNumber, + circuits: Vec, +) { + let mut connection = connection_pool.access_storage_blocking(); + let mut transaction = connection.start_transaction_blocking(); + + // inserts artifacts into the scheduler_witness_jobs table + // and advances it to waiting_for_proofs status + transaction + .witness_generator_dal() + .save_node_aggregation_artifacts(block_number); + transaction.prover_dal().insert_prover_jobs( + block_number, + circuits, + AggregationRound::NodeAggregation, + ); + transaction + .witness_generator_dal() + .mark_witness_job_as_successful( + block_number, + AggregationRound::NodeAggregation, + started_at.elapsed(), + ); + + transaction.commit_blocking(); + track_witness_generation_stage(block_number, started_at, AggregationRound::NodeAggregation); +} + +pub async fn get_artifacts( + metadata: WitnessGeneratorJobMetadata, + object_store: &DynamicObjectStore, +) -> WitnessGeneratorJob { + let leaf_layer_subqueues_serialized = object_store + .get( + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + leaf_layer_subqueues_blob_url(metadata.block_number), + ) + .expect( + "leaf_layer_subqueues is not found in a `queued` `node_aggregation_witness_jobs` job", + ); + let leaf_layer_subqueues = bincode::deserialize::< + Vec< + zksync_types::zkevm_test_harness::encodings::QueueSimulator< + Bn256, + 
zksync_types::zkevm_test_harness::encodings::recursion_request::RecursionRequest< + Bn256, + >, + 2, + 2, + >, + >, + >(&leaf_layer_subqueues_serialized) + .expect("leaf_layer_subqueues deserialization failed"); + + let aggregation_outputs_serialized = object_store + .get( + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + aggregation_outputs_blob_url(metadata.block_number), + ) + .expect( + "aggregation_outputs is not found in a `queued` `node_aggregation_witness_jobs` job", + ); + let aggregation_outputs = bincode::deserialize::>>( + &aggregation_outputs_serialized, + ) + .expect("aggregation_outputs deserialization failed"); + + WitnessGeneratorJob { + block_number: metadata.block_number, + job: WitnessGeneratorJobInput::NodeAggregation(Box::new( + PrepareNodeAggregationCircuitJob { + previous_level_proofs: metadata.proofs, + previous_level_leafs_aggregations: aggregation_outputs, + previous_sequence: leaf_layer_subqueues, + }, + )), + } +} + +pub async fn save_artifacts( + block_number: L1BatchNumber, + artifacts: NodeAggregationArtifacts, + object_store: &mut DynamicObjectStore, +) { + let final_node_aggregations_serialized = bincode::serialize(&artifacts.final_node_aggregation) + .expect("cannot serialize final_node_aggregations"); + + object_store + .put( + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, + final_node_aggregations_blob_url(block_number), + final_node_aggregations_serialized, + ) + .unwrap(); + save_prover_input_artifacts( + block_number, + artifacts.serialized_circuits, + object_store, + AggregationRound::NodeAggregation, + ) + .await; +} diff --git a/core/bin/zksync_core/src/witness_generator/precalculated_merkle_paths_provider.rs b/core/bin/zksync_core/src/witness_generator/precalculated_merkle_paths_provider.rs new file mode 100644 index 000000000000..6215abc302a6 --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/precalculated_merkle_paths_provider.rs @@ -0,0 +1,267 @@ +use serde::{Deserialize, Serialize}; +use std::convert::TryInto; +use 
zksync_types::proofs::{PrepareBasicCircuitsJob, StorageLogMetadata}; +use zksync_types::zkevm_test_harness::witness::tree::BinaryHasher; +use zksync_types::zkevm_test_harness::witness::tree::{ + BinarySparseStorageTree, EnumeratedBinaryLeaf, LeafQuery, ZkSyncStorageLeaf, +}; +use zksync_types::zkevm_test_harness::Blake2s256; + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct PrecalculatedMerklePathsProvider { + // We keep the root hash of the last processed leaf, as it is needed by the the witness generator. + pub root_hash: Vec, + // The ordered list of expected leaves to be interacted with + pub pending_leaves: Vec, + // The index that would be assigned to the next new leaf + pub next_enumeration_index: u64, + // For every Storage Write Log we expect two invocations: `get_leaf` and `insert_leaf`. + // We set this flag to `true` after the initial `get_leaf` is invoked. + pub is_get_leaf_invoked: bool, +} + +impl PrecalculatedMerklePathsProvider { + pub fn new(input: PrepareBasicCircuitsJob) -> Self { + let root_hash = input + .merkle_paths + .first() + .map(|e| e.root_hash.clone()) + .unwrap_or_else(|| vec![0; 32]); + + vlog::debug!("Initializing PrecalculatedMerklePathsProvider. 
Initial root_hash: {:?}, initial next_enumeration_index: {:?}", root_hash, input.next_enumeration_index); + Self { + root_hash, + pending_leaves: input.merkle_paths, + next_enumeration_index: input.next_enumeration_index, + is_get_leaf_invoked: false, + } + } +} + +impl BinarySparseStorageTree<256, 32, 32, 8, 32, Blake2s256, ZkSyncStorageLeaf> + for PrecalculatedMerklePathsProvider +{ + fn empty() -> Self { + unreachable!("`empty` must not be invoked by the witness generator code"); + } + + fn next_enumeration_index(&self) -> u64 { + self.next_enumeration_index + } + + fn set_next_enumeration_index(&mut self, _value: u64) { + unreachable!( + "`set_next_enumeration_index` must not be invoked by the witness generator code" + ); + } + + fn root(&self) -> [u8; 32] { + self.root_hash.clone().try_into().unwrap() + } + + fn get_leaf(&mut self, index: &[u8; 32]) -> LeafQuery<256, 32, 32, 32, ZkSyncStorageLeaf> { + vlog::trace!( + "Invoked get_leaf({:?}). pending leaves size: {:?}. current root: {:?}", + index, + self.pending_leaves.len(), + self.root() + ); + assert!( + !self.is_get_leaf_invoked, + "`get_leaf()` invoked more than once or get_leaf is invoked when insert_leaf was expected" + ); + let next = self.pending_leaves.first().unwrap_or_else(|| { + panic!( + "invoked `get_leaf({:?})` with empty `pending_leaves`", + index + ) + }); + self.root_hash = next.root_hash.clone(); + + assert_eq!( + &next.leaf_hashed_key_array(), + index, + "`get_leaf` hashed key mismatch" + ); + + let mut res = LeafQuery { + leaf: ZkSyncStorageLeaf { + index: next.leaf_enumeration_index, + value: next.value_read, + }, + first_write: next.first_write, + index: *index, + merkle_path: next.merkle_paths_array(), + }; + + if next.is_write { + // If it is a write, the next invocation will be `insert_leaf` with the very same parameters + self.is_get_leaf_invoked = true; + if res.first_write { + res.leaf.index = 0; + } + } else { + // If it is a read, the next invocation will relate to the next 
`pending_leaf` + self.pending_leaves.remove(0); + }; + + res + } + + fn insert_leaf( + &mut self, + index: &[u8; 32], + leaf: ZkSyncStorageLeaf, + ) -> LeafQuery<256, 32, 32, 32, ZkSyncStorageLeaf> { + vlog::trace!( + "Invoked insert_leaf({:?}). pending leaves size: {:?}. current root: {:?}", + index, + self.pending_leaves.len(), + self.root() + ); + + assert!( + self.is_get_leaf_invoked, + "`get_leaf()` is expected to be invoked before `insert_leaf()`" + ); + let next = self.pending_leaves.remove(0); + self.root_hash = next.root_hash.clone(); + + assert!( + next.is_write, + "invoked `insert_leaf({:?})`, but get_leaf() expected", + index + ); + + assert_eq!( + &next.leaf_hashed_key_array(), + index, + "insert_leaf hashed key mismatch", + ); + + assert_eq!( + &next.value_written, &leaf.value, + "insert_leaf enumeration index mismatch", + ); + + // reset is_get_leaf_invoked for the next get/insert invocation + self.is_get_leaf_invoked = false; + + // if this insert was in fact the very first insert, it should bump the `next_enumeration_index` + self.next_enumeration_index = self + .next_enumeration_index + .max(next.leaf_enumeration_index + 1); + + LeafQuery { + leaf: ZkSyncStorageLeaf { + index: next.leaf_enumeration_index, + value: next.value_written, + }, + first_write: next.first_write, + index: *index, + merkle_path: next.merkle_paths_array(), + } + } + + // Method to segregate the given leafs into 2 types: + // * leafs that are updated for first time + // * leafs that are not updated for the first time. + // The consumer of method must ensure that the length of passed argument indexes and leafs are same, + // and the merkle paths specified during the initialization must contains same number of write + // leaf nodes as that of the leafs passed as argument. 
+ fn filter_renumerate<'a>( + &self, + mut indexes: impl Iterator, + mut leafs: impl Iterator, + ) -> ( + u64, + Vec<([u8; 32], ZkSyncStorageLeaf)>, + Vec, + ) { + vlog::trace!( + "invoked filter_renumerate(), pending leaves size: {:?}", + self.pending_leaves.len() + ); + let mut first_writes = vec![]; + let mut updates = vec![]; + let write_pending_leaves = self + .pending_leaves + .iter() + .filter(|&l| l.is_write) + .collect::>(); + let write_pending_leaves_iter = write_pending_leaves.iter(); + let mut length = 0; + for (&pending_leaf, (idx, mut leaf)) in + write_pending_leaves_iter.zip((&mut indexes).zip(&mut leafs)) + { + leaf.set_index(pending_leaf.leaf_enumeration_index); + if pending_leaf.first_write { + first_writes.push((*idx, leaf)); + } else { + updates.push(leaf); + } + length += 1; + } + assert_eq!( + length, + write_pending_leaves.len(), + "pending leaves: len({}) must be of same length as leafs and indexes: len({})", + write_pending_leaves.len(), + length + ); + assert!( + indexes.next().is_none(), + "indexes must be of same length as leafs and pending leaves: len({})", + write_pending_leaves.len() + ); + assert!( + leafs.next().is_none(), + "leafs must be of same length as indexes and pending leaves: len({})", + write_pending_leaves.len() + ); + (self.next_enumeration_index, first_writes, updates) + } + + fn verify_inclusion( + root: &[u8; 32], + query: &LeafQuery<256, 32, 32, 32, ZkSyncStorageLeaf>, + ) -> bool { + //copied from zkevm_test_harness/src/witness/tree/mod.rs with minor changes + vlog::trace!( + "invoked verify_inclusion. 
Index: {:?}, root: {:?})", + query.index, + root + ); + + let mut leaf_bytes = vec![0u8; 8 + 32]; // can make a scratch space somewhere later on + leaf_bytes[8..].copy_from_slice(query.leaf.value()); + + let leaf_index_bytes = query.leaf.current_index().to_be_bytes(); + leaf_bytes[0..8].copy_from_slice(&leaf_index_bytes); + + let leaf_hash = Blake2s256::leaf_hash(&leaf_bytes); + + let mut current_hash = leaf_hash; + for level in 0..256 { + let (l, r) = if is_right_side_node(&query.index, level) { + (&query.merkle_path[level], ¤t_hash) + } else { + (¤t_hash, &query.merkle_path[level]) + }; + + let this_level_hash = Blake2s256::node_hash(level, l, r); + + current_hash = this_level_hash; + } + + root == ¤t_hash + } +} + +fn is_right_side_node(index: &[u8; N], depth: usize) -> bool { + debug_assert!(depth < N * 8); + let byte_idx = depth / 8; + let bit_idx = depth % 8; + + index[byte_idx] & (1u8 << bit_idx) != 0 +} diff --git a/core/bin/zksync_core/src/witness_generator/scheduler.rs b/core/bin/zksync_core/src/witness_generator/scheduler.rs new file mode 100644 index 000000000000..ff5bb6aca098 --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/scheduler.rs @@ -0,0 +1,235 @@ +use std::collections::HashMap; +use std::time::Instant; + +use zksync_dal::ConnectionPool; +use zksync_object_store::gcs_utils::{ + final_node_aggregations_blob_url, scheduler_witness_blob_url, +}; +use zksync_object_store::object_store::{DynamicObjectStore, SCHEDULER_WITNESS_JOBS_BUCKET_PATH}; +use zksync_types::{ + circuit::{ + LEAF_CIRCUIT_INDEX, LEAF_SPLITTING_FACTOR, NODE_CIRCUIT_INDEX, NODE_SPLITTING_FACTOR, + }, + proofs::{ + AggregationRound, PrepareSchedulerCircuitJob, WitnessGeneratorJob, + WitnessGeneratorJobInput, WitnessGeneratorJobMetadata, + }, + zkevm_test_harness::{ + abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit, + bellman::{bn256::Bn256, plonk::better_better_cs::setup::VerificationKey}, + sync_vm::scheduler::BlockApplicationWitness, + witness::{self, 
oracle::VmWitnessOracle, recursive_aggregation::erase_vk_type}, + NodeAggregationOutputDataWitness, SchedulerCircuitInstanceWitness, + }, + L1BatchNumber, +}; +use zksync_verification_key_server::{ + get_vk_for_circuit_type, get_vks_for_basic_circuits, get_vks_for_commitment, +}; + +use crate::witness_generator; +use crate::witness_generator::track_witness_generation_stage; +use crate::witness_generator::utils::save_prover_input_artifacts; + +pub struct SchedulerArtifacts { + pub final_aggregation_result: BlockApplicationWitness, + pub serialized_circuits: Vec<(String, Vec)>, +} + +pub fn process_scheduler_job( + started_at: Instant, + block_number: L1BatchNumber, + job: PrepareSchedulerCircuitJob, +) -> SchedulerArtifacts { + let stage_started_at = Instant::now(); + + let verification_keys: HashMap< + u8, + VerificationKey>>, + > = get_vks_for_basic_circuits(); + + let (_, set_committment, g2_points) = + witness::recursive_aggregation::form_base_circuits_committment(get_vks_for_commitment( + verification_keys, + )); + + vlog::info!( + "Verification keys loaded in {:?}", + stage_started_at.elapsed() + ); + + let leaf_aggregation_vk = get_vk_for_circuit_type(LEAF_CIRCUIT_INDEX); + + let node_aggregation_vk = get_vk_for_circuit_type(NODE_CIRCUIT_INDEX); + + let (_, leaf_aggregation_vk_committment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + leaf_aggregation_vk, + )); + + let (_, node_aggregation_vk_committment) = + witness::recursive_aggregation::compute_vk_encoding_and_committment(erase_vk_type( + node_aggregation_vk.clone(), + )); + + vlog::info!("Commitments generated in {:?}", stage_started_at.elapsed()); + let stage_started_at = Instant::now(); + + // fs::write("incomplete_scheduler_witness.bincode", bincode::serialize(&job.incomplete_scheduler_witness).unwrap()).unwrap(); + // fs::write("node_final_proof_level_proofs.bincode", bincode::serialize(&job.node_final_proof_level_proof).unwrap()).unwrap(); + // 
fs::write("node_aggregation_vk.bincode", bincode::serialize(&node_aggregation_vk).unwrap()).unwrap(); + // fs::write("final_node_aggregations.bincode", bincode::serialize(&job.final_node_aggregations).unwrap()).unwrap(); + // fs::write("leaf_vks_committment.bincode", bincode::serialize(&set_committment).unwrap()).unwrap(); + // fs::write("node_aggregation_vk_committment.bincode", bincode::serialize(&node_aggregation_vk_committment).unwrap()).unwrap(); + // fs::write("leaf_aggregation_vk_committment.bincode", bincode::serialize(&leaf_aggregation_vk_committment).unwrap()).unwrap(); + // fs::write("previous_aux_hash.bincode", bincode::serialize(&job.previous_aux_hash).unwrap()).unwrap(); + // fs::write("previous_meta_hash.bincode", bincode::serialize(&job.previous_meta_hash).unwrap()).unwrap(); + // fs::write("g2_points.bincode", bincode::serialize(&g2_points).unwrap()).unwrap(); + + let (scheduler_circuit, final_aggregation_result) = + zksync_types::zkevm_test_harness::witness::recursive_aggregation::prepare_scheduler_circuit( + job.incomplete_scheduler_witness, + job.node_final_proof_level_proof, + node_aggregation_vk, + job.final_node_aggregations, + set_committment, + node_aggregation_vk_committment, + leaf_aggregation_vk_committment, + job.previous_aux_hash, + job.previous_meta_hash, + (LEAF_SPLITTING_FACTOR * NODE_SPLITTING_FACTOR) as u32, + g2_points, + ); + + vlog::info!( + "prepare_scheduler_circuit took {:?}", + stage_started_at.elapsed() + ); + + let serialized_circuits: Vec<(String, Vec)> = + witness_generator::serialize_circuits(&vec![scheduler_circuit]); + + vlog::info!( + "Scheduler generation for block {} is complete in {:?}", + block_number.0, + started_at.elapsed() + ); + + SchedulerArtifacts { + final_aggregation_result, + serialized_circuits, + } +} + +pub fn update_database( + connection_pool: ConnectionPool, + started_at: Instant, + block_number: L1BatchNumber, + final_aggregation_result: BlockApplicationWitness, + circuits: Vec, +) { + let mut 
connection = connection_pool.access_storage_blocking(); + let mut transaction = connection.start_transaction_blocking(); + let block = transaction + .blocks_dal() + .get_block_metadata(block_number) + .expect("L1 batch should exist"); + + assert_eq!( + block.metadata.aux_data_hash.0, final_aggregation_result.aux_data_hash, + "Commitment for aux data is wrong" + ); + + assert_eq!( + block.metadata.pass_through_data_hash.0, final_aggregation_result.passthrough_data_hash, + "Commitment for pass through data is wrong" + ); + + assert_eq!( + block.metadata.meta_parameters_hash.0, final_aggregation_result.meta_data_hash, + "Commitment for metadata is wrong" + ); + + assert_eq!( + block.metadata.commitment.0, final_aggregation_result.block_header_hash, + "Commitment is wrong" + ); + + transaction.prover_dal().insert_prover_jobs( + block_number, + circuits, + AggregationRound::Scheduler, + ); + + transaction + .witness_generator_dal() + .save_final_aggregation_result( + block_number, + final_aggregation_result.aggregation_result_coords, + ); + + transaction + .witness_generator_dal() + .mark_witness_job_as_successful( + block_number, + AggregationRound::Scheduler, + started_at.elapsed(), + ); + + transaction.commit_blocking(); + track_witness_generation_stage(block_number, started_at, AggregationRound::Scheduler); +} + +pub async fn save_artifacts( + block_number: L1BatchNumber, + serialized_circuits: Vec<(String, Vec)>, + object_store: &mut DynamicObjectStore, +) { + save_prover_input_artifacts( + block_number, + serialized_circuits, + object_store, + AggregationRound::Scheduler, + ) + .await; +} + +pub async fn get_artifacts( + metadata: WitnessGeneratorJobMetadata, + previous_aux_hash: [u8; 32], + previous_meta_hash: [u8; 32], + object_store: &DynamicObjectStore, +) -> WitnessGeneratorJob { + let scheduler_witness_serialized = object_store + .get( + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, + scheduler_witness_blob_url(metadata.block_number), + ) + .unwrap(); + let 
scheduler_witness = bincode::deserialize::>( + &scheduler_witness_serialized, + ) + .expect("scheduler_witness deserialization failed"); + + let final_node_aggregations_serialized = object_store + .get( + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, + final_node_aggregations_blob_url(metadata.block_number), + ) + .expect("final_node_aggregations is not found in a `queued` `scheduler_witness_jobs` job"); + let final_node_aggregations = bincode::deserialize::>( + &final_node_aggregations_serialized, + ) + .expect("final_node_aggregations deserialization failed"); + + WitnessGeneratorJob { + block_number: metadata.block_number, + job: WitnessGeneratorJobInput::Scheduler(Box::new(PrepareSchedulerCircuitJob { + incomplete_scheduler_witness: scheduler_witness, + final_node_aggregations, + node_final_proof_level_proof: metadata.proofs.into_iter().next().unwrap(), + previous_aux_hash, + previous_meta_hash, + })), + } +} diff --git a/core/bin/zksync_core/src/witness_generator/tests.rs b/core/bin/zksync_core/src/witness_generator/tests.rs new file mode 100644 index 000000000000..73dd23343c23 --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/tests.rs @@ -0,0 +1,286 @@ +use crate::witness_generator::precalculated_merkle_paths_provider::PrecalculatedMerklePathsProvider; +use std::convert::TryInto; +use zksync_types::proofs::StorageLogMetadata; +use zksync_types::zkevm_test_harness::witness::tree::{BinarySparseStorageTree, ZkSyncStorageLeaf}; + +#[test] +fn test_filter_renumerate_all_first_writes() { + let logs = vec![ + generate_storage_log_metadata( + "DDC60818D8F7CFE42514F8EA3CC52806", + "12E9FF974B0FAEE514AD4AC50E2BDC6E", + false, + false, + 1, + ), + generate_storage_log_metadata( + "BDA1617CC883E2251D3BE0FD9B3F3064", + "D14917FCB067922F92322025D1BA50B4", + true, + true, + 2, + ), + generate_storage_log_metadata( + "77F035AD50811CFABD956F6F1B48E482", + "7CF33B959916CC9B56F21C427ED7CA18", + true, + true, + 3, + ), + ]; + let precalculated_merkle_paths_provider = 
PrecalculatedMerklePathsProvider { + root_hash: hex::decode("4AF44B3D5D4F9C7B117A68351AAB65CF").unwrap(), + pending_leaves: logs, + next_enumeration_index: 4, + is_get_leaf_invoked: false, + }; + let (leafs, indices) = generate_leafs_indices(); + + let (_, first_writes, updates) = + precalculated_merkle_paths_provider.filter_renumerate(indices.iter(), leafs.into_iter()); + assert_eq!(2, first_writes.len()); + assert_eq!(0, updates.len()); +} + +#[test] +fn test_filter_renumerate_all_repeated_writes() { + let logs = vec![ + generate_storage_log_metadata( + "DDC60818D8F7CFE42514F8EA3CC52806", + "12E9FF974B0FAEE514AD4AC50E2BDC6E", + false, + false, + 1, + ), + generate_storage_log_metadata( + "BDA1617CC883E2251D3BE0FD9B3F3064", + "D14917FCB067922F92322025D1BA50B4", + true, + false, + 2, + ), + generate_storage_log_metadata( + "77F035AD50811CFABD956F6F1B48E482", + "7CF33B959916CC9B56F21C427ED7CA18", + true, + false, + 3, + ), + ]; + let precalculated_merkle_paths_provider = PrecalculatedMerklePathsProvider { + root_hash: hex::decode("4AF44B3D5D4F9C7B117A68351AAB65CF").unwrap(), + pending_leaves: logs, + next_enumeration_index: 4, + is_get_leaf_invoked: false, + }; + let (leafs, indices) = generate_leafs_indices(); + + let (_, first_writes, updates) = + precalculated_merkle_paths_provider.filter_renumerate(indices.iter(), leafs.into_iter()); + assert_eq!(0, first_writes.len()); + assert_eq!(2, updates.len()); +} + +#[test] +fn test_filter_renumerate_repeated_writes_with_first_write() { + let logs = vec![ + generate_storage_log_metadata( + "DDC60818D8F7CFE42514F8EA3CC52806", + "12E9FF974B0FAEE514AD4AC50E2BDC6E", + false, + false, + 1, + ), + generate_storage_log_metadata( + "BDA1617CC883E2251D3BE0FD9B3F3064", + "D14917FCB067922F92322025D1BA50B4", + true, + false, + 2, + ), + generate_storage_log_metadata( + "77F035AD50811CFABD956F6F1B48E482", + "7CF33B959916CC9B56F21C427ED7CA18", + true, + true, + 3, + ), + ]; + let precalculated_merkle_paths_provider = 
PrecalculatedMerklePathsProvider { + root_hash: hex::decode("4AF44B3D5D4F9C7B117A68351AAB65CF").unwrap(), + pending_leaves: logs, + next_enumeration_index: 4, + is_get_leaf_invoked: false, + }; + let (leafs, indices) = generate_leafs_indices(); + + let (_, first_writes, updates) = + precalculated_merkle_paths_provider.filter_renumerate(indices.iter(), leafs.into_iter()); + assert_eq!(1, first_writes.len()); + assert_eq!(1, updates.len()); + assert_eq!(3, first_writes[0].1.index); + assert_eq!(2, updates[0].index); +} + +#[test] +#[should_panic(expected = "leafs must be of same length as indexes")] +fn test_filter_renumerate_panic_when_leafs_and_indices_are_of_different_length() { + let logs = vec![ + generate_storage_log_metadata( + "DDC60818D8F7CFE42514F8EA3CC52806", + "12E9FF974B0FAEE514AD4AC50E2BDC6E", + false, + false, + 1, + ), + generate_storage_log_metadata( + "BDA1617CC883E2251D3BE0FD9B3F3064", + "D14917FCB067922F92322025D1BA50B4", + true, + false, + 2, + ), + generate_storage_log_metadata( + "77F035AD50811CFABD956F6F1B48E482", + "7CF33B959916CC9B56F21C427ED7CA18", + true, + true, + 3, + ), + ]; + let precalculated_merkle_paths_provider = PrecalculatedMerklePathsProvider { + root_hash: hex::decode("4AF44B3D5D4F9C7B117A68351AAB65CF").unwrap(), + pending_leaves: logs, + next_enumeration_index: 4, + is_get_leaf_invoked: false, + }; + + let leafs = vec![ + generate_leaf( + 1, + "AD558076F725ED8B5E5B42920422E9BEAD558076F725ED8B5E5B42920422E9BE", + ), + generate_leaf( + 1, + "98A0EADBD6118391B744252DA348873C98A0EADBD6118391B744252DA348873C", + ), + generate_leaf( + 2, + "72868932BBB002043AF50363EEB65AE172868932BBB002043AF50363EEB65AE1", + ), + ]; + let indices = vec![ + string_to_array("5534D106E0B590953AC0FC7D65CA3B2E5534D106E0B590953AC0FC7D65CA3B2E"), + string_to_array("00309D72EF0AD9786DA9044109E1704B00309D72EF0AD9786DA9044109E1704B"), + ]; + + precalculated_merkle_paths_provider.filter_renumerate(indices.iter(), leafs.into_iter()); +} + +#[test] 
+#[should_panic(expected = "indexes must be of same length as leafs and pending leaves")] +fn test_filter_renumerate_panic_when_indices_and_pending_leaves_are_of_different_length() { + let logs = vec![ + generate_storage_log_metadata( + "DDC60818D8F7CFE42514F8EA3CC52806", + "12E9FF974B0FAEE514AD4AC50E2BDC6E", + false, + false, + 1, + ), + generate_storage_log_metadata( + "BDA1617CC883E2251D3BE0FD9B3F3064", + "D14917FCB067922F92322025D1BA50B4", + true, + false, + 2, + ), + generate_storage_log_metadata( + "77F035AD50811CFABD956F6F1B48E482", + "7CF33B959916CC9B56F21C427ED7CA18", + true, + true, + 3, + ), + ]; + let precalculated_merkle_paths_provider = PrecalculatedMerklePathsProvider { + root_hash: hex::decode("4AF44B3D5D4F9C7B117A68351AAB65CF").unwrap(), + pending_leaves: logs, + next_enumeration_index: 4, + is_get_leaf_invoked: false, + }; + + let leafs = vec![ + generate_leaf( + 1, + "AD558076F725ED8B5E5B42920422E9BEAD558076F725ED8B5E5B42920422E9BE", + ), + generate_leaf( + 1, + "98A0EADBD6118391B744252DA348873C98A0EADBD6118391B744252DA348873C", + ), + generate_leaf( + 2, + "72868932BBB002043AF50363EEB65AE172868932BBB002043AF50363EEB65AE1", + ), + ]; + let indices = vec![ + string_to_array("5534D106E0B590953AC0FC7D65CA3B2E5534D106E0B590953AC0FC7D65CA3B2E"), + string_to_array("00309D72EF0AD9786DA9044109E1704B00309D72EF0AD9786DA9044109E1704B"), + string_to_array("930058748339A83E06F0D1D22937E92A930058748339A83E06F0D1D22937E92A"), + ]; + + precalculated_merkle_paths_provider.filter_renumerate(indices.iter(), leafs.into_iter()); +} + +fn generate_leafs_indices() -> (Vec, Vec<[u8; 32]>) { + let leafs = vec![ + generate_leaf( + 1, + "AD558076F725ED8B5E5B42920422E9BEAD558076F725ED8B5E5B42920422E9BE", + ), + generate_leaf( + 2, + "72868932BBB002043AF50363EEB65AE172868932BBB002043AF50363EEB65AE1", + ), + ]; + let indices = vec![ + string_to_array("5534D106E0B590953AC0FC7D65CA3B2E5534D106E0B590953AC0FC7D65CA3B2E"), + 
string_to_array("00309D72EF0AD9786DA9044109E1704B00309D72EF0AD9786DA9044109E1704B"), + ]; + (leafs, indices) +} + +fn generate_leaf(index: u64, value: &str) -> ZkSyncStorageLeaf { + ZkSyncStorageLeaf { + index, + value: string_to_array(value), + } +} + +fn string_to_array(value: &str) -> [u8; 32] { + let array_value: [u8; 32] = hex::decode(value) + .expect("Hex decoding failed") + .try_into() + .unwrap(); + array_value +} + +fn generate_storage_log_metadata( + root_hash: &str, + merkle_path: &str, + is_write: bool, + first_write: bool, + leaf_enumeration_index: u64, +) -> StorageLogMetadata { + StorageLogMetadata { + root_hash: hex::decode(root_hash).expect("Hex decoding failed"), + is_write, + first_write, + merkle_paths: vec![hex::decode(merkle_path).expect("Hex decoding failed")], + leaf_hashed_key: Default::default(), + leaf_enumeration_index, + value_written: [0; 32], + value_read: [0; 32], + } +} diff --git a/core/bin/zksync_core/src/witness_generator/utils.rs b/core/bin/zksync_core/src/witness_generator/utils.rs new file mode 100644 index 000000000000..1749c3a9740b --- /dev/null +++ b/core/bin/zksync_core/src/witness_generator/utils.rs @@ -0,0 +1,34 @@ +use vm::zk_evm::abstractions::MEMORY_CELLS_OTHER_PAGES; +use vm::zk_evm::ethereum_types::U256; +use zksync_object_store::gcs_utils::prover_circuit_input_blob_url; +use zksync_object_store::object_store::{DynamicObjectStore, PROVER_JOBS_BUCKET_PATH}; +use zksync_types::{proofs::AggregationRound, L1BatchNumber}; + +pub fn expand_bootloader_contents(packed: Vec<(usize, U256)>) -> Vec { + let mut result: [u8; MEMORY_CELLS_OTHER_PAGES] = [0; MEMORY_CELLS_OTHER_PAGES]; + + for (offset, value) in packed { + value.to_big_endian(&mut result[(offset * 32)..(offset + 1) * 32]); + } + + result.to_vec() +} + +pub async fn save_prover_input_artifacts( + block_number: L1BatchNumber, + serialized_circuits: Vec<(String, Vec)>, + object_store: &mut DynamicObjectStore, + aggregation_round: AggregationRound, +) { + for 
(sequence_number, (circuit, input)) in serialized_circuits.into_iter().enumerate() { + let circuit_input_blob_url = prover_circuit_input_blob_url( + block_number, + sequence_number, + circuit, + aggregation_round, + ); + object_store + .put(PROVER_JOBS_BUCKET_PATH, circuit_input_blob_url, input) + .unwrap(); + } +} diff --git a/core/lib/basic_types/Cargo.toml b/core/lib/basic_types/Cargo.toml new file mode 100644 index 000000000000..419a4613716d --- /dev/null +++ b/core/lib/basic_types/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "zksync_basic_types" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +web3 = { version= "0.18.0", default-features = false, features = ["http-rustls-tls", "test", "signing"] } +serde = { version = "1.0", features = ["derive"] } diff --git a/core/lib/basic_types/src/lib.rs b/core/lib/basic_types/src/lib.rs new file mode 100644 index 000000000000..616e47a71ab2 --- /dev/null +++ b/core/lib/basic_types/src/lib.rs @@ -0,0 +1,153 @@ +//! The declaration of the most primitive types used in zkSync network. +//! +//! Most of them are just re-exported from the `web3` crate. + +#[macro_use] +mod macros; + +pub mod network; + +use serde::{Deserialize, Serialize}; +use std::convert::{Infallible, TryFrom, TryInto}; +use std::fmt; +use std::num::ParseIntError; +use std::ops::{Add, Deref, DerefMut, Sub}; +use std::str::FromStr; + +pub use web3; +pub use web3::ethabi; +pub use web3::types::{ + Address, Bytes, Log, TransactionRequest, H128, H160, H2048, H256, U128, U256, U64, +}; + +/// Account place in the global state tree is uniquely identified by its address. +/// Binary this type is represented by 160 bit big-endian representation of account address. 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Hash, Ord, PartialOrd)] +pub struct AccountTreeId { + address: Address, +} + +impl AccountTreeId { + pub fn new(address: Address) -> Self { + Self { address } + } + + pub fn address(&self) -> &Address { + &self.address + } + + #[allow(clippy::wrong_self_convention)] // In that case, reference makes more sense. + pub fn to_fixed_bytes(&self) -> [u8; 20] { + let mut result = [0u8; 20]; + result.copy_from_slice(&self.address.to_fixed_bytes()); + result + } + + pub fn from_fixed_bytes(value: [u8; 20]) -> Self { + let address = Address::from_slice(&value); + Self { address } + } +} + +impl Default for AccountTreeId { + fn default() -> Self { + Self { + address: Address::zero(), + } + } +} + +#[allow(clippy::from_over_into)] +impl Into for AccountTreeId { + fn into(self) -> U256 { + let mut be_data = [0u8; 32]; + be_data[12..].copy_from_slice(&self.to_fixed_bytes()); + U256::from_big_endian(&be_data) + } +} + +impl TryFrom for AccountTreeId { + type Error = Infallible; + + fn try_from(val: U256) -> Result { + let mut be_data = vec![0; 32]; + val.to_big_endian(&mut be_data); + Ok(Self::from_fixed_bytes(be_data[12..].try_into().unwrap())) + } +} + +basic_type!( + /// zkSync network block sequential index. + MiniblockNumber, + u32 +); + +basic_type!( + /// zkSync L1 batch sequential index. + L1BatchNumber, + u32 +); + +basic_type!( + /// Ethereum network block sequential index. + L1BlockNumber, + u32 +); + +basic_type!( + /// zkSync account nonce. + Nonce, + u32 +); + +basic_type!( + /// Unique identifier of the priority operation in the zkSync network. + PriorityOpId, + u64 +); + +basic_type!( + /// ChainId in the Ethereum network. + L1ChainId, + u8 +); + +basic_type!( + /// ChainId in the ZkSync network. 
+ L2ChainId, + u16 +); + +#[allow(clippy::derivable_impls)] +impl Default for MiniblockNumber { + fn default() -> Self { + Self(0) + } +} + +#[allow(clippy::derivable_impls)] +impl Default for L1BatchNumber { + fn default() -> Self { + Self(0) + } +} + +#[allow(clippy::derivable_impls)] +impl Default for L1BlockNumber { + fn default() -> Self { + Self(0) + } +} + +impl Default for L2ChainId { + fn default() -> Self { + Self(270) + } +} + +#[allow(clippy::derivable_impls)] +impl Default for PriorityOpId { + fn default() -> Self { + Self(0) + } +} diff --git a/core/lib/basic_types/src/macros.rs b/core/lib/basic_types/src/macros.rs new file mode 100644 index 000000000000..72d5bbe2d925 --- /dev/null +++ b/core/lib/basic_types/src/macros.rs @@ -0,0 +1,78 @@ +macro_rules! basic_type { + ($(#[$attr:meta])* $name:ident, $type:ty) => { + $(#[$attr])* + #[derive( + Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash, PartialOrd, Ord + )] + pub struct $name(pub $type); + + impl $name { + pub fn next(self) -> $name { + $name(self.0 + 1) + } + } + + impl Deref for $name { + type Target = $type; + + fn deref(&self) -> &Self::Target { + &self.0 + } + } + + impl DerefMut for $name { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } + } + + impl FromStr for $name { + type Err = ParseIntError; + + fn from_str(s: &str) -> Result { + let value = s.parse::<$type>()?; + Ok(Self(value)) + } + } + + impl fmt::Display for $name { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } + } + + impl Add<$type> for $name { + type Output = Self; + + fn add(self, other: $type) -> Self { + Self(self.0 + other) + } + } + + impl std::ops::AddAssign<$type> for $name { + fn add_assign(&mut self, other: $type) { + self.0 += other; + } + } + + impl Sub<$type> for $name { + type Output = Self; + + fn sub(self, other: $type) -> Self { + Self(self.0 - other) + } + } + + impl std::ops::SubAssign<$type> for $name { + fn sub_assign(&mut self, 
other: $type) { + self.0 -= other; + } + } + + impl From<$type> for $name { + fn from(value: $type) -> Self { + Self(value) + } + } + }; +} diff --git a/core/lib/basic_types/src/network.rs b/core/lib/basic_types/src/network.rs new file mode 100644 index 000000000000..cda956d1b34b --- /dev/null +++ b/core/lib/basic_types/src/network.rs @@ -0,0 +1,90 @@ +//! The network where the zkSync resides. +//! + +// Built-in uses +use std::{fmt, str::FromStr}; + +// External uses +use serde::{Deserialize, Serialize}; + +// Workspace uses +use crate::L1ChainId; + +// Local uses + +/// Network to be used for a zkSync client. +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub enum Network { + /// Ethereum Mainnet. + Mainnet, + /// Ethereum Rinkeby testnet. + Rinkeby, + /// Ethereum Ropsten testnet. + Ropsten, + /// Ethereum Görli testnet. + Goerli, + /// Self-hosted Ethereum network. + Localhost, + /// Unknown network type. + Unknown, + /// Test network for testkit purposes + Test, +} + +impl FromStr for Network { + type Err = String; + + fn from_str(string: &str) -> Result { + Ok(match string { + "mainnet" => Self::Mainnet, + "rinkeby" => Self::Rinkeby, + "ropsten" => Self::Ropsten, + "goerli" => Self::Goerli, + "localhost" => Self::Localhost, + "test" => Self::Test, + another => return Err(another.to_owned()), + }) + } +} + +impl fmt::Display for Network { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Mainnet => write!(f, "mainnet"), + Self::Rinkeby => write!(f, "rinkeby"), + Self::Ropsten => write!(f, "ropsten"), + Self::Goerli => write!(f, "goerli"), + Self::Localhost => write!(f, "localhost"), + Self::Unknown => write!(f, "unknown"), + Self::Test => write!(f, "test"), + } + } +} + +impl Network { + /// Returns the network chain ID on the Ethereum side. 
+ pub fn from_chain_id(chain_id: L1ChainId) -> Self { + match *chain_id { + 1 => Self::Mainnet, + 3 => Self::Ropsten, + 4 => Self::Rinkeby, + 5 => Self::Goerli, + 9 => Self::Localhost, + _ => Self::Unknown, + } + } + + /// Returns the network chain ID on the Ethereum side. + pub fn chain_id(self) -> L1ChainId { + match self { + Self::Mainnet => L1ChainId(1), + Self::Ropsten => L1ChainId(3), + Self::Rinkeby => L1ChainId(4), + Self::Goerli => L1ChainId(5), + Self::Localhost => L1ChainId(9), + Self::Unknown => panic!("Unknown chain ID"), + Self::Test => panic!("Test chain ID"), + } + } +} diff --git a/core/lib/circuit_breaker/Cargo.toml b/core/lib/circuit_breaker/Cargo.toml new file mode 100644 index 000000000000..bdf811fc3458 --- /dev/null +++ b/core/lib/circuit_breaker/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "zksync_circuit_breaker" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_types = { path = "../types", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } +zksync_contracts = { path = "../contracts", version = "1.0" } +zksync_dal = { path = "../dal", version = "1.0" } +zksync_eth_client = { path = "../eth_client", version = "1.0" } +zksync_utils = { path = "../utils", version = "1.0" } +zksync_verification_key_generator_and_server = { path = "../../bin/verification_key_generator_and_server", version = "1.0" } +thiserror = "1.0" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +futures = { version = "0.3", features = ["compat"] } +tokio = { version = "1", features = ["time"] } +async-trait = "0.1" +hex = "0.4" +convert_case = "0.6.0" diff --git a/core/lib/circuit_breaker/src/code_hashes.rs b/core/lib/circuit_breaker/src/code_hashes.rs new file mode 100644 index 
000000000000..d497df4bb1c4 --- /dev/null +++ b/core/lib/circuit_breaker/src/code_hashes.rs @@ -0,0 +1,80 @@ +use crate::{CircuitBreaker, CircuitBreakerError}; +use thiserror::Error; +use zksync_config::ZkSyncConfig; +use zksync_contracts::{DEFAULT_ACCOUNT_CODE, PROVED_BLOCK_BOOTLOADER_CODE}; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_types::H256; +use zksync_utils::u256_to_h256; + +#[derive(Debug, Error)] +pub enum MismatchedCodeHashError { + #[error("Server has different bootloader code hash from the one on L1 contract, server: {server_hash:?}, contract: {contract_hash:?}")] + Bootloader { + server_hash: H256, + contract_hash: H256, + }, + #[error("Server has different default account code hash from the one on L1 contract, server: {server_hash:?}, contract: {contract_hash:?}")] + DefaultAccount { + server_hash: H256, + contract_hash: H256, + }, +} + +#[derive(Debug)] +pub struct CodeHashesChecker { + pub eth_client: EthereumClient, +} + +impl CodeHashesChecker { + pub fn new(config: &ZkSyncConfig) -> Self { + Self { + eth_client: EthereumClient::from_config(config), + } + } +} + +#[async_trait::async_trait] +impl CircuitBreaker for CodeHashesChecker { + async fn check(&self) -> Result<(), CircuitBreakerError> { + let bootloader_code_hash_on_l1: H256 = self + .eth_client + .call_main_contract_function( + "getL2BootloaderBytecodeHash", + (), + None, + Default::default(), + None, + ) + .await + .unwrap(); + if bootloader_code_hash_on_l1 != u256_to_h256(PROVED_BLOCK_BOOTLOADER_CODE.hash) { + return Err(CircuitBreakerError::MismatchedCodeHash( + MismatchedCodeHashError::Bootloader { + server_hash: u256_to_h256(PROVED_BLOCK_BOOTLOADER_CODE.hash), + contract_hash: bootloader_code_hash_on_l1, + }, + )); + } + + let default_account_code_hash_on_l1: H256 = self + .eth_client + .call_main_contract_function( + "getL2DefaultAccountBytecodeHash", + (), + None, + Default::default(), + None, + ) + .await + .unwrap(); + if 
default_account_code_hash_on_l1 != u256_to_h256(DEFAULT_ACCOUNT_CODE.hash) { + return Err(CircuitBreakerError::MismatchedCodeHash( + MismatchedCodeHashError::DefaultAccount { + server_hash: u256_to_h256(DEFAULT_ACCOUNT_CODE.hash), + contract_hash: default_account_code_hash_on_l1, + }, + )); + } + Ok(()) + } +} diff --git a/core/lib/circuit_breaker/src/facet_selectors.rs b/core/lib/circuit_breaker/src/facet_selectors.rs new file mode 100644 index 000000000000..9072d0f94a7c --- /dev/null +++ b/core/lib/circuit_breaker/src/facet_selectors.rs @@ -0,0 +1,126 @@ +use crate::{utils::unwrap_tuple, CircuitBreaker, CircuitBreakerError}; + +use std::collections::BTreeMap; +use std::env; +use std::fmt; +use std::str::FromStr; +use std::{fs, path::Path}; + +use convert_case::{Case, Casing}; + +use zksync_config::ZkSyncConfig; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_types::ethabi::Token; +use zksync_types::Address; + +#[derive(Debug)] +pub struct MismatchedFacetSelectorsError { + pub server_selectors: String, + pub contract_selectors: String, +} + +impl fmt::Display for MismatchedFacetSelectorsError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!( + f, + "server: {}, contract: {}", + self.server_selectors, self.contract_selectors + ) + } +} + +#[derive(Debug)] +pub struct FacetSelectorsChecker { + eth_client: EthereumClient, + // BTreeMap is used to have fixed order of elements when printing error. 
+ server_selectors: BTreeMap>, +} + +impl FacetSelectorsChecker { + pub fn new(config: &ZkSyncConfig) -> Self { + let zksync_home = env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into()); + let path_str = "contracts/ethereum/artifacts/cache/solpp-generated-contracts/zksync/facets"; + let facets_path = Path::new(&zksync_home).join(path_str); + let paths = fs::read_dir(facets_path).unwrap(); + let server_selectors = paths + .into_iter() + .filter_map(|path| { + let file_name: String = path.unwrap().file_name().into_string().unwrap(); + let facet_name: &str = file_name.as_str().split('.').next().unwrap(); + // Exclude `Base` contract. + if facet_name == "Base" { + return None; + } + let env_name = format!( + "CONTRACTS_{}_FACET_ADDR", + facet_name.to_case(Case::ScreamingSnake) + ); + let address = Address::from_str(&env::var(env_name).unwrap()).unwrap(); + + let contract = zksync_contracts::load_contract( + format!("{0}/{1}.sol/{1}Facet.json", path_str, facet_name).as_str(), + ); + let selectors = contract + .functions + .into_values() + .map(|func| { + let func = func.first().cloned().unwrap(); + format!("0x{}", hex::encode(func.short_signature())) + }) + .collect(); + + Some((address, selectors)) + }) + .collect(); + + Self { + eth_client: EthereumClient::from_config(config), + server_selectors, + } + } +} + +impl FacetSelectorsChecker { + async fn get_contract_facet_selectors(&self) -> BTreeMap> { + let facets: Token = self + .eth_client + .call_main_contract_function("facets", (), None, Default::default(), None) + .await + .unwrap(); + let facets = facets.into_array().unwrap(); + facets + .into_iter() + .map(|facet| { + let tokens = unwrap_tuple(facet); + let address = tokens[0].clone().into_address().unwrap(); + let selectors = tokens[1] + .clone() + .into_array() + .unwrap() + .into_iter() + .map(|token| { + "0x".to_string() + hex::encode(token.into_fixed_bytes().unwrap()).as_str() + }) + .collect(); + (address, selectors) + }) + .collect() + } +} + 
+#[async_trait::async_trait] +impl CircuitBreaker for FacetSelectorsChecker { + async fn check(&self) -> Result<(), CircuitBreakerError> { + let contract_selectors = self.get_contract_facet_selectors().await; + if self.server_selectors != contract_selectors { + return Err(CircuitBreakerError::MismatchedFacetSelectors( + MismatchedFacetSelectorsError { + server_selectors: serde_json::to_string_pretty(&self.server_selectors).unwrap(), + contract_selectors: serde_json::to_string_pretty(&contract_selectors).unwrap(), + }, + )); + } + + Ok(()) + } +} diff --git a/core/lib/circuit_breaker/src/l1_txs.rs b/core/lib/circuit_breaker/src/l1_txs.rs new file mode 100644 index 000000000000..c3a7bd054919 --- /dev/null +++ b/core/lib/circuit_breaker/src/l1_txs.rs @@ -0,0 +1,24 @@ +use crate::{CircuitBreaker, CircuitBreakerError}; +use zksync_dal::ConnectionPool; + +#[derive(Debug)] +pub struct FailedL1TransactionChecker { + pub pool: ConnectionPool, +} + +#[async_trait::async_trait] +impl CircuitBreaker for FailedL1TransactionChecker { + async fn check(&self) -> Result<(), CircuitBreakerError> { + if self + .pool + .access_storage() + .await + .eth_sender_dal() + .get_number_of_failed_transactions() + > 0 + { + return Err(CircuitBreakerError::FailedL1Transaction); + } + Ok(()) + } +} diff --git a/core/lib/circuit_breaker/src/lib.rs b/core/lib/circuit_breaker/src/lib.rs new file mode 100644 index 000000000000..034581ab2822 --- /dev/null +++ b/core/lib/circuit_breaker/src/lib.rs @@ -0,0 +1,79 @@ +use std::time::Duration; + +use futures::channel::oneshot; +use thiserror::Error; +use tokio::sync::watch; + +use zksync_config::configs::chain::CircuitBreakerConfig; + +use crate::code_hashes::MismatchedCodeHashError; +use crate::facet_selectors::MismatchedFacetSelectorsError; +use crate::vks::VerifierError; + +pub mod code_hashes; +pub mod facet_selectors; +pub mod l1_txs; +pub mod utils; +pub mod vks; + +#[derive(Debug, Error)] +pub enum CircuitBreakerError { + #[error("System has failed 
L1 transaction")] + FailedL1Transaction, + #[error("Mismatched code hash: {0}")] + MismatchedCodeHash(MismatchedCodeHashError), + #[error("Verifier error: {0}")] + Verifier(VerifierError), + #[error("Mismatched facet selectors: {0}")] + MismatchedFacetSelectors(MismatchedFacetSelectorsError), +} + +/// Checks circuit breakers +#[derive(Debug)] +pub struct CircuitBreakerChecker { + circuit_breakers: Vec>, + sync_interval: Duration, +} + +#[async_trait::async_trait] +pub trait CircuitBreaker: std::fmt::Debug + Send + Sync + 'static { + async fn check(&self) -> Result<(), CircuitBreakerError>; +} + +impl CircuitBreakerChecker { + pub fn new( + circuit_breakers: Vec>, + config: &CircuitBreakerConfig, + ) -> Self { + Self { + circuit_breakers, + sync_interval: config.sync_interval(), + } + } + + pub async fn check(&self) -> Result<(), CircuitBreakerError> { + for circuit_breaker in &self.circuit_breakers { + circuit_breaker.check().await?; + } + Ok(()) + } + + pub async fn run( + self, + circuit_breaker_sender: oneshot::Sender, + stop_receiver: watch::Receiver, + ) { + loop { + if *stop_receiver.borrow() { + break; + } + if let Err(error) = self.check().await { + circuit_breaker_sender + .send(error) + .expect("failed to send circuit breaker messsage"); + return; + } + tokio::time::sleep(self.sync_interval).await; + } + } +} diff --git a/core/lib/circuit_breaker/src/utils.rs b/core/lib/circuit_breaker/src/utils.rs new file mode 100644 index 000000000000..ddaa387e694c --- /dev/null +++ b/core/lib/circuit_breaker/src/utils.rs @@ -0,0 +1,9 @@ +use zksync_types::ethabi::Token; + +pub fn unwrap_tuple(token: Token) -> Vec { + if let Token::Tuple(tokens) = token { + tokens + } else { + panic!("Tuple was expected, got: {}", token); + } +} diff --git a/core/lib/circuit_breaker/src/vks.rs b/core/lib/circuit_breaker/src/vks.rs new file mode 100644 index 000000000000..e4cd88b1a6be --- /dev/null +++ b/core/lib/circuit_breaker/src/vks.rs @@ -0,0 +1,275 @@ +use 
crate::{utils::unwrap_tuple, CircuitBreaker, CircuitBreakerError}; +use serde::{Deserialize, Serialize}; +use std::convert::TryInto; +use std::{env, str::FromStr}; +use thiserror::Error; +use zksync_config::ZkSyncConfig; +use zksync_eth_client::clients::http_client::EthereumClient; +use zksync_types::ethabi::Token; +use zksync_types::zkevm_test_harness::bellman::{ + bn256::{Fq, Fq2, Fr, G1Affine, G2Affine}, + CurveAffine, PrimeField, +}; +use zksync_types::{Address, H256}; +use zksync_verification_key_server::get_vk_for_circuit_type; + +#[derive(Debug, Error)] +pub enum VerifierError { + #[error("Verifier address from the env var is different from the one in Diamond Proxy contract, from env: {address_from_env:?}, from contract: {address_from_contract:?}")] + VerifierAddressMismatch { + address_from_env: Address, + address_from_contract: Address, + }, + #[error("Server has different vks commitment from the one on L1 contract, server: {server_vks:?}, contract: {contract_vks:?}")] + VksCommitment { + server_vks: VksCommitment, + contract_vks: VksCommitment, + }, + #[error("Server has different Scheduler VK from the one on L1 contract, server: {server_vk}, contract: {contract_vk}")] + SchedulerVk { + server_vk: String, + contract_vk: String, + }, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct VksCommitment { + pub node: H256, + pub leaf: H256, + pub basic_circuits: H256, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct VerificationKey { + pub n: usize, + pub num_inputs: usize, + + pub gate_setup_commitments: Vec, + pub gate_selectors_commitments: Vec, + pub permutation_commitments: Vec, + + pub lookup_selector_commitment: Option, + pub lookup_tables_commitments: Vec, + pub lookup_table_type_commitment: Option, + + pub non_residues: Vec, + pub g2_elements: [G2Affine; 2], +} + +#[derive(Debug)] +pub struct VksChecker { + pub eth_client: EthereumClient, +} + +impl VksChecker { + pub fn new(config: &ZkSyncConfig) -> Self { + 
Self { + eth_client: EthereumClient::from_config(config), + } + } + + async fn check_verifier_address(&self) -> Result<(), CircuitBreakerError> { + let address_from_env = + Address::from_str(&env::var("CONTRACTS_VERIFIER_ADDR").unwrap()).unwrap(); + let address_from_contract: Address = self + .eth_client + .call_main_contract_function("getVerifier", (), None, Default::default(), None) + .await + .unwrap(); + if address_from_env != address_from_contract { + return Err(CircuitBreakerError::Verifier( + VerifierError::VerifierAddressMismatch { + address_from_env, + address_from_contract, + }, + )); + } + Ok(()) + } + + async fn check_commitments(&self) -> Result<(), CircuitBreakerError> { + let verifier_params_token: Token = self + .eth_client + .call_main_contract_function("getVerifierParams", (), None, Default::default(), None) + .await + .unwrap(); + let vks_vec: Vec = unwrap_tuple(verifier_params_token) + .into_iter() + .map(|token| H256::from_slice(&token.into_fixed_bytes().unwrap())) + .collect(); + let contract_vks = VksCommitment { + node: vks_vec[0], + leaf: vks_vec[1], + basic_circuits: vks_vec[2], + }; + + let server_vks = VksCommitment { + node: H256::from_str(&env::var("CONTRACTS_VK_COMMITMENT_NODE").unwrap()).unwrap(), + leaf: H256::from_str(&env::var("CONTRACTS_VK_COMMITMENT_LEAF").unwrap()).unwrap(), + basic_circuits: H256::from_str( + &env::var("CONTRACTS_VK_COMMITMENT_BASIC_CIRCUITS").unwrap(), + ) + .unwrap(), + }; + + if contract_vks != server_vks { + return Err(CircuitBreakerError::Verifier( + VerifierError::VksCommitment { + contract_vks, + server_vks, + }, + )); + } + Ok(()) + } + + async fn check_scheduler_vk(&self) -> Result<(), CircuitBreakerError> { + let server_vk = get_vk_for_circuit_type(0); + let server_vk = VerificationKey { + n: server_vk.n, + num_inputs: server_vk.num_inputs, + gate_setup_commitments: server_vk.gate_setup_commitments, + gate_selectors_commitments: server_vk.gate_selectors_commitments, + permutation_commitments: 
server_vk.permutation_commitments, + lookup_selector_commitment: server_vk.lookup_selector_commitment, + lookup_tables_commitments: server_vk.lookup_tables_commitments, + lookup_table_type_commitment: server_vk.lookup_table_type_commitment, + non_residues: server_vk.non_residues, + g2_elements: server_vk.g2_elements, + }; + + let contract_vk = self.get_contract_vk().await; + + if server_vk != contract_vk { + return Err(CircuitBreakerError::Verifier(VerifierError::SchedulerVk { + server_vk: serde_json::to_string_pretty(&server_vk).unwrap(), + contract_vk: serde_json::to_string_pretty(&contract_vk).unwrap(), + })); + } + Ok(()) + } + + async fn get_contract_vk(&self) -> VerificationKey { + let verifier_contract_address = + Address::from_str(&env::var("CONTRACTS_VERIFIER_ADDR").unwrap()).unwrap(); + let verifier_contract_abi = zksync_contracts::verifier_contract(); + let vk_token: Token = self + .eth_client + .call_contract_function( + "get_verification_key", + (), + None, + Default::default(), + None, + verifier_contract_address, + verifier_contract_abi, + ) + .await + .unwrap(); + + let tokens = unwrap_tuple(vk_token); + let n = tokens[0].clone().into_uint().unwrap().as_usize() - 1; + let num_inputs = tokens[1].clone().into_uint().unwrap().as_usize(); + let gate_selectors_commitments = tokens[3] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(g1_affine_from_token) + .collect(); + let gate_setup_commitments = tokens[4] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(g1_affine_from_token) + .collect(); + let permutation_commitments = tokens[5] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(g1_affine_from_token) + .collect(); + let lookup_selector_commitment = g1_affine_from_token(tokens[6].clone()); + let lookup_tables_commitments = tokens[7] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(g1_affine_from_token) + .collect(); + let lookup_table_type_commitment = 
g1_affine_from_token(tokens[8].clone()); + let non_residues = tokens[9] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(fr_from_token) + .collect(); + let g2_elements = tokens[10] + .clone() + .into_fixed_array() + .unwrap() + .into_iter() + .map(g2_affine_from_token) + .collect::>() + .try_into() + .unwrap(); + + VerificationKey { + n, + num_inputs, + + gate_setup_commitments, + gate_selectors_commitments, + permutation_commitments, + + lookup_selector_commitment: Some(lookup_selector_commitment), + lookup_tables_commitments, + lookup_table_type_commitment: Some(lookup_table_type_commitment), + + non_residues, + g2_elements, + } + } +} + +#[async_trait::async_trait] +impl CircuitBreaker for VksChecker { + async fn check(&self) -> Result<(), CircuitBreakerError> { + self.check_verifier_address().await?; + self.check_commitments().await?; + self.check_scheduler_vk().await?; + Ok(()) + } +} + +fn g1_affine_from_token(token: Token) -> G1Affine { + let tokens = unwrap_tuple(token); + G1Affine::from_xy_unchecked( + Fq::from_str(&tokens[0].clone().into_uint().unwrap().to_string()).unwrap(), + Fq::from_str(&tokens[1].clone().into_uint().unwrap().to_string()).unwrap(), + ) +} + +fn fr_from_token(token: Token) -> Fr { + let tokens = unwrap_tuple(token); + Fr::from_str(&tokens[0].clone().into_uint().unwrap().to_string()).unwrap() +} + +fn g2_affine_from_token(token: Token) -> G2Affine { + let tokens = unwrap_tuple(token); + let tokens0 = tokens[0].clone().into_fixed_array().unwrap(); + let tokens1 = tokens[1].clone().into_fixed_array().unwrap(); + G2Affine::from_xy_unchecked( + Fq2 { + c1: Fq::from_str(&tokens0[0].clone().into_uint().unwrap().to_string()).unwrap(), + c0: Fq::from_str(&tokens0[1].clone().into_uint().unwrap().to_string()).unwrap(), + }, + Fq2 { + c1: Fq::from_str(&tokens1[0].clone().into_uint().unwrap().to_string()).unwrap(), + c0: Fq::from_str(&tokens1[1].clone().into_uint().unwrap().to_string()).unwrap(), + }, + ) +} diff --git 
a/core/lib/config/Cargo.toml b/core/lib/config/Cargo.toml new file mode 100644 index 000000000000..8dee29bc0f59 --- /dev/null +++ b/core/lib/config/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zksync_config" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_basic_types = { path = "../../lib/basic_types", version = "1.0" } +zksync_utils = { path = "../../lib/utils", version = "1.0" } + +url = "2.1" +num = "0.3.1" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +envy = "0.4" +once_cell = "1.13.0" +bigdecimal = "0.2.0" diff --git a/core/lib/config/src/configs/api.rs b/core/lib/config/src/configs/api.rs new file mode 100644 index 000000000000..62c20909232f --- /dev/null +++ b/core/lib/config/src/configs/api.rs @@ -0,0 +1,239 @@ +/// External uses +use serde::Deserialize; +/// Built-in uses +use std::net::SocketAddr; +use std::time::Duration; +// Local uses +pub use crate::configs::utils::Prometheus; +use crate::envy_load; +use zksync_basic_types::H256; + +/// API configuration. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ApiConfig { + /// Configuration options for the Web3 JSON RPC servers. + pub web3_json_rpc: Web3JsonRpc, + /// Configuration options for the REST servers. + pub explorer: Explorer, + /// Configuration options for the Prometheus exporter. + pub prometheus: Prometheus, +} + +impl ApiConfig { + pub fn from_env() -> Self { + Self { + web3_json_rpc: envy_load!("web3_json_rpc", "API_WEB3_JSON_RPC_"), + explorer: envy_load!("explorer", "API_EXPLORER_"), + prometheus: envy_load!("prometheus", "API_PROMETHEUS_"), + } + } +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct Web3JsonRpc { + /// Port to which the HTTP RPC server is listening. 
+ pub http_port: u16, + /// URL to access HTTP RPC server. + pub http_url: String, + /// Port to which the WebSocket RPC server is listening. + pub ws_port: u16, + /// URL to access WebSocket RPC server. + pub ws_url: String, + /// Max possible limit of entities to be requested once. + pub req_entities_limit: Option, + /// Max possible limit of filters to be in the state at once. + pub filters_limit: Option, + /// Max possible limit of subscriptions to be in the state at once. + pub subscriptions_limit: Option, + /// Interval between polling db for pubsub (in ms). + pub pubsub_polling_interval: Option, + /// number of threads per server + pub threads_per_server: u32, + /// Tx nonce: how far ahead from the committed nonce can it be. + pub max_nonce_ahead: u32, + /// The multiplier to use when suggesting gas price. Should be higher than one, + /// otherwise if the L1 prices soar, the suggested gas price won't be sufficient to be included in block + pub gas_price_scale_factor: f64, + /// Inbound transaction limit used for throttling + pub transactions_per_sec_limit: Option, + /// Timeout for requests (in s) + pub request_timeout: Option, + /// Private keys for accounts managed by node + pub account_pks: Option>, + /// The factor by which to scale the gasLimit + pub estimate_gas_scale_factor: f64, + /// The max possible number of gas that `eth_estimateGas` is allowed to overestimate. 
+ pub estimate_gas_acceptable_overestimation: u32, +} + +impl Web3JsonRpc { + pub fn http_bind_addr(&self) -> SocketAddr { + SocketAddr::new("0.0.0.0".parse().unwrap(), self.http_port) + } + + pub fn ws_bind_addr(&self) -> SocketAddr { + SocketAddr::new("0.0.0.0".parse().unwrap(), self.ws_port) + } + + pub fn req_entities_limit(&self) -> usize { + self.req_entities_limit.unwrap_or_else(|| 2u32.pow(10)) as usize + } + + pub fn filters_limit(&self) -> usize { + self.filters_limit.unwrap_or(10000) as usize + } + + pub fn subscriptions_limit(&self) -> usize { + self.subscriptions_limit.unwrap_or(10000) as usize + } + + pub fn pubsub_interval(&self) -> Duration { + Duration::from_millis(self.pubsub_polling_interval.unwrap_or(200)) + } + + pub fn request_timeout(&self) -> Duration { + Duration::from_secs(self.request_timeout.unwrap_or(10)) + } + + pub fn account_pks(&self) -> Vec { + self.account_pks.clone().unwrap_or_default() + } +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct Explorer { + /// Port to which the REST server is listening. + pub port: u16, + /// URL to access REST server. + pub url: String, + /// Interval between polling db for network stats (in ms). + pub network_stats_polling_interval: Option, + /// Max possible limit of entities to be requested once. + pub req_entities_limit: Option, + /// Max possible value of (offset + limit) in pagination endpoints. 
+ pub offset_limit: Option, + /// number of threads per server + pub threads_per_server: u32, +} + +impl Explorer { + pub fn bind_addr(&self) -> SocketAddr { + SocketAddr::new("0.0.0.0".parse().unwrap(), self.port) + } + + pub fn network_stats_interval(&self) -> Duration { + Duration::from_millis(self.network_stats_polling_interval.unwrap_or(1000)) + } + + pub fn req_entities_limit(&self) -> usize { + self.req_entities_limit.unwrap_or(100) as usize + } + + pub fn offset_limit(&self) -> usize { + self.offset_limit.unwrap_or(10000) as usize + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + use std::net::IpAddr; + use std::str::FromStr; + + fn expected_config() -> ApiConfig { + ApiConfig { + web3_json_rpc: Web3JsonRpc { + http_port: 3050, + http_url: "http://127.0.0.1:3050".into(), + ws_port: 3051, + ws_url: "ws://127.0.0.1:3051".into(), + req_entities_limit: Some(10000), + filters_limit: Some(10000), + subscriptions_limit: Some(10000), + pubsub_polling_interval: Some(200), + threads_per_server: 128, + max_nonce_ahead: 5, + transactions_per_sec_limit: Some(1000), + request_timeout: Some(10), + account_pks: Some(vec![ + H256::from_str( + "0x0000000000000000000000000000000000000000000000000000000000000001", + ) + .unwrap(), + H256::from_str( + "0x0000000000000000000000000000000000000000000000000000000000000002", + ) + .unwrap(), + ]), + estimate_gas_scale_factor: 1.0f64, + gas_price_scale_factor: 1.2, + estimate_gas_acceptable_overestimation: 1000, + }, + explorer: Explorer { + port: 3070, + url: "http://127.0.0.1:3070".into(), + network_stats_polling_interval: Some(1000), + req_entities_limit: Some(100), + offset_limit: Some(10000), + threads_per_server: 128, + }, + prometheus: Prometheus { + listener_port: 3312, + pushgateway_url: "http://127.0.0.1:9091".into(), + push_interval_ms: Some(100), + }, + } + } + + #[test] + fn from_env() { + let config = r#" +API_WEB3_JSON_RPC_HTTP_PORT="3050" 
+API_WEB3_JSON_RPC_HTTP_URL="http://127.0.0.1:3050" +API_WEB3_JSON_RPC_WS_PORT="3051" +API_WEB3_JSON_RPC_WS_URL="ws://127.0.0.1:3051" +API_WEB3_JSON_RPC_REQ_ENTITIES_LIMIT=10000 +API_WEB3_JSON_RPC_FILTERS_LIMIT=10000 +API_WEB3_JSON_RPC_SUBSCRIPTIONS_LIMIT=10000 +API_WEB3_JSON_RPC_PUBSUB_POLLING_INTERVAL=200 +API_WEB3_JSON_RPC_THREADS_PER_SERVER=128 +API_WEB3_JSON_RPC_MAX_NONCE_AHEAD=5 +API_WEB3_JSON_RPC_GAS_PRICE_SCALE_FACTOR=1.2 +API_WEB3_JSON_RPC_TRANSACTIONS_PER_SEC_LIMIT=1000 +API_WEB3_JSON_RPC_REQUEST_TIMEOUT=10 +API_WEB3_JSON_RPC_ACCOUNT_PKS=0x0000000000000000000000000000000000000000000000000000000000000001,0x0000000000000000000000000000000000000000000000000000000000000002 +API_WEB3_JSON_RPC_ESTIMATE_GAS_SCALE_FACTOR=1.0 +API_WEB3_JSON_RPC_ESTIMATE_GAS_ACCEPTABLE_OVERESTIMATION=1000 +API_EXPLORER_PORT="3070" +API_EXPLORER_URL="http://127.0.0.1:3070" +API_EXPLORER_NETWORK_STATS_POLLING_INTERVAL="1000" +API_EXPLORER_REQ_ENTITIES_LIMIT=100 +API_EXPLORER_OFFSET_LIMIT=10000 +API_EXPLORER_THREADS_PER_SERVER=128 +API_PROMETHEUS_LISTENER_PORT="3312" +API_PROMETHEUS_PUSHGATEWAY_URL="http://127.0.0.1:9091" +API_PROMETHEUS_PUSH_INTERVAL_MS=100 + "#; + set_env(config); + + let actual = ApiConfig::from_env(); + assert_eq!(actual, expected_config()); + } + + /// Checks the correctness of the config helper methods. 
+ #[test] + fn methods() { + let config = expected_config(); + let bind_broadcast_addr: IpAddr = "0.0.0.0".parse().unwrap(); + + assert_eq!( + config.web3_json_rpc.pubsub_interval(), + Duration::from_millis(200) + ); + assert_eq!( + config.explorer.bind_addr(), + SocketAddr::new(bind_broadcast_addr, config.explorer.port) + ); + } +} diff --git a/core/lib/config/src/configs/chain.rs b/core/lib/config/src/configs/chain.rs new file mode 100644 index 000000000000..3af005531e76 --- /dev/null +++ b/core/lib/config/src/configs/chain.rs @@ -0,0 +1,199 @@ +/// External uses +use serde::Deserialize; +/// Built-in uses +use std::time::Duration; +// Local uses +use zksync_basic_types::network::Network; +use zksync_basic_types::Address; + +use crate::envy_load; + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ChainConfig { + /// L1 parameters configuration. + pub eth: Eth, + /// State keeper / block generating configuration. + pub state_keeper: StateKeeperConfig, + /// Operations manager / Metadata calculator. + pub operations_manager: OperationsManager, + /// mempool configuration + pub mempool: MempoolConfig, + /// circuit breaker configuration + pub circuit_breaker: CircuitBreakerConfig, +} + +impl ChainConfig { + pub fn from_env() -> Self { + Self { + eth: envy_load!("eth", "CHAIN_ETH_"), + state_keeper: envy_load!("state_keeper", "CHAIN_STATE_KEEPER_"), + operations_manager: envy_load!("operations_manager", "CHAIN_OPERATIONS_MANAGER_"), + mempool: envy_load!("mempool", "CHAIN_MEMPOOL_"), + circuit_breaker: envy_load!("circuit_breaker", "CHAIN_CIRCUIT_BREAKER_"), + } + } +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct Eth { + /// Name of the used Ethereum network, e.g. `localhost` or `rinkeby`. + pub network: Network, + /// Name of current zkSync network + /// Used for Sentry environment + pub zksync_network: String, + /// ID of current zkSync network treated as ETH network ID. 
+ /// Used to distinguish zkSync from other Web3-capable networks. + pub zksync_network_id: u16, +} + +#[derive(Debug, Deserialize, Clone, PartialEq, Default)] +pub struct StateKeeperConfig { + /// Detones the amount of slots for transactions in the block. + pub transaction_slots: usize, + + pub block_commit_deadline_ms: u64, + pub miniblock_commit_deadline_ms: u64, + + pub max_single_tx_gas: u32, + + pub max_allowed_l2_tx_gas_limit: u32, + + /// Configuration option for tx to be rejected in case + /// it takes more percentage of the block capacity than this value. + pub reject_tx_at_geometry_percentage: f64, + /// Configuration option for tx to be rejected in case + /// it takes more percentage of the block capacity than this value. + pub reject_tx_at_eth_params_percentage: f64, + /// Configuration option for tx to be rejected in case + /// it takes more percentage of the block capacity than this value. + pub reject_tx_at_gas_percentage: f64, + /// Denotes the percentage of geometry params used in l2 block, that triggers l2 block seal. + pub close_block_at_geometry_percentage: f64, + /// Denotes the percentage of l1 params used in l2 block, that triggers l2 block seal. + pub close_block_at_eth_params_percentage: f64, + /// Denotes the percentage of l1 gas used in l2 block, that triggers l2 block seal. 
+ pub close_block_at_gas_percentage: f64, + + pub fee_account_addr: Address, + + pub reexecute_each_tx: bool, +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct OperationsManager { + /// Sleep time in ms when there is no new input data + pub delay_interval: u64, +} + +impl OperationsManager { + pub fn delay_interval(&self) -> Duration { + Duration::from_millis(self.delay_interval) + } +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct CircuitBreakerConfig { + pub sync_interval_ms: u64, +} + +impl CircuitBreakerConfig { + pub fn sync_interval(&self) -> Duration { + Duration::from_millis(self.sync_interval_ms) + } +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct MempoolConfig { + pub sync_interval_ms: u64, + pub sync_batch_size: usize, + pub capacity: u64, + pub stuck_tx_timeout: u64, + pub remove_stuck_txs: bool, +} + +impl MempoolConfig { + pub fn sync_interval(&self) -> Duration { + Duration::from_millis(self.sync_interval_ms) + } + + pub fn stuck_tx_timeout(&self) -> Duration { + Duration::from_secs(self.stuck_tx_timeout) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::{addr, set_env}; + + fn expected_config() -> ChainConfig { + ChainConfig { + eth: Eth { + network: "localhost".parse().unwrap(), + zksync_network: "localhost".to_string(), + zksync_network_id: 270, + }, + state_keeper: StateKeeperConfig { + transaction_slots: 50, + block_commit_deadline_ms: 2500, + miniblock_commit_deadline_ms: 1000, + max_single_tx_gas: 1_000_000, + max_allowed_l2_tx_gas_limit: 2_000_000_000, + close_block_at_eth_params_percentage: 0.2, + close_block_at_gas_percentage: 0.8, + close_block_at_geometry_percentage: 0.5, + reject_tx_at_eth_params_percentage: 0.8, + reject_tx_at_geometry_percentage: 0.3, + fee_account_addr: addr("de03a0B5963f75f1C8485B355fF6D30f3093BDE7"), + reject_tx_at_gas_percentage: 0.5, + reexecute_each_tx: true, + }, + operations_manager: OperationsManager { + delay_interval: 100, 
+ }, + mempool: MempoolConfig { + sync_interval_ms: 10, + sync_batch_size: 1000, + capacity: 1_000_000, + stuck_tx_timeout: 10, + remove_stuck_txs: true, + }, + circuit_breaker: CircuitBreakerConfig { + sync_interval_ms: 1000, + }, + } + } + + #[test] + fn from_env() { + let config = r#" +CHAIN_ETH_NETWORK="localhost" +CHAIN_ETH_ZKSYNC_NETWORK="localhost" +CHAIN_ETH_ZKSYNC_NETWORK_ID=270 +CHAIN_STATE_KEEPER_TRANSACTION_SLOTS="50" +CHAIN_STATE_KEEPER_FEE_ACCOUNT_ADDR="0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7" +CHAIN_STATE_KEEPER_MAX_SINGLE_TX_GAS="1000000" +CHAIN_STATE_KEEPER_MAX_ALLOWED_L2_TX_GAS_LIMIT="2000000000" +CHAIN_STATE_KEEPER_CLOSE_BLOCK_AT_GEOMETRY_PERCENTAGE="0.5" +CHAIN_STATE_KEEPER_CLOSE_BLOCK_AT_GAS_PERCENTAGE="0.8" +CHAIN_STATE_KEEPER_CLOSE_BLOCK_AT_ETH_PARAMS_PERCENTAGE="0.2" +CHAIN_STATE_KEEPER_REJECT_TX_AT_GEOMETRY_PERCENTAGE="0.3" +CHAIN_STATE_KEEPER_REJECT_TX_AT_ETH_PARAMS_PERCENTAGE="0.8" +CHAIN_STATE_KEEPER_REJECT_TX_AT_GAS_PERCENTAGE="0.5" +CHAIN_STATE_KEEPER_REEXECUTE_EACH_TX="true" +CHAIN_STATE_KEEPER_BLOCK_COMMIT_DEADLINE_MS="2500" +CHAIN_STATE_KEEPER_MINIBLOCK_COMMIT_DEADLINE_MS="1000" +CHAIN_OPERATIONS_MANAGER_DELAY_INTERVAL="100" +CHAIN_MEMPOOL_SYNC_INTERVAL_MS="10" +CHAIN_MEMPOOL_SYNC_BATCH_SIZE="1000" +CHAIN_MEMPOOL_STUCK_TX_TIMEOUT="10" +CHAIN_MEMPOOL_REMOVE_STUCK_TXS="true" +CHAIN_MEMPOOL_CAPACITY="1000000" +CHAIN_CIRCUIT_BREAKER_SYNC_INTERVAL_MS="1000" + "#; + set_env(config); + + let actual = ChainConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/circuit_synthesizer.rs b/core/lib/config/src/configs/circuit_synthesizer.rs new file mode 100644 index 000000000000..b64c950af54b --- /dev/null +++ b/core/lib/config/src/configs/circuit_synthesizer.rs @@ -0,0 +1,83 @@ +use std::time::Duration; + +use serde::Deserialize; + +use crate::envy_load; + +/// Configuration for the witness generation +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct CircuitSynthesizerConfig { 
+ /// Max time for circuit to be synthesized + pub generation_timeout_in_secs: u16, + /// Max attempts for synthesizing circuit + pub max_attempts: u32, + /// Max time before an `reserved` prover instance in considered as `available` + pub gpu_prover_queue_timeout_in_secs: u16, + /// Max time to wait to get a free prover instance + pub prover_instance_wait_timeout_in_secs: u16, + // Time to wait between 2 consecutive poll to get new prover instance. + pub prover_instance_poll_time_in_milli_secs: u16, + /// Configurations for prometheus + pub prometheus_listener_port: u16, + pub prometheus_pushgateway_url: String, + pub prometheus_push_interval_ms: Option, +} + +impl CircuitSynthesizerConfig { + pub fn from_env() -> Self { + envy_load!("circuit_synthesizer", "CIRCUIT_SYNTHESIZER_") + } + + pub fn generation_timeout(&self) -> Duration { + Duration::from_secs(self.generation_timeout_in_secs as u64) + } + + pub fn prover_instance_wait_timeout(&self) -> Duration { + Duration::from_secs(self.prover_instance_wait_timeout_in_secs as u64) + } + + pub fn gpu_prover_queue_timeout(&self) -> Duration { + Duration::from_secs(self.gpu_prover_queue_timeout_in_secs as u64) + } + + pub fn prover_instance_poll_time(&self) -> Duration { + Duration::from_millis(self.prover_instance_poll_time_in_milli_secs as u64) + } +} + +#[cfg(test)] +mod tests { + use crate::configs::test_utils::set_env; + + use super::*; + + fn expected_config() -> CircuitSynthesizerConfig { + CircuitSynthesizerConfig { + generation_timeout_in_secs: 1000u16, + max_attempts: 2, + gpu_prover_queue_timeout_in_secs: 1000u16, + prover_instance_wait_timeout_in_secs: 1000u16, + prover_instance_poll_time_in_milli_secs: 250u16, + prometheus_listener_port: 3314, + prometheus_pushgateway_url: "http://127.0.0.1:9091".to_string(), + prometheus_push_interval_ms: Some(100), + } + } + + #[test] + fn from_env() { + let config = r#" + CIRCUIT_SYNTHESIZER_GENERATION_TIMEOUT_IN_SECS=1000 + CIRCUIT_SYNTHESIZER_MAX_ATTEMPTS=2 + 
CIRCUIT_SYNTHESIZER_GPU_PROVER_QUEUE_TIMEOUT_IN_SECS=1000 + CIRCUIT_SYNTHESIZER_PROVER_INSTANCE_WAIT_TIMEOUT_IN_SECS=1000 + CIRCUIT_SYNTHESIZER_PROVER_INSTANCE_POLL_TIME_IN_MILLI_SECS=250 + CIRCUIT_SYNTHESIZER_PROMETHEUS_LISTENER_PORT=3314 + CIRCUIT_SYNTHESIZER_PROMETHEUS_PUSHGATEWAY_URL="http://127.0.0.1:9091" + CIRCUIT_SYNTHESIZER_PROMETHEUS_PUSH_INTERVAL_MS=100 + "#; + set_env(config); + let actual = CircuitSynthesizerConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/contract_verifier.rs b/core/lib/config/src/configs/contract_verifier.rs new file mode 100644 index 000000000000..26323e7bb97b --- /dev/null +++ b/core/lib/config/src/configs/contract_verifier.rs @@ -0,0 +1,57 @@ +// Built-in uses +use std::time::Duration; +// External uses +use serde::Deserialize; +// Local uses +use crate::envy_load; + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ContractVerifierConfig { + /// Max time of a single compilation (in s). + pub compilation_timeout: u64, + /// Interval between polling db for verification requests (in ms). + pub polling_interval: Option, + /// Port to which the Prometheus exporter server is listening. 
+ pub prometheus_port: u16, +} + +impl ContractVerifierConfig { + pub fn from_env() -> Self { + envy_load!("contract_verifier", "CONTRACT_VERIFIER_") + } + + pub fn compilation_timeout(&self) -> Duration { + Duration::from_secs(self.compilation_timeout) + } + + pub fn polling_interval(&self) -> Duration { + Duration::from_millis(self.polling_interval.unwrap_or(1000)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> ContractVerifierConfig { + ContractVerifierConfig { + compilation_timeout: 30, + polling_interval: Some(1000), + prometheus_port: 3314, + } + } + + #[test] + fn from_env() { + let config = r#" + CONTRACT_VERIFIER_COMPILATION_TIMEOUT=30 + CONTRACT_VERIFIER_POLLING_INTERVAL=1000 + CONTRACT_VERIFIER_PROMETHEUS_PORT=3314 + "#; + set_env(config); + + let actual = ContractVerifierConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/contracts.rs b/core/lib/config/src/configs/contracts.rs new file mode 100644 index 000000000000..81481e968f01 --- /dev/null +++ b/core/lib/config/src/configs/contracts.rs @@ -0,0 +1,85 @@ +// External uses +use serde::Deserialize; +// Workspace uses +use zksync_basic_types::{Address, H256}; +// Local uses +use crate::envy_load; + +/// Data about deployed contracts. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ContractsConfig { + pub mailbox_facet_addr: Address, + pub executor_facet_addr: Address, + pub governance_facet_addr: Address, + pub diamond_cut_facet_addr: Address, + pub getters_facet_addr: Address, + pub verifier_addr: Address, + pub diamond_init_addr: Address, + pub diamond_upgrade_init_addr: Address, + pub diamond_proxy_addr: Address, + pub genesis_tx_hash: H256, + pub l1_erc20_bridge_proxy_addr: Address, + pub l1_erc20_bridge_impl_addr: Address, + pub l2_erc20_bridge_addr: Address, + pub l1_allow_list_addr: Address, + pub l2_testnet_paymaster_addr: Option
, +} + +impl ContractsConfig { + pub fn from_env() -> Self { + envy_load!("contracts", "CONTRACTS_") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::{addr, hash, set_env}; + + fn expected_config() -> ContractsConfig { + ContractsConfig { + mailbox_facet_addr: addr("0f6Fa881EF414Fc6E818180657c2d5CD7Ac6cCAd"), + executor_facet_addr: addr("18B631537801963A964211C0E86645c1aBfbB2d3"), + governance_facet_addr: addr("1e12b20BE86bEc3A0aC95aA52ade345cB9AE7a32"), + diamond_cut_facet_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + getters_facet_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + verifier_addr: addr("34782eE00206EAB6478F2692caa800e4A581687b"), + diamond_init_addr: addr("FFC35A5e767BE36057c34586303498e3de7C62Ba"), + diamond_upgrade_init_addr: addr("FFC35A5e767BE36057c34586303498e3de7C62Ba"), + diamond_proxy_addr: addr("F00B988a98Ca742e7958DeF9F7823b5908715f4a"), + genesis_tx_hash: hash( + "b99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e", + ), + l1_erc20_bridge_proxy_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + l1_erc20_bridge_impl_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + l2_erc20_bridge_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + l1_allow_list_addr: addr("8656770FA78c830456B00B4fFCeE6b1De0e1b888"), + l2_testnet_paymaster_addr: Some(addr("FC073319977e314F251EAE6ae6bE76B0B3BAeeCF")), + } + } + + #[test] + fn from_env() { + let config = r#" +CONTRACTS_MAILBOX_FACET_ADDR="0x0f6Fa881EF414Fc6E818180657c2d5CD7Ac6cCAd" +CONTRACTS_EXECUTOR_FACET_ADDR="0x18B631537801963A964211C0E86645c1aBfbB2d3" +CONTRACTS_GOVERNANCE_FACET_ADDR="0x1e12b20BE86bEc3A0aC95aA52ade345cB9AE7a32" +CONTRACTS_DIAMOND_CUT_FACET_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_GETTERS_FACET_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_VERIFIER_ADDR="0x34782eE00206EAB6478F2692caa800e4A581687b" 
+CONTRACTS_DIAMOND_INIT_ADDR="0xFFC35A5e767BE36057c34586303498e3de7C62Ba" +CONTRACTS_DIAMOND_UPGRADE_INIT_ADDR="0xFFC35A5e767BE36057c34586303498e3de7C62Ba" +CONTRACTS_DIAMOND_PROXY_ADDR="0xF00B988a98Ca742e7958DeF9F7823b5908715f4a" +CONTRACTS_GENESIS_TX_HASH="0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" +CONTRACTS_L1_ERC20_BRIDGE_PROXY_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L1_ALLOW_LIST_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L1_ERC20_BRIDGE_IMPL_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L2_ERC20_BRIDGE_ADDR="0x8656770FA78c830456B00B4fFCeE6b1De0e1b888" +CONTRACTS_L2_TESTNET_PAYMASTER_ADDR="FC073319977e314F251EAE6ae6bE76B0B3BAeeCF" + "#; + set_env(config); + + let actual = ContractsConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/database.rs b/core/lib/config/src/configs/database.rs new file mode 100644 index 000000000000..d1c5a909b38a --- /dev/null +++ b/core/lib/config/src/configs/database.rs @@ -0,0 +1,150 @@ +use serde::Deserialize; +use std::env; +use std::time::Duration; + +/// Database configuration. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct DBConfig { + /// Path to the database data directory. + pub path: String, + /// Path to the database data directory that serves state cache. 
+ pub state_keeper_db_path: String, + /// Path to merkle tree backup directory + pub merkle_tree_backup_path: String, + /// Fast ssd path + pub merkle_tree_fast_ssd_path: String, + /// Number of backups to keep + pub backup_count: usize, + /// Time interval between performing backups + pub backup_interval_ms: u64, + /// Maximum number of blocks to be processed by the full tree at a time + pub max_block_batch: usize, +} + +impl Default for DBConfig { + fn default() -> Self { + Self { + path: "./db".to_owned(), + state_keeper_db_path: "./db/state_keeper".to_owned(), + merkle_tree_backup_path: "./db/backups".to_owned(), + merkle_tree_fast_ssd_path: "./db/lightweight".to_owned(), + backup_count: 5, + backup_interval_ms: 60_000, + max_block_batch: 100, + } + } +} + +impl DBConfig { + pub fn from_env() -> Self { + let mut config = DBConfig::default(); + if let Ok(path) = env::var("DATABASE_PATH") { + config.path = path; + } + if let Ok(path) = env::var("DATABASE_STATE_KEEPER_DB_PATH") { + config.state_keeper_db_path = path; + } + if let Ok(path) = env::var("DATABASE_MERKLE_TREE_BACKUP_PATH") { + config.merkle_tree_backup_path = path; + } + if let Ok(path) = env::var("DATABASE_MERKLE_TREE_FAST_SSD_PATH") { + config.merkle_tree_fast_ssd_path = path; + } + if let Ok(Ok(count)) = env::var("DATABASE_BACKUP_COUNT").map(|s| s.parse()) { + config.backup_count = count; + } + if let Ok(Ok(interval)) = env::var("DATABASE_BACKUP_INTERVAL_MS").map(|s| s.parse()) { + config.backup_interval_ms = interval; + } + if let Ok(Ok(size)) = env::var("DATABASE_MAX_BLOCK_BATCH").map(|s| s.parse()) { + config.max_block_batch = size; + } + config + } + + /// Path to the database data directory. + pub fn path(&self) -> &str { + &self.path + } + + /// Path to the database data directory that serves state cache. + pub fn state_keeper_db_path(&self) -> &str { + &self.state_keeper_db_path + } + + /// Path to the merkle tree backup directory. 
+ pub fn merkle_tree_backup_path(&self) -> &str { + &self.merkle_tree_backup_path + } + + pub fn merkle_tree_fast_ssd_path(&self) -> &str { + &self.merkle_tree_fast_ssd_path + } + + /// Number of backups to keep + pub fn backup_count(&self) -> usize { + self.backup_count + } + + pub fn backup_interval(&self) -> Duration { + Duration::from_millis(self.backup_interval_ms) + } + + pub fn max_block_batch(&self) -> usize { + self.max_block_batch + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> DBConfig { + DBConfig { + path: "./db".to_owned(), + state_keeper_db_path: "./db/state_keeper".to_owned(), + merkle_tree_backup_path: "./db/backups".to_owned(), + merkle_tree_fast_ssd_path: "./db/lightweight".to_owned(), + backup_count: 5, + backup_interval_ms: 60_000, + max_block_batch: 100, + } + } + + #[test] + fn from_env() { + let config = r#" +DATABASE_PATH="./db" +DATABASE_STATE_KEEPER_DB_PATH="./db/state_keeper" +DATABASE_MERKLE_TREE_BACKUP_PATH="./db/backups" +DATABASE_MERKLE_TREE_FAST_SSD_PATH="./db/lightweight" +DATABASE_BACKUP_COUNT=5 +DATABASE_BACKUP_INTERVAL_MS=60000 +DATABASE_MAX_BLOCK_BATCH=100 + "#; + set_env(config); + + let actual = DBConfig::from_env(); + assert_eq!(actual, expected_config()); + } + + /// Checks the correctness of the config helper methods. 
+ #[test] + fn methods() { + let config = expected_config(); + + assert_eq!(config.path(), &config.path); + assert_eq!(config.state_keeper_db_path(), &config.state_keeper_db_path); + assert_eq!( + config.merkle_tree_backup_path(), + &config.merkle_tree_backup_path + ); + assert_eq!( + config.merkle_tree_fast_ssd_path(), + &config.merkle_tree_fast_ssd_path + ); + assert_eq!(config.backup_count(), config.backup_count); + assert_eq!(config.backup_interval().as_secs(), 60); + } +} diff --git a/core/lib/config/src/configs/eth_client.rs b/core/lib/config/src/configs/eth_client.rs new file mode 100644 index 000000000000..96df517f425c --- /dev/null +++ b/core/lib/config/src/configs/eth_client.rs @@ -0,0 +1,51 @@ +// External uses +use serde::Deserialize; +// Local uses +use crate::envy_load; + +/// Configuration for the Ethereum gateways. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ETHClientConfig { + /// Numeric identifier of the L1 network (e.g. `9` for localhost). + pub chain_id: u8, + /// Address of the Ethereum node API. + pub web3_url: String, +} + +impl ETHClientConfig { + pub fn from_env() -> Self { + let config: Self = envy_load!("eth_client", "ETH_CLIENT_"); + if config.web3_url.find(',').is_some() { + panic!( + "Multiple web3 URLs aren't supported anymore. 
Provided invalid value: {}", + config.web3_url + ); + } + config + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> ETHClientConfig { + ETHClientConfig { + chain_id: 9, + web3_url: "http://127.0.0.1:8545".into(), + } + } + + #[test] + fn from_env() { + let config = r#" +ETH_CLIENT_CHAIN_ID="9" +ETH_CLIENT_WEB3_URL="http://127.0.0.1:8545" + "#; + set_env(config); + + let actual = ETHClientConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/eth_sender.rs b/core/lib/config/src/configs/eth_sender.rs new file mode 100644 index 000000000000..afbc65511553 --- /dev/null +++ b/core/lib/config/src/configs/eth_sender.rs @@ -0,0 +1,171 @@ +// Built-in uses +use std::time::Duration; +// External uses +use serde::Deserialize; +// Workspace uses +use zksync_basic_types::{Address, H256}; +// Local uses +use crate::envy_load; + +/// Configuration for the Ethereum sender crate. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ETHSenderConfig { + /// Options related to the Ethereum sender directly. + pub sender: SenderConfig, + /// Options related to the `GasAdjuster` submodule. + pub gas_adjuster: GasAdjusterConfig, +} + +impl ETHSenderConfig { + pub fn from_env() -> Self { + Self { + sender: envy_load!("eth_sender", "ETH_SENDER_SENDER_"), + gas_adjuster: envy_load!("eth_sender.gas_adjuster", "ETH_SENDER_GAS_ADJUSTER_"), + } + } +} + +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] +pub enum ProofSendingMode { + OnlyRealProofs, + OnlySampledProofs, + SkipEveryProof, +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct SenderConfig { + pub aggregated_proof_sizes: Vec, + /// Private key of the operator account. + pub operator_private_key: H256, + /// Address of the operator account. + pub operator_commit_eth_addr: Address, + /// mount of confirmations required to consider L1 transaction committed. 
+ pub wait_confirmations: u64, + /// Node polling period in seconds. + pub tx_poll_period: u64, + /// The maximum number of unconfirmed Ethereum transactions. + pub max_txs_in_flight: u64, + /// The mode in which proofs are sent. + pub proof_sending_mode: ProofSendingMode, + + pub max_aggregated_tx_gas: u32, + pub max_eth_tx_data_size: usize, + pub max_aggregated_blocks_to_commit: u32, + pub max_aggregated_blocks_to_execute: u32, + pub aggregated_block_commit_deadline: u64, + pub aggregated_block_prove_deadline: u64, + pub aggregated_block_execute_deadline: u64, + pub timestamp_criteria_max_allowed_lag: usize, +} + +impl SenderConfig { + /// Converts `self.tx_poll_period` into `Duration`. + pub fn tx_poll_period(&self) -> Duration { + Duration::from_secs(self.tx_poll_period) + } +} + +#[derive(Debug, Deserialize, Copy, Clone, PartialEq)] +pub struct GasAdjusterConfig { + /// Priority Fee to be used by GasAdjuster + pub default_priority_fee_per_gas: u64, + /// Number of blocks collected by GasAdjuster from which base_fee median is taken + pub max_base_fee_samples: usize, + /// Parameter of the transaction base_fee_per_gas pricing formula + pub pricing_formula_parameter_a: f64, + /// Parameter of the transaction base_fee_per_gas pricing formula + pub pricing_formula_parameter_b: f64, + /// Parameter by which the base fee will be multiplied for internal purposes + pub internal_l1_pricing_multiplier: f64, + /// Node polling period in seconds + pub poll_period: u64, +} + +impl GasAdjusterConfig { + /// Converts `self.poll_period` into `Duration`. 
+ pub fn poll_period(&self) -> Duration { + Duration::from_secs(self.poll_period) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::{addr, hash, set_env}; + + fn expected_config() -> ETHSenderConfig { + ETHSenderConfig { + sender: SenderConfig { + aggregated_proof_sizes: vec![1, 5], + aggregated_block_commit_deadline: 30, + aggregated_block_prove_deadline: 3_000, + aggregated_block_execute_deadline: 4_000, + max_aggregated_tx_gas: 4_000_000, + max_eth_tx_data_size: 120_000, + + timestamp_criteria_max_allowed_lag: 30, + max_aggregated_blocks_to_commit: 3, + max_aggregated_blocks_to_execute: 4, + wait_confirmations: 1, + tx_poll_period: 3, + max_txs_in_flight: 3, + operator_private_key: hash( + "27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be", + ), + operator_commit_eth_addr: addr("de03a0B5963f75f1C8485B355fF6D30f3093BDE7"), + proof_sending_mode: ProofSendingMode::SkipEveryProof, + }, + gas_adjuster: GasAdjusterConfig { + default_priority_fee_per_gas: 20000000000, + max_base_fee_samples: 10000, + pricing_formula_parameter_a: 1.5, + pricing_formula_parameter_b: 1.0005, + internal_l1_pricing_multiplier: 0.8, + poll_period: 15, + }, + } + } + + #[test] + fn from_env() { + let config = r#" +ETH_SENDER_SENDER_WAIT_CONFIRMATIONS="1" +ETH_SENDER_SENDER_TX_POLL_PERIOD="3" +ETH_SENDER_SENDER_MAX_TXS_IN_FLIGHT="3" +ETH_SENDER_SENDER_OPERATOR_PRIVATE_KEY="0x27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be" +ETH_SENDER_SENDER_OPERATOR_COMMIT_ETH_ADDR="0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7" +ETH_SENDER_SENDER_PROOF_SENDING_MODE="SkipEveryProof" +ETH_SENDER_GAS_ADJUSTER_DEFAULT_PRIORITY_FEE_PER_GAS="20000000000" +ETH_SENDER_GAS_ADJUSTER_MAX_BASE_FEE_SAMPLES="10000" +ETH_SENDER_GAS_ADJUSTER_PRICING_FORMULA_PARAMETER_A="1.5" +ETH_SENDER_GAS_ADJUSTER_PRICING_FORMULA_PARAMETER_B="1.0005" +ETH_SENDER_GAS_ADJUSTER_INTERNAL_L1_PRICING_MULTIPLIER="0.8" +ETH_SENDER_GAS_ADJUSTER_POLL_PERIOD="15" 
+ETH_SENDER_WAIT_FOR_PROOFS="false" +ETH_SENDER_SENDER_AGGREGATED_PROOF_SIZES="1,5" +ETH_SENDER_SENDER_MAX_AGGREGATED_BLOCKS_TO_COMMIT="3" +ETH_SENDER_SENDER_MAX_AGGREGATED_BLOCKS_TO_EXECUTE="4" +ETH_SENDER_SENDER_AGGREGATED_BLOCK_COMMIT_DEADLINE="30" +ETH_SENDER_SENDER_AGGREGATED_BLOCK_PROVE_DEADLINE="3000" +ETH_SENDER_SENDER_AGGREGATED_BLOCK_EXECUTE_DEADLINE="4000" +ETH_SENDER_SENDER_TIMESTAMP_CRITERIA_MAX_ALLOWED_LAG="30" +ETH_SENDER_SENDER_MAX_AGGREGATED_TX_GAS="4000000" +ETH_SENDER_SENDER_MAX_ETH_TX_DATA_SIZE="120000" + "#; + set_env(config); + + let actual = ETHSenderConfig::from_env(); + assert_eq!(actual, expected_config()); + } + + /// Checks the correctness of the config helper methods. + #[test] + fn methods() { + let config = expected_config(); + + assert_eq!( + config.sender.tx_poll_period(), + Duration::from_secs(config.sender.tx_poll_period) + ); + } +} diff --git a/core/lib/config/src/configs/eth_watch.rs b/core/lib/config/src/configs/eth_watch.rs new file mode 100644 index 000000000000..f1b6e003651a --- /dev/null +++ b/core/lib/config/src/configs/eth_watch.rs @@ -0,0 +1,64 @@ +// Built-in uses +use std::time::Duration; +// External uses +use serde::Deserialize; +// Local uses +use crate::envy_load; + +/// Configuration for the Ethereum sender crate. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ETHWatchConfig { + /// Amount of confirmations for the priority operation to be processed. + /// In production this should be a non-zero value because of block reverts. + pub confirmations_for_eth_event: u64, + /// How often we want to poll the Ethereum node. + /// Value in milliseconds. + pub eth_node_poll_interval: u64, +} + +impl ETHWatchConfig { + pub fn from_env() -> Self { + envy_load!("eth_watch", "ETH_WATCH_") + } + + /// Converts `self.eth_node_poll_interval` into `Duration`. 
+ pub fn poll_interval(&self) -> Duration { + Duration::from_millis(self.eth_node_poll_interval) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> ETHWatchConfig { + ETHWatchConfig { + confirmations_for_eth_event: 0, + eth_node_poll_interval: 300, + } + } + + #[test] + fn from_env() { + let config = r#" +ETH_WATCH_CONFIRMATIONS_FOR_ETH_EVENT="0" +ETH_WATCH_ETH_NODE_POLL_INTERVAL="300" + "#; + set_env(config); + + let actual = ETHWatchConfig::from_env(); + assert_eq!(actual, expected_config()); + } + + /// Checks the correctness of the config helper methods. + #[test] + fn methods() { + let config = expected_config(); + + assert_eq!( + config.poll_interval(), + Duration::from_millis(config.eth_node_poll_interval) + ); + } +} diff --git a/core/lib/config/src/configs/fetcher.rs b/core/lib/config/src/configs/fetcher.rs new file mode 100644 index 000000000000..5f51dc76edef --- /dev/null +++ b/core/lib/config/src/configs/fetcher.rs @@ -0,0 +1,108 @@ +use std::time::Duration; + +// Built-in uses +// External uses +use serde::Deserialize; +// Workspace uses +// Local uses +use crate::envy_load; +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] +pub enum TokenListSource { + OneInch, + Mock, +} + +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] +pub enum TokenPriceSource { + CoinGecko, + CoinMarketCap, + Mock, +} + +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] +pub enum TokenTradingVolumeSource { + Uniswap, + Mock, +} + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct SingleFetcherConfig { + /// Indicator of the API to be used for getting information. + pub source: TYPE, + /// URL of the API to use for fetching data. Not used for `mock` source. + pub url: String, + // Interval for fetching API data in seconds. Basically, how ofter do we need to poll third-part APIs. 
+ pub fetching_interval: u64, +} + +impl SingleFetcherConfig { + pub fn fetching_interval(&self) -> Duration { + Duration::from_secs(self.fetching_interval) + } +} + +/// Configuration for the third-party API data fetcher. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct FetcherConfig { + pub token_list: SingleFetcherConfig, + pub token_price: SingleFetcherConfig, + pub token_trading_volume: SingleFetcherConfig, +} + +impl FetcherConfig { + pub fn from_env() -> Self { + Self { + token_list: envy_load!("token_list", "FETCHER_TOKEN_LIST_"), + token_price: envy_load!("token_price", "FETCHER_TOKEN_PRICE_"), + token_trading_volume: envy_load!( + "token_trading_volume", + "FETCHER_TOKEN_TRADING_VOLUME_" + ), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> FetcherConfig { + FetcherConfig { + token_list: SingleFetcherConfig { + source: TokenListSource::OneInch, + url: "http://127.0.0.1:1020".into(), + fetching_interval: 10, + }, + token_price: SingleFetcherConfig { + source: TokenPriceSource::CoinGecko, + url: "http://127.0.0.1:9876".into(), + fetching_interval: 7, + }, + token_trading_volume: SingleFetcherConfig { + source: TokenTradingVolumeSource::Uniswap, + url: "http://127.0.0.1:9975/graphql".to_string(), + fetching_interval: 5, + }, + } + } + + #[test] + fn from_env() { + let config = r#" +FETCHER_TOKEN_LIST_SOURCE="OneInch" +FETCHER_TOKEN_LIST_URL="http://127.0.0.1:1020" +FETCHER_TOKEN_LIST_FETCHING_INTERVAL="10" +FETCHER_TOKEN_PRICE_SOURCE="CoinGecko" +FETCHER_TOKEN_PRICE_URL="http://127.0.0.1:9876" +FETCHER_TOKEN_PRICE_FETCHING_INTERVAL="7" +FETCHER_TOKEN_TRADING_VOLUME_SOURCE="Uniswap" +FETCHER_TOKEN_TRADING_VOLUME_URL="http://127.0.0.1:9975/graphql" +FETCHER_TOKEN_TRADING_VOLUME_FETCHING_INTERVAL="5" + "#; + set_env(config); + + let actual = FetcherConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/mod.rs 
b/core/lib/config/src/configs/mod.rs new file mode 100644 index 000000000000..43e9b1efeb5e --- /dev/null +++ b/core/lib/config/src/configs/mod.rs @@ -0,0 +1,43 @@ +// Public re-exports +pub use self::{ + api::ApiConfig, chain::ChainConfig, circuit_synthesizer::CircuitSynthesizerConfig, + contract_verifier::ContractVerifierConfig, contracts::ContractsConfig, database::DBConfig, + eth_client::ETHClientConfig, eth_sender::ETHSenderConfig, eth_sender::GasAdjusterConfig, + eth_watch::ETHWatchConfig, fetcher::FetcherConfig, nfs::NfsConfig, + object_store::ObjectStoreConfig, prover::ProverConfig, prover::ProverConfigs, + prover_group::ProverGroupConfig, utils::Prometheus, witness_generator::WitnessGeneratorConfig, +}; + +pub mod api; +pub mod chain; +pub mod circuit_synthesizer; +pub mod contract_verifier; +pub mod contracts; +pub mod database; +pub mod eth_client; +pub mod eth_sender; +pub mod eth_watch; +pub mod fetcher; +pub mod nfs; +pub mod object_store; +pub mod prover; +pub mod prover_group; +pub mod utils; +pub mod witness_generator; + +#[cfg(test)] +pub(crate) mod test_utils; + +/// Convenience macro that loads the structure from the environment variable given the prefix. +/// +/// # Panics +/// +/// Panics if the config cannot be loaded from the environment variables. +#[macro_export] +macro_rules! envy_load { + ($name:expr, $prefix:expr) => { + envy::prefixed($prefix) + .from_env() + .unwrap_or_else(|err| panic!("Cannot load config <{}>: {}", $name, err)) + }; +} diff --git a/core/lib/config/src/configs/nfs.rs b/core/lib/config/src/configs/nfs.rs new file mode 100644 index 000000000000..9232f96d5c76 --- /dev/null +++ b/core/lib/config/src/configs/nfs.rs @@ -0,0 +1,35 @@ +use crate::envy_load; +use serde::Deserialize; +/// Configuration for the Network file system. 
+#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct NfsConfig { + pub setup_key_mount_path: String, +} + +impl NfsConfig { + pub fn from_env() -> Self { + envy_load!("nfs", "NFS_") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> NfsConfig { + NfsConfig { + setup_key_mount_path: "/path/to/setup_keys".to_string(), + } + } + + #[test] + fn from_env() { + let config = r#" +NFS_SETUP_KEY_MOUNT_PATH="/path/to/setup_keys" + "#; + set_env(config); + let actual = NfsConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/configs/object_store.rs b/core/lib/config/src/configs/object_store.rs new file mode 100644 index 000000000000..cf15852c701c --- /dev/null +++ b/core/lib/config/src/configs/object_store.rs @@ -0,0 +1,45 @@ +use crate::envy_load; +use serde::Deserialize; + +/// Configuration for the object store +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ObjectStoreConfig { + pub service_account_path: String, + pub bucket_base_url: String, + pub mode: String, + pub file_backed_base_path: String, +} + +impl ObjectStoreConfig { + pub fn from_env() -> Self { + envy_load!("object_store", "OBJECT_STORE_") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> ObjectStoreConfig { + ObjectStoreConfig { + service_account_path: "/path/to/service_account.json".to_string(), + bucket_base_url: "/base/url".to_string(), + mode: "FileBacked".to_string(), + file_backed_base_path: "artifacts".to_string(), + } + } + + #[test] + fn from_env() { + let config = r#" +OBJECT_STORE_SERVICE_ACCOUNT_PATH="/path/to/service_account.json" +OBJECT_STORE_BUCKET_BASE_URL="/base/url" +OBJECT_STORE_MODE="FileBacked" +OBJECT_STORE_FILE_BACKED_BASE_PATH="artifacts" + "#; + set_env(config); + let actual = ObjectStoreConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git 
a/core/lib/config/src/configs/prover.rs b/core/lib/config/src/configs/prover.rs new file mode 100644 index 000000000000..73f4abd92a24 --- /dev/null +++ b/core/lib/config/src/configs/prover.rs @@ -0,0 +1,307 @@ +use std::time::Duration; + +// Built-in uses +// External uses +use serde::Deserialize; + +// Local uses +use crate::envy_load; + +/// Configuration for the prover application +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ProverConfig { + /// Port to which the Prometheus exporter server is listening. + pub prometheus_port: u16, + /// Currently only a single (largest) key is supported. We'll support different ones in the future + pub initial_setup_key_path: String, + /// https://storage.googleapis.com/universal-setup/setup_2\^26.key + pub key_download_url: String, + /// Max time for proof to be generated + pub generation_timeout_in_secs: u16, + /// Number of threads to be used concurrent proof generation. + pub number_of_threads: u16, + /// Max attempts for generating proof + pub max_attempts: u32, + // Polling time in mill-seconds. + pub polling_duration_in_millis: u64, + // Path to setup keys for individual circuit. + pub setup_keys_path: String, + // Group id for this prover, provers running the same circuit types shall have same group id. 
+ pub specialized_prover_group_id: u8, + // Number of setup-keys kept in memory without swapping + // number_of_setup_slots = (R-C*A-4)/S + // R is available ram + // C is the number of parallel synth + // A is the size of Assembly that is 12gb + // S is the size of the Setup that is 20gb + // constant 4 is for the data copy with gpu + pub number_of_setup_slots: u8, + /// Port at which server would be listening to receive incoming assembly + pub assembly_receiver_port: u16, + /// Socket polling time for receiving incoming assembly + pub assembly_receiver_poll_time_in_millis: u64, + /// maximum number of assemblies that are kept in memory, + pub assembly_queue_capacity: usize, +} + +/// Prover configs for different machine types that are currently supported. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ProverConfigs { + // used by witness-generator + pub non_gpu: ProverConfig, + // https://gcloud-compute.com/a2-highgpu-2g.html + pub two_gpu_forty_gb_mem: ProverConfig, + // https://gcloud-compute.com/a2-ultragpu-1g.html + pub one_gpu_eighty_gb_mem: ProverConfig, + // https://gcloud-compute.com/a2-ultragpu-2g.html + pub two_gpu_eighty_gb_mem: ProverConfig, + // https://gcloud-compute.com/a2-ultragpu-4g.html + pub four_gpu_eighty_gb_mem: ProverConfig, +} + +impl ProverConfig { + pub fn proof_generation_timeout(&self) -> Duration { + Duration::from_secs(self.generation_timeout_in_secs as u64) + } +} + +impl ProverConfigs { + pub fn from_env() -> Self { + Self { + non_gpu: envy_load!("non_gpu", "PROVER_NON_GPU_"), + two_gpu_forty_gb_mem: envy_load!( + "two_gpu_forty_gb_mem", + "PROVER_TWO_GPU_FORTY_GB_MEM_" + ), + one_gpu_eighty_gb_mem: envy_load!( + "one_gpu_eighty_gb_mem", + "PROVER_ONE_GPU_EIGHTY_GB_MEM_" + ), + two_gpu_eighty_gb_mem: envy_load!( + "two_gpu_eighty_gb_mem", + "PROVER_TWO_GPU_EIGHTY_GB_MEM_" + ), + four_gpu_eighty_gb_mem: envy_load!( + "four_gpu_eighty_gb_mem", + "PROVER_FOUR_GPU_EIGHTY_GB_MEM_" + ), + } + } +} + +#[cfg(test)] +mod tests 
{ + use crate::configs::test_utils::set_env; + + use super::*; + + fn expected_config() -> ProverConfigs { + ProverConfigs { + non_gpu: ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 2, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 0, + number_of_setup_slots: 2, + assembly_receiver_port: 17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }, + two_gpu_forty_gb_mem: ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 2, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 1, + number_of_setup_slots: 5, + assembly_receiver_port: 17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }, + one_gpu_eighty_gb_mem: ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 4, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 2, + number_of_setup_slots: 5, + assembly_receiver_port: 17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }, + two_gpu_eighty_gb_mem: ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 9, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 3, + number_of_setup_slots: 9, + assembly_receiver_port: 
17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }, + four_gpu_eighty_gb_mem: ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 18, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 4, + number_of_setup_slots: 18, + assembly_receiver_port: 17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }, + } + } + + const CONFIG: &str = r#" +PROVER_NON_GPU_PROMETHEUS_PORT="3313" +PROVER_NON_GPU_INITIAL_SETUP_KEY_PATH="key" +PROVER_NON_GPU_KEY_DOWNLOAD_URL="value" +PROVER_NON_GPU_GENERATION_TIMEOUT_IN_SECS=2700 +PROVER_NON_GPU_NUMBER_OF_THREADS="2" +PROVER_NON_GPU_MAX_ATTEMPTS="4" +PROVER_NON_GPU_POLLING_DURATION_IN_MILLIS=5 +PROVER_NON_GPU_SETUP_KEYS_PATH="/usr/src/setup-keys" +PROVER_NON_GPU_NUMBER_OF_SETUP_SLOTS=2 +PROVER_NON_GPU_ASSEMBLY_RECEIVER_PORT=17791 +PROVER_NON_GPU_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS=250 +PROVER_NON_GPU_ASSEMBLY_QUEUE_CAPACITY=5 +PROVER_NON_GPU_SPECIALIZED_PROVER_GROUP_ID=0 + +PROVER_TWO_GPU_FORTY_GB_MEM_PROMETHEUS_PORT="3313" +PROVER_TWO_GPU_FORTY_GB_MEM_INITIAL_SETUP_KEY_PATH="key" +PROVER_TWO_GPU_FORTY_GB_MEM_KEY_DOWNLOAD_URL="value" +PROVER_TWO_GPU_FORTY_GB_MEM_GENERATION_TIMEOUT_IN_SECS=2700 +PROVER_TWO_GPU_FORTY_GB_MEM_NUMBER_OF_THREADS="2" +PROVER_TWO_GPU_FORTY_GB_MEM_MAX_ATTEMPTS="4" +PROVER_TWO_GPU_FORTY_GB_MEM_POLLING_DURATION_IN_MILLIS=5 +PROVER_TWO_GPU_FORTY_GB_MEM_SETUP_KEYS_PATH="/usr/src/setup-keys" +PROVER_TWO_GPU_FORTY_GB_MEM_NUMBER_OF_SETUP_SLOTS=5 +PROVER_TWO_GPU_FORTY_GB_MEM_ASSEMBLY_RECEIVER_PORT=17791 +PROVER_TWO_GPU_FORTY_GB_MEM_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS=250 +PROVER_TWO_GPU_FORTY_GB_MEM_ASSEMBLY_QUEUE_CAPACITY=5 +PROVER_TWO_GPU_FORTY_GB_MEM_SPECIALIZED_PROVER_GROUP_ID=1 + +PROVER_ONE_GPU_EIGHTY_GB_MEM_PROMETHEUS_PORT="3313" 
+PROVER_ONE_GPU_EIGHTY_GB_MEM_INITIAL_SETUP_KEY_PATH="key" +PROVER_ONE_GPU_EIGHTY_GB_MEM_KEY_DOWNLOAD_URL="value" +PROVER_ONE_GPU_EIGHTY_GB_MEM_GENERATION_TIMEOUT_IN_SECS=2700 +PROVER_ONE_GPU_EIGHTY_GB_MEM_NUMBER_OF_THREADS="4" +PROVER_ONE_GPU_EIGHTY_GB_MEM_MAX_ATTEMPTS="4" +PROVER_ONE_GPU_EIGHTY_GB_MEM_POLLING_DURATION_IN_MILLIS=5 +PROVER_ONE_GPU_EIGHTY_GB_MEM_SETUP_KEYS_PATH="/usr/src/setup-keys" +PROVER_ONE_GPU_EIGHTY_GB_MEM_NUMBER_OF_SETUP_SLOTS=5 +PROVER_ONE_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_PORT=17791 +PROVER_ONE_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS=250 +PROVER_ONE_GPU_EIGHTY_GB_MEM_ASSEMBLY_QUEUE_CAPACITY=5 +PROVER_ONE_GPU_EIGHTY_GB_MEM_SPECIALIZED_PROVER_GROUP_ID=2 + +PROVER_TWO_GPU_EIGHTY_GB_MEM_PROMETHEUS_PORT="3313" +PROVER_TWO_GPU_EIGHTY_GB_MEM_INITIAL_SETUP_KEY_PATH="key" +PROVER_TWO_GPU_EIGHTY_GB_MEM_KEY_DOWNLOAD_URL="value" +PROVER_TWO_GPU_EIGHTY_GB_MEM_GENERATION_TIMEOUT_IN_SECS=2700 +PROVER_TWO_GPU_EIGHTY_GB_MEM_NUMBER_OF_THREADS="9" +PROVER_TWO_GPU_EIGHTY_GB_MEM_MAX_ATTEMPTS="4" +PROVER_TWO_GPU_EIGHTY_GB_MEM_POLLING_DURATION_IN_MILLIS=5 +PROVER_TWO_GPU_EIGHTY_GB_MEM_SETUP_KEYS_PATH="/usr/src/setup-keys" +PROVER_TWO_GPU_EIGHTY_GB_MEM_NUMBER_OF_SETUP_SLOTS=9 +PROVER_TWO_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_PORT=17791 +PROVER_TWO_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS=250 +PROVER_TWO_GPU_EIGHTY_GB_MEM_ASSEMBLY_QUEUE_CAPACITY=5 +PROVER_TWO_GPU_EIGHTY_GB_MEM_SPECIALIZED_PROVER_GROUP_ID=3 + +PROVER_FOUR_GPU_EIGHTY_GB_MEM_PROMETHEUS_PORT="3313" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_INITIAL_SETUP_KEY_PATH="key" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_KEY_DOWNLOAD_URL="value" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_GENERATION_TIMEOUT_IN_SECS=2700 +PROVER_FOUR_GPU_EIGHTY_GB_MEM_NUMBER_OF_THREADS="18" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_MAX_ATTEMPTS="4" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_POLLING_DURATION_IN_MILLIS=5 +PROVER_FOUR_GPU_EIGHTY_GB_MEM_SETUP_KEYS_PATH="/usr/src/setup-keys" +PROVER_FOUR_GPU_EIGHTY_GB_MEM_NUMBER_OF_SETUP_SLOTS=18 
+PROVER_FOUR_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_PORT=17791 +PROVER_FOUR_GPU_EIGHTY_GB_MEM_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS=250 +PROVER_FOUR_GPU_EIGHTY_GB_MEM_ASSEMBLY_QUEUE_CAPACITY=5 +PROVER_FOUR_GPU_EIGHTY_GB_MEM_SPECIALIZED_PROVER_GROUP_ID=4 + "#; + + #[test] + fn from_env() { + set_env(CONFIG); + let actual = ProverConfigs::from_env(); + assert_eq!(actual, expected_config()); + } + + fn convert<'a, T: IntoIterator>( + iter: T, + prefix: &str, + ) -> ProverConfig { + let iter = iter + .into_iter() + .map(|(x, y)| (x.to_string(), y.to_string())); + + envy::prefixed(prefix).from_iter(iter).unwrap() + } + + #[test] + fn from_env_some() { + let expected_config = ProverConfig { + prometheus_port: 3313, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + generation_timeout_in_secs: 2700u16, + number_of_threads: 2, + max_attempts: 4, + polling_duration_in_millis: 5, + setup_keys_path: "/usr/src/setup-keys".to_string(), + specialized_prover_group_id: 0, + number_of_setup_slots: 11, + assembly_receiver_port: 17791, + assembly_receiver_poll_time_in_millis: 250, + assembly_queue_capacity: 5, + }; + + let config = [ + ("PROVER_PROMETHEUS_PORT", "3313"), + ("PROVER_INITIAL_SETUP_KEY_PATH", "key"), + ("PROVER_KEY_DOWNLOAD_URL", "value"), + ("PROVER_GENERATION_TIMEOUT_IN_SECS", "2700"), + ("PROVER_NUMBER_OF_THREADS", "2"), + ("PROVER_MAX_ATTEMPTS", "4"), + ("PROVER_POLLING_DURATION_IN_MILLIS", "5"), + ("PROVER_SETUP_KEYS_PATH", "/usr/src/setup-keys"), + ("PROVER_NUMBER_OF_SETUP_SLOTS", "11"), + ("PROVER_ASSEMBLY_RECEIVER_PORT", "17791"), + ("PROVER_ASSEMBLY_RECEIVER_POLL_TIME_IN_MILLIS", "250"), + ("PROVER_ASSEMBLY_QUEUE_CAPACITY", "5"), + ("PROVER_SPECIALIZED_PROVER_GROUP_ID", "0"), + ] + .iter() + .chain(vec![&("PROVER_CIRCUIT_TYPES", "1,2")]); + + let actual = convert(config, "PROVER_"); + assert_eq!(actual, expected_config); + } +} diff --git a/core/lib/config/src/configs/prover_group.rs b/core/lib/config/src/configs/prover_group.rs 
new file mode 100644 index 000000000000..507ce56f95fa --- /dev/null +++ b/core/lib/config/src/configs/prover_group.rs @@ -0,0 +1,190 @@ +use serde::Deserialize; + +use crate::envy_load; + +/// Configuration for the grouping of specialized provers. +/// This config would be used by circuit-synthesizer and provers. +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct ProverGroupConfig { + pub group_0_circuit_ids: Vec, + pub group_1_circuit_ids: Vec, + pub group_2_circuit_ids: Vec, + pub group_3_circuit_ids: Vec, + pub group_4_circuit_ids: Vec, + pub group_5_circuit_ids: Vec, + pub group_6_circuit_ids: Vec, + pub group_7_circuit_ids: Vec, + pub group_8_circuit_ids: Vec, + pub group_9_circuit_ids: Vec, +} + +impl ProverGroupConfig { + pub fn from_env() -> Self { + envy_load!("prover_group", "PROVER_GROUP_") + } + + pub fn get_circuit_ids_for_group_id(&self, group_id: u8) -> Option> { + match group_id { + 0 => Some(self.group_0_circuit_ids.clone()), + 1 => Some(self.group_1_circuit_ids.clone()), + 2 => Some(self.group_2_circuit_ids.clone()), + 3 => Some(self.group_3_circuit_ids.clone()), + 4 => Some(self.group_4_circuit_ids.clone()), + 5 => Some(self.group_5_circuit_ids.clone()), + 6 => Some(self.group_6_circuit_ids.clone()), + 7 => Some(self.group_7_circuit_ids.clone()), + 8 => Some(self.group_8_circuit_ids.clone()), + 9 => Some(self.group_9_circuit_ids.clone()), + _ => None, + } + } + + pub fn get_group_id_for_circuit_id(&self, circuit_id: u8) -> Option { + let configs = [ + &self.group_0_circuit_ids, + &self.group_1_circuit_ids, + &self.group_2_circuit_ids, + &self.group_3_circuit_ids, + &self.group_4_circuit_ids, + &self.group_5_circuit_ids, + &self.group_6_circuit_ids, + &self.group_7_circuit_ids, + &self.group_8_circuit_ids, + &self.group_9_circuit_ids, + ]; + configs + .iter() + .enumerate() + .find(|(_, group)| group.contains(&circuit_id)) + .map(|(group_id, _)| group_id as u8) + } +} + +#[cfg(test)] +mod tests { + use 
crate::configs::test_utils::set_env; + + use super::*; + + fn expected_config() -> ProverGroupConfig { + ProverGroupConfig { + group_0_circuit_ids: vec![0, 18], + group_1_circuit_ids: vec![1, 4], + group_2_circuit_ids: vec![2, 5], + group_3_circuit_ids: vec![6, 7], + group_4_circuit_ids: vec![8, 9], + group_5_circuit_ids: vec![10, 11], + group_6_circuit_ids: vec![12, 13], + group_7_circuit_ids: vec![14, 15], + group_8_circuit_ids: vec![16, 17], + group_9_circuit_ids: vec![3], + } + } + + const CONFIG: &str = r#" + PROVER_GROUP_GROUP_0_CIRCUIT_IDS="0,18" + PROVER_GROUP_GROUP_1_CIRCUIT_IDS="1,4" + PROVER_GROUP_GROUP_2_CIRCUIT_IDS="2,5" + PROVER_GROUP_GROUP_3_CIRCUIT_IDS="6,7" + PROVER_GROUP_GROUP_4_CIRCUIT_IDS="8,9" + PROVER_GROUP_GROUP_5_CIRCUIT_IDS="10,11" + PROVER_GROUP_GROUP_6_CIRCUIT_IDS="12,13" + PROVER_GROUP_GROUP_7_CIRCUIT_IDS="14,15" + PROVER_GROUP_GROUP_8_CIRCUIT_IDS="16,17" + PROVER_GROUP_GROUP_9_CIRCUIT_IDS="3" + "#; + + #[test] + fn from_env() { + set_env(CONFIG); + let actual = ProverGroupConfig::from_env(); + assert_eq!(actual, expected_config()); + } + + #[test] + fn get_group_id_for_circuit_id() { + set_env(CONFIG); + let prover_group_config = ProverGroupConfig::from_env(); + + assert_eq!(Some(0), prover_group_config.get_group_id_for_circuit_id(0)); + assert_eq!(Some(0), prover_group_config.get_group_id_for_circuit_id(18)); + + assert_eq!(Some(1), prover_group_config.get_group_id_for_circuit_id(1)); + assert_eq!(Some(1), prover_group_config.get_group_id_for_circuit_id(4)); + + assert_eq!(Some(2), prover_group_config.get_group_id_for_circuit_id(2)); + assert_eq!(Some(2), prover_group_config.get_group_id_for_circuit_id(5)); + + assert_eq!(Some(3), prover_group_config.get_group_id_for_circuit_id(6)); + assert_eq!(Some(3), prover_group_config.get_group_id_for_circuit_id(7)); + + assert_eq!(Some(4), prover_group_config.get_group_id_for_circuit_id(8)); + assert_eq!(Some(4), prover_group_config.get_group_id_for_circuit_id(9)); + + assert_eq!(Some(5), 
prover_group_config.get_group_id_for_circuit_id(10)); + assert_eq!(Some(5), prover_group_config.get_group_id_for_circuit_id(11)); + + assert_eq!(Some(6), prover_group_config.get_group_id_for_circuit_id(12)); + assert_eq!(Some(6), prover_group_config.get_group_id_for_circuit_id(13)); + + assert_eq!(Some(7), prover_group_config.get_group_id_for_circuit_id(14)); + assert_eq!(Some(7), prover_group_config.get_group_id_for_circuit_id(15)); + + assert_eq!(Some(8), prover_group_config.get_group_id_for_circuit_id(16)); + assert_eq!(Some(8), prover_group_config.get_group_id_for_circuit_id(17)); + + assert_eq!(Some(9), prover_group_config.get_group_id_for_circuit_id(3)); + assert!(prover_group_config + .get_group_id_for_circuit_id(19) + .is_none()); + } + + #[test] + fn get_circuit_ids_for_group_id() { + set_env(CONFIG); + let prover_group_config = ProverGroupConfig::from_env(); + assert_eq!( + Some(vec![0, 18]), + prover_group_config.get_circuit_ids_for_group_id(0) + ); + assert_eq!( + Some(vec![1, 4]), + prover_group_config.get_circuit_ids_for_group_id(1) + ); + assert_eq!( + Some(vec![2, 5]), + prover_group_config.get_circuit_ids_for_group_id(2) + ); + assert_eq!( + Some(vec![6, 7]), + prover_group_config.get_circuit_ids_for_group_id(3) + ); + assert_eq!( + Some(vec![8, 9]), + prover_group_config.get_circuit_ids_for_group_id(4) + ); + assert_eq!( + Some(vec![10, 11]), + prover_group_config.get_circuit_ids_for_group_id(5) + ); + assert_eq!( + Some(vec![12, 13]), + prover_group_config.get_circuit_ids_for_group_id(6) + ); + assert_eq!( + Some(vec![14, 15]), + prover_group_config.get_circuit_ids_for_group_id(7) + ); + assert_eq!( + Some(vec![16, 17]), + prover_group_config.get_circuit_ids_for_group_id(8) + ); + assert_eq!( + Some(vec![3]), + prover_group_config.get_circuit_ids_for_group_id(9) + ); + assert!(prover_group_config + .get_circuit_ids_for_group_id(10) + .is_none()); + } +} diff --git a/core/lib/config/src/configs/test_utils.rs 
b/core/lib/config/src/configs/test_utils.rs new file mode 100644 index 000000000000..8581b81853f8 --- /dev/null +++ b/core/lib/config/src/configs/test_utils.rs @@ -0,0 +1,38 @@ +// Built-in uses. +use std::{env, str::FromStr}; +// Workspace uses +use zksync_basic_types::{Address, H256}; + +/// Parses the provided fixture in a form of `VARIABLE_NAME=variable_value` lines and +/// sets the corresponding environment variables. +pub fn set_env(fixture: &str) { + for line in fixture.split('\n').map(str::trim) { + if line.is_empty() { + // Skip empty lines. + continue; + } + + let elements: Vec<_> = line.split('=').collect(); + assert_eq!( + elements.len(), + 2, + "Incorrect line for setting environment variable: {}", + line + ); + + let variable_name = elements[0]; + let variable_value = elements[1].trim_matches('"'); + + env::set_var(variable_name, variable_value); + } +} + +/// Parses the address panicking upon deserialization failure. +pub fn addr(addr_str: &str) -> Address { + Address::from_str(addr_str).expect("Incorrect address string") +} + +/// Parses the H256 panicking upon deserialization failure. +pub fn hash(addr_str: &str) -> H256 { + H256::from_str(addr_str).expect("Incorrect hash string") +} diff --git a/core/lib/config/src/configs/utils.rs b/core/lib/config/src/configs/utils.rs new file mode 100644 index 000000000000..78711201328f --- /dev/null +++ b/core/lib/config/src/configs/utils.rs @@ -0,0 +1,18 @@ +use serde::Deserialize; +use std::time::Duration; + +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct Prometheus { + /// Port to which the Prometheus exporter server is listening. + pub listener_port: u16, + /// Url of Pushgateway. + pub pushgateway_url: String, + /// Push interval in ms. 
+ pub push_interval_ms: Option, +} + +impl Prometheus { + pub fn push_interval(&self) -> Duration { + Duration::from_millis(self.push_interval_ms.unwrap_or(100)) + } +} diff --git a/core/lib/config/src/configs/witness_generator.rs b/core/lib/config/src/configs/witness_generator.rs new file mode 100644 index 000000000000..d11bf4427b46 --- /dev/null +++ b/core/lib/config/src/configs/witness_generator.rs @@ -0,0 +1,109 @@ +use std::time::Duration; + +// Built-in uses +// External uses +use serde::Deserialize; +// Local uses +use crate::envy_load; + +/// Configuration for the witness generation +#[derive(Debug, Deserialize, Clone, PartialEq)] +pub struct WitnessGeneratorConfig { + /// Max time for witness to be generated + pub generation_timeout_in_secs: u16, + /// Currently only a single (largest) key is supported. + pub initial_setup_key_path: String, + /// https://storage.googleapis.com/universal-setup/setup_2\^26.key + pub key_download_url: String, + /// Max attempts for generating witness + pub max_attempts: u32, + /// Is sampling enabled + pub sampling_enabled: bool, + /// Safe prover lag to process block + pub sampling_safe_prover_lag: Option, + /// Max prover lag to process block + pub sampling_max_prover_lag: Option, + pub dump_arguments_for_blocks: Vec, +} + +#[derive(Debug, Clone, Copy)] +pub struct SamplingParams { + pub safe_prover_lag: usize, + pub max_prover_lag: usize, +} + +impl SamplingParams { + pub fn calculate_sampling_probability(&self, prover_lag: usize) -> f64 { + let numerator = self.max_prover_lag as f64 - prover_lag as f64; + let denominator = (self.max_prover_lag - self.safe_prover_lag).max(1) as f64; + (numerator / denominator).min(1f64).max(0f64) + } +} + +#[derive(Debug, Clone, Copy)] +pub enum SamplingMode { + Enabled(SamplingParams), + Disabled, +} + +impl WitnessGeneratorConfig { + pub fn from_env() -> Self { + envy_load!("witness", "WITNESS_") + } + + pub fn witness_generation_timeout(&self) -> Duration { + 
Duration::from_secs(self.generation_timeout_in_secs as u64) + } + + pub fn sampling_mode(&self) -> SamplingMode { + match ( + self.sampling_enabled, + self.sampling_safe_prover_lag, + self.sampling_max_prover_lag, + ) { + (true, Some(safe_prover_lag), Some(max_prover_lag)) => { + SamplingMode::Enabled(SamplingParams { + safe_prover_lag, + max_prover_lag, + }) + } + _ => SamplingMode::Disabled, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::configs::test_utils::set_env; + + fn expected_config() -> WitnessGeneratorConfig { + WitnessGeneratorConfig { + generation_timeout_in_secs: 900u16, + initial_setup_key_path: "key".to_owned(), + key_download_url: "value".to_owned(), + max_attempts: 4, + sampling_enabled: true, + sampling_safe_prover_lag: Some(50), + sampling_max_prover_lag: Some(300), + dump_arguments_for_blocks: vec![2, 3], + } + } + + #[test] + fn from_env() { + let config = r#" + WITNESS_GENERATION_TIMEOUT_IN_SECS=900 + WITNESS_INITIAL_SETUP_KEY_PATH="key" + WITNESS_KEY_DOWNLOAD_URL="value" + WITNESS_MAX_ATTEMPTS=4 + WITNESS_SAMPLING_ENABLED=true + WITNESS_SAMPLING_SAFE_PROVER_LAG=50 + WITNESS_SAMPLING_MAX_PROVER_LAG=300 + WITNESS_DUMP_ARGUMENTS_FOR_BLOCKS="2,3" + "#; + set_env(config); + let actual = WitnessGeneratorConfig::from_env(); + assert_eq!(actual, expected_config()); + } +} diff --git a/core/lib/config/src/constants/blocks.rs b/core/lib/config/src/constants/blocks.rs new file mode 100644 index 000000000000..d48d3007bc7c --- /dev/null +++ b/core/lib/config/src/constants/blocks.rs @@ -0,0 +1,8 @@ +use zksync_basic_types::H256; + +// Be design we don't have a term: uncle blocks. Hence we have to use rlp hash +// from empty list for ethereum compatibility. 
+pub const EMPTY_UNCLES_HASH: H256 = H256([ + 0x1d, 0xcc, 0x4d, 0xe8, 0xde, 0xc7, 0x5d, 0x7a, 0xab, 0x85, 0xb5, 0x67, 0xb6, 0xcc, 0xd4, 0x1a, + 0xd3, 0x12, 0x45, 0x1b, 0x94, 0x8a, 0x74, 0x13, 0xf0, 0xa1, 0x42, 0xfd, 0x40, 0xd4, 0x93, 0x47, +]); diff --git a/core/lib/config/src/constants/contracts.rs b/core/lib/config/src/constants/contracts.rs new file mode 100644 index 000000000000..3fe4349fed0a --- /dev/null +++ b/core/lib/config/src/constants/contracts.rs @@ -0,0 +1,96 @@ +use zksync_basic_types::{Address, H160, H256}; + +/// The following are addresses copied from the compiler_common +/// + +pub const BOOTLOADER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x01, +]); + +pub const ACCOUNT_CODE_STORAGE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x02, +]); + +pub const NONCE_HOLDER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x03, +]); + +pub const KNOWN_CODES_STORAGE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x04, +]); + +pub const IMMUTABLE_SIMULATOR_STORAGE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x05, +]); + +pub const CONTRACT_DEPLOYER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x06, +]); + +pub const CONTRACT_FORCE_DEPLOYER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x07, +]); + +pub const L1_MESSENGER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x08, +]); +pub const MSG_VALUE_SIMULATOR_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x09, +]); + +/// The `keccak256` predefined address. +pub const KECCAK256_PRECOMPILE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x10, +]); + +pub const L2_ETH_TOKEN_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x0a, +]); + +pub const SYSTEM_CONTEXT_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x0b, +]); + +pub const BOOTLOADER_UTILITIES_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x0c, +]); + +pub const EVENT_WRITER_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x0d, +]); + +/// The `ecrecover` system contract address. +pub const ECRECOVER_PRECOMPILE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x01, +]); + +/// The `sha256` system contract address. 
+pub const SHA256_PRECOMPILE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x02, +]); + +pub const ERC20_TRANSFER_TOPIC: H256 = H256([ + 221, 242, 82, 173, 27, 226, 200, 155, 105, 194, 176, 104, 252, 55, 141, 170, 149, 43, 167, 241, + 99, 196, 161, 22, 40, 245, 90, 77, 245, 35, 179, 239, +]); + +pub const MINT_AND_BURN_ADDRESS: H160 = H160::zero(); + +// The storage_log.value database value for a contract that was deployed in a failed transaction. +pub const FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH: H256 = H256::zero(); diff --git a/core/lib/config/src/constants/crypto.rs b/core/lib/config/src/constants/crypto.rs new file mode 100644 index 000000000000..800a58c189a8 --- /dev/null +++ b/core/lib/config/src/constants/crypto.rs @@ -0,0 +1,23 @@ +use num::BigUint; +use once_cell::sync::Lazy; + +pub const ZKPORTER_IS_AVAILABLE: bool = false; + +/// Depth of the account tree. +pub const ROOT_TREE_DEPTH: usize = 256; +/// Cost of 1 byte of calldata in bytes. +pub const GAS_PER_PUBDATA_BYTE: u32 = 16; + +/// Maximum amount of bytes in one packed write storage slot. +/// Calculated as `(len(hash) + 1) + len(u256)` +pub const MAX_BYTES_PER_PACKED_SLOT: u64 = 65; + +/// Amount of gas required to publish one slot in pubdata. +pub static GAS_PER_SLOT: Lazy = + Lazy::new(|| BigUint::from(MAX_BYTES_PER_PACKED_SLOT) * BigUint::from(GAS_PER_PUBDATA_BYTE)); + +pub const MAX_TXS_IN_BLOCK: usize = 1024; + +pub const MAX_NEW_FACTORY_DEPS: usize = 32; + +pub const PAD_MSG_BEFORE_HASH_BITS_LEN: usize = 736; diff --git a/core/lib/config/src/constants/ethereum.rs b/core/lib/config/src/constants/ethereum.rs new file mode 100644 index 000000000000..d4b099ff865b --- /dev/null +++ b/core/lib/config/src/constants/ethereum.rs @@ -0,0 +1,21 @@ +use zksync_basic_types::Address; + +/// Priority op should be executed for this number of eth blocks. 
+pub const PRIORITY_EXPIRATION: u64 = 50000; +pub const MAX_L1_TRANSACTION_GAS_LIMIT: u64 = 300000; +pub static ETHEREUM_ADDRESS: Address = Address::zero(); + +/// This is the number of pubdata bytes such that it should always be possible to publish +/// from a single transaction. Note, that these pubdata bytes include only bytes that are +/// to be published inside the body of transaction (i.e. excluding factory deps). +pub const GUARANTEED_PUBDATA_PER_L1_BATCH: u64 = 4000; + +/// The maximum number of pubdata per L1 batch. +pub const MAX_PUBDATA_PER_L1_BATCH: u64 = 120000; + +pub const MAX_L2_TX_GAS_LIMIT: u64 = 80000000; + +// The users should always be able to provide `MAX_GAS_PER_PUBDATA_BYTE` gas per pubdata in their +// transactions so that they are able to send at least GUARANTEED_PUBDATA_PER_L1_BATCH bytes per +// transaction. +pub const MAX_GAS_PER_PUBDATA_BYTE: u64 = MAX_L2_TX_GAS_LIMIT / GUARANTEED_PUBDATA_PER_L1_BATCH; diff --git a/core/lib/config/src/constants/fees/intrinsic.rs b/core/lib/config/src/constants/fees/intrinsic.rs new file mode 100644 index 000000000000..b36c8d57086e --- /dev/null +++ b/core/lib/config/src/constants/fees/intrinsic.rs @@ -0,0 +1,20 @@ +//! THIS FILE IS AUTOGENERATED: DO NOT EDIT MANUALLY! +//!
The file with constants related to fees most of which need to be computed +use super::IntrinsicSystemGasConstants; + +pub const fn get_intrinsic_constants() -> IntrinsicSystemGasConstants { + IntrinsicSystemGasConstants { + l2_tx_intrinsic_gas: 14070, + l2_tx_intrinsic_pubdata: 0, + l2_tx_gas_for_refund_transfer: 7343, + l1_tx_intrinsic_gas: 167157, + l1_tx_intrinsic_pubdata: 88, + l1_tx_min_gas_base: 173484, + l1_tx_delta_544_encoding_bytes: 1656, + l1_tx_delta_factory_dep_gas: 2473, + l1_tx_delta_factory_dep_pubdata: 64, + bootloader_intrinsic_gas: 182918, + bootloader_intrinsic_pubdata: 472, + bootloader_tx_memory_size_slots: 519017, + } +} diff --git a/core/lib/config/src/constants/fees/mod.rs b/core/lib/config/src/constants/fees/mod.rs new file mode 100644 index 000000000000..c489b6d9a09a --- /dev/null +++ b/core/lib/config/src/constants/fees/mod.rs @@ -0,0 +1,50 @@ +mod intrinsic; + +pub use intrinsic::*; + +pub struct IntrinsicSystemGasConstants { + // The overhead for each L2 transaction in computation (it is assumed that it is roughly independent of its structure) + pub l2_tx_intrinsic_gas: u32, + // The overhead for each L2 transaction in pubdata (it is assumed that it is roughly independent of its structure) + pub l2_tx_intrinsic_pubdata: u32, + // The number of gas the refund transfer requires + pub l2_tx_gas_for_refund_transfer: u32, + // The overhead for each L1 transaction in computation (it is assumed that it is roughly independent of its structure) + pub l1_tx_intrinsic_gas: u32, + // The overhead for each L1 transaction in pubdata (it is assumed that it is roughly independent of its structure) + pub l1_tx_intrinsic_pubdata: u32, + // The minimal price that each L1 transaction should cost to cover mandatory non-intrinsic parts. + // For instance, the user should be always able to hash at least some minimal transaction. 
+ // (it is assumed that it is roughly independent of its structure) + pub l1_tx_min_gas_base: u32, + // The number of gas the transaction gains based on its length in words + // (for each 544 bytes, the number is not a coincidence, since each new 136 + // bytes require a new keccak round and the length of the encoding increases by 32 bytes at a time) + pub l1_tx_delta_544_encoding_bytes: u32, + // The number of gas an L1->L2 transaction gains with each new factory dependency + pub l1_tx_delta_factory_dep_gas: u32, + // The number of pubdata an L1->L2 transaction requires with each new factory dependency + pub l1_tx_delta_factory_dep_pubdata: u32, + // The nubmer of computational gas the bootloader requires + pub bootloader_intrinsic_gas: u32, + // The number of overhead pubdata the bootloader requires + pub bootloader_intrinsic_pubdata: u32, + // The number of memory available for transaction encoding + pub bootloader_tx_memory_size_slots: u32, +} + +/// The amount of gas we need to pay for each non-zero pubdata byte. +/// Note that it is bigger than 16 to account for potential overhead +pub const L1_GAS_PER_PUBDATA_BYTE: u32 = 17; + +/// The price the operator spends on 1 gas of computation in wei. (0.5 gwei) +pub const FAIR_L2_GAS_PRICE: u64 = 500000000; + +/// The amount of pubdata that is strictly guaranteed to be available for a block +pub const GUARANTEED_PUBDATA_IN_TX: u32 = 100000; + +/// The amount of overhead that is paid when the bytecode is published onchain. +/// It comes from the 64 bytes of additional two words for the bytecode length and offset in the ABI-encoding +/// of the commitment. The other "36" bytes are mostly an approximation of the amount of gas it takes +/// to properly hash it and compare with the corresponding L2->L1 message. 
+pub const PUBLISH_BYTECODE_OVERHEAD: u32 = 100; diff --git a/core/lib/config/src/constants/mod.rs b/core/lib/config/src/constants/mod.rs new file mode 100644 index 000000000000..8baf3548d4b5 --- /dev/null +++ b/core/lib/config/src/constants/mod.rs @@ -0,0 +1,15 @@ +pub mod blocks; +pub mod contracts; +pub mod crypto; +pub mod ethereum; +pub mod fees; +pub mod system_context; +pub mod trusted_slots; + +pub use blocks::*; +pub use contracts::*; +pub use crypto::*; +pub use ethereum::*; +pub use fees::*; +pub use system_context::*; +pub use trusted_slots::*; diff --git a/core/lib/config/src/constants/system_context.rs b/core/lib/config/src/constants/system_context.rs new file mode 100644 index 000000000000..4802cf3cc8e5 --- /dev/null +++ b/core/lib/config/src/constants/system_context.rs @@ -0,0 +1,57 @@ +// Since circuit hashes do not cover a range of 256 bits +// all user-set slots will have the two highest bits set to 0. +// That is why all internal slots will have the form `b11...` but we use `b1111 = xff` for simplicity. + +use zksync_basic_types::H256; + +pub const SYSTEM_CONTEXT_CHAIN_ID_POSITION: H256 = H256::zero(); + +pub const SYSTEM_CONTEXT_TX_ORIGIN_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, +]); + +pub const SYSTEM_CONTEXT_GAS_PRICE_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, +]); + +pub const SYSTEM_CONTEXT_BLOCK_GAS_LIMIT_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, +]); +// Some dummy value. 
We will put the real block gas limit later on. +pub const SYSTEM_CONTEXT_BLOCK_GAS_LIMIT: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, +]); + +pub const SYSTEM_CONTEXT_COINBASE_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, +]); + +pub const SYSTEM_CONTEXT_DIFFICULTY_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, +]); +// 2500000000000000. THe number is chosen for compatibility with other L2s. +pub const SYSTEM_CONTEXT_DIFFICULTY: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0xE1, 0xBC, 0x9B, 0xF0, 0x40, 0x00, +]); + +pub const SYSTEM_CONTEXT_BASE_FEE_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, +]); + +// Tenth of a gwei in wei. 1 gwei is 10^9 wei, so 0.1 gwei is 10^8 wei. +const TENTH_OF_GWEI: u64 = 10u64.pow(8); +// The base fee in wei. u64 as u32 would limit this price to be ~4.3 gwei. 
+pub const SYSTEM_CONTEXT_MINIMAL_BASE_FEE: u64 = TENTH_OF_GWEI; + +pub const SYSTEM_CONTEXT_BLOCK_INFO_POSITION: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x07, +]); diff --git a/core/lib/config/src/constants/trusted_slots.rs b/core/lib/config/src/constants/trusted_slots.rs new file mode 100644 index 000000000000..fe1f68968677 --- /dev/null +++ b/core/lib/config/src/constants/trusted_slots.rs @@ -0,0 +1,52 @@ +use once_cell::sync::Lazy; +use zksync_basic_types::{H256, U256}; +use zksync_utils::h256_to_u256; + +/// +/// Well known-slots (e.g. proxy addresses in popular EIPs). +/// + +const ERC1967_ROLLBACK_SLOT: H256 = H256([ + 0x49, 0x10, 0xfd, 0xfa, 0x16, 0xfe, 0xd3, 0x26, 0x0e, 0xd0, 0xe7, 0x14, 0x7f, 0x7c, 0xc6, 0xda, + 0x11, 0xa6, 0x02, 0x08, 0xb5, 0xb9, 0x40, 0x6d, 0x12, 0xa6, 0x35, 0x61, 0x4f, 0xfd, 0x91, 0x43, +]); + +const ERC1967_IMPLEMENTATION_SLOT: H256 = H256([ + 0x36, 0x08, 0x94, 0xa1, 0x3b, 0xa1, 0xa3, 0x21, 0x06, 0x67, 0xc8, 0x28, 0x49, 0x2d, 0xb9, 0x8d, + 0xca, 0x3e, 0x20, 0x76, 0xcc, 0x37, 0x35, 0xa9, 0x20, 0xa3, 0xca, 0x50, 0x5d, 0x38, 0x2b, 0xbc, +]); + +const ERC1967_ADMIN_SLOT: H256 = H256([ + 0xb5, 0x31, 0x27, 0x68, 0x4a, 0x56, 0x8b, 0x31, 0x73, 0xae, 0x13, 0xb9, 0xf8, 0xa6, 0x01, 0x6e, + 0x24, 0x3e, 0x63, 0xb6, 0xe8, 0xee, 0x11, 0x78, 0xd6, 0xa7, 0x17, 0x85, 0x0b, 0x5d, 0x61, 0x03, +]); + +const ERC1967_BEACON_SLOT: H256 = H256([ + 0xa3, 0xf0, 0xad, 0x74, 0xe5, 0x42, 0x3a, 0xeb, 0xfd, 0x80, 0xd3, 0xef, 0x43, 0x46, 0x57, 0x83, + 0x35, 0xa9, 0xa7, 0x2a, 0xea, 0xee, 0x59, 0xff, 0x6c, 0xb3, 0x58, 0x2b, 0x35, 0x13, 0x3d, 0x50, +]); + +const INITIALIZER_INITIALING_SLOT: H256 = H256([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, +]); + +pub 
static TRUSTED_TOKEN_SLOTS: Lazy<Vec<U256>> = Lazy::new(|| {
+    vec![
+        ERC1967_ROLLBACK_SLOT,
+        ERC1967_IMPLEMENTATION_SLOT,
+        ERC1967_ADMIN_SLOT,
+        INITIALIZER_INITIALING_SLOT,
+    ]
+    .into_iter()
+    .map(h256_to_u256)
+    .collect()
+});
+
+// These slots contain addresses that should themselves be trusted.
+pub static TRUSTED_ADDRESS_SLOTS: Lazy<Vec<U256>> = Lazy::new(|| {
+    vec![ERC1967_BEACON_SLOT]
+        .into_iter()
+        .map(h256_to_u256)
+        .collect()
+});
diff --git a/core/lib/config/src/lib.rs b/core/lib/config/src/lib.rs
new file mode 100644
index 000000000000..8514f9614c50
--- /dev/null
+++ b/core/lib/config/src/lib.rs
@@ -0,0 +1,48 @@
+#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)]
+
+use serde::Deserialize;
+
+pub use crate::configs::{
+    ApiConfig, ChainConfig, ContractVerifierConfig, ContractsConfig, DBConfig, ETHClientConfig,
+    ETHSenderConfig, ETHWatchConfig, FetcherConfig, GasAdjusterConfig, ObjectStoreConfig,
+    ProverConfig, ProverConfigs,
+};
+
+pub mod configs;
+pub mod constants;
+pub mod test_config;
+
+// Top-level aggregate of every component's config, loaded from env vars.
+#[derive(Debug, Deserialize, Clone, PartialEq)]
+pub struct ZkSyncConfig {
+    pub api: ApiConfig,
+    pub chain: ChainConfig,
+    pub contracts: ContractsConfig,
+    pub db: DBConfig,
+    pub eth_client: ETHClientConfig,
+    pub eth_sender: ETHSenderConfig,
+    pub eth_watch: ETHWatchConfig,
+    pub fetcher: FetcherConfig,
+    pub prover: ProverConfigs,
+    pub object_store: ObjectStoreConfig,
+}
+
+impl ZkSyncConfig {
+    /// Loads every sub-config from environment variables; panics on missing/invalid vars.
+    pub fn from_env() -> Self {
+        Self {
+            api: ApiConfig::from_env(),
+            chain: ChainConfig::from_env(),
+            contracts: ContractsConfig::from_env(),
+            db: DBConfig::from_env(),
+            eth_client: ETHClientConfig::from_env(),
+            eth_sender: ETHSenderConfig::from_env(),
+            eth_watch: ETHWatchConfig::from_env(),
+            fetcher: FetcherConfig::from_env(),
+            prover: ProverConfigs::from_env(),
+            object_store: ObjectStoreConfig::from_env(),
+        }
+    }
+
+    pub fn default_db() -> DBConfig {
+        DBConfig::default()
+    }
+}
diff --git a/core/lib/config/src/test_config/mod.rs
b/core/lib/config/src/test_config/mod.rs new file mode 100644 index 000000000000..53501a72f5a3 --- /dev/null +++ b/core/lib/config/src/test_config/mod.rs @@ -0,0 +1,63 @@ +// Built-in deps +use std::fs; +// External uses +use serde::Deserialize; +// Workspace uses +// Local uses + +/// Transforms relative path like `constant/some_file.json` into full path like +/// `$ZKSYNC_HOME/etc/test_config/constant/some_file.json`. +fn config_path(postfix: &str) -> String { + let home = std::env::var("ZKSYNC_HOME").expect("ZKSYNC_HOME variable must be set"); + + format!("{}/etc/test_config/{}", home, postfix) +} + +fn load_json(path: &str) -> serde_json::Value { + serde_json::from_str(&fs::read_to_string(path).expect("Invalid config path")) + .expect("Invalid config format") +} + +/// Common Ethereum parameters. +#[derive(Debug, Deserialize)] +pub struct EthConfig { + /// Set of 12 words for connecting to an Ethereum wallet. + pub test_mnemonic: String, +} + +/// Common Api addresses. +#[derive(Debug, Deserialize)] +pub struct ApiConfig { + /// Address of the rest api. + pub rest_api_url: String, +} + +macro_rules! 
impl_config { + ($name_config:ident, $file:tt) => { + impl $name_config { + pub fn load() -> Self { + let object = load_json(&config_path(&format!("{}.json", $file))); + serde_json::from_value(object) + .expect(&format!("Cannot deserialize config from '{}'", $file)) + } + } + }; +} + +impl_config!(ApiConfig, "constant/api"); +impl_config!(EthConfig, "constant/eth"); + +#[derive(Debug)] +pub struct TestConfig { + pub eth: EthConfig, + pub api: ApiConfig, +} + +impl TestConfig { + pub fn load() -> Self { + Self { + eth: EthConfig::load(), + api: ApiConfig::load(), + } + } +} diff --git a/core/lib/contracts/Cargo.toml b/core/lib/contracts/Cargo.toml new file mode 100644 index 000000000000..0ae977013dc9 --- /dev/null +++ b/core/lib/contracts/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "zksync_contracts" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +ethabi = "16.0.0" +serde_json = "1.0" +zksync_utils = { path = "../utils", version = "1.0" } +once_cell = "1.7" +hex = "0.4" diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs new file mode 100644 index 000000000000..4ddce9e04b74 --- /dev/null +++ b/core/lib/contracts/src/lib.rs @@ -0,0 +1,230 @@ +#![allow(clippy::derive_partial_eq_without_eq)] + +use ethabi::ethereum_types::U256; +use ethabi::Contract; +use once_cell::sync::Lazy; +use std::fs::{self, File}; +use std::path::Path; + +use zksync_utils::bytecode::hash_bytecode; +use zksync_utils::{bytes_to_be_words, h256_to_u256}; + +const ZKSYNC_CONTRACT_FILE: &str = + "contracts/ethereum/artifacts/cache/solpp-generated-contracts/zksync/interfaces/IZkSync.sol/IZkSync.json"; +const VERIFIER_CONTRACT_FILE: &str = + "contracts/ethereum/artifacts/cache/solpp-generated-contracts/zksync/Verifier.sol/Verifier.json"; +const 
IERC20_CONTRACT_FILE: &str =
+    "contracts/ethereum/artifacts/cache/solpp-generated-contracts/common/interfaces/IERC20.sol/IERC20.json";
+const FAIL_ON_RECEIVE_CONTRACT_FILE: &str =
+    "contracts/ethereum/artifacts/cache/solpp-generated-contracts/zksync/dev-contracts/FailOnReceive.sol/FailOnReceive.json";
+const L2_BRIDGE_CONTRACT_FILE: &str =
+    "contracts/zksync/artifacts-zk/cache-zk/solpp-generated-contracts/bridge/interfaces/IL2Bridge.sol/IL2Bridge.json";
+const LOADNEXT_CONTRACT_FILE: &str =
+    "etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/LoadnextContract.json";
+const LOADNEXT_SIMPLE_CONTRACT_FILE: &str =
+    "etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/Foo.json";
+
+// Reads a JSON artifact, resolving the path relative to $ZKSYNC_HOME (or CWD if unset).
+fn read_file_to_json_value(path: impl AsRef<Path>) -> serde_json::Value {
+    let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into());
+    let path = Path::new(&zksync_home).join(path);
+    serde_json::from_reader(
+        File::open(&path).unwrap_or_else(|e| panic!("Failed to open file {:?}: {}", path, e)),
+    )
+    .unwrap_or_else(|e| panic!("Failed to parse file {:?}: {}", path, e))
+}
+
+/// Loads the `abi` section of a hardhat/solc JSON artifact as an `ethabi::Contract`.
+pub fn load_contract<P: AsRef<Path> + std::fmt::Debug>(path: P) -> Contract {
+    serde_json::from_value(read_file_to_json_value(&path)["abi"].take())
+        .unwrap_or_else(|e| panic!("Failed to parse contract abi from file {:?}: {}", path, e))
+}
+
+/// Loads a system contract ABI by its bare name from the system-contracts artifacts dir.
+pub fn load_sys_contract(contract_name: &str) -> Contract {
+    load_contract(format!(
+        "etc/system-contracts/artifacts-zk/cache-zk/solpp-generated-contracts/{0}.sol/{0}.json",
+        contract_name
+    ))
+}
+
+/// Returns the raw `abi` string from the artifact; panics if `abi` is not a JSON string.
+pub fn read_contract_abi(path: impl AsRef<Path>) -> String {
+    read_file_to_json_value(path)["abi"]
+        .as_str()
+        .expect("Failed to parse abi")
+        .to_string()
+}
+
+pub fn zksync_contract() -> Contract {
+    load_contract(ZKSYNC_CONTRACT_FILE)
+}
+
+pub fn erc20_contract() -> Contract {
+    load_contract(IERC20_CONTRACT_FILE)
+}
+
+pub fn l2_bridge_contract() -> Contract {
+    load_contract(L2_BRIDGE_CONTRACT_FILE)
+}
+
+pub fn
verifier_contract() -> Contract {
+    load_contract(VERIFIER_CONTRACT_FILE)
+}
+
+#[derive(Debug, Clone)]
+pub struct TestContract {
+    /// Contract bytecode to be used for sending deploy transaction.
+    pub bytecode: Vec<u8>,
+    /// Contract ABI.
+    pub contract: Contract,
+
+    pub factory_deps: Vec<Vec<u8>>,
+}
+
+/// Reads test contract bytecode and its ABI.
+pub fn get_loadnext_contract() -> TestContract {
+    let bytecode = read_bytecode(LOADNEXT_CONTRACT_FILE);
+    let dep = read_bytecode(LOADNEXT_SIMPLE_CONTRACT_FILE);
+
+    TestContract {
+        bytecode,
+        contract: loadnext_contract(),
+        factory_deps: vec![dep],
+    }
+}
+
+// Returns loadnext contract and its factory dependencies
+pub fn loadnext_contract() -> Contract {
+    load_contract("etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/LoadnextContract.json")
+}
+
+pub fn loadnext_simple_contract() -> Contract {
+    load_contract(
+        "etc/contracts-test-data/artifacts-zk/contracts/loadnext/loadnext_contract.sol/Foo.json",
+    )
+}
+
+pub fn fail_on_receive_contract() -> Contract {
+    load_contract(FAIL_ON_RECEIVE_CONTRACT_FILE)
+}
+
+pub fn deployer_contract() -> Contract {
+    load_sys_contract("ContractDeployer")
+}
+
+pub fn eth_contract() -> Contract {
+    load_sys_contract("L2EthToken")
+}
+
+pub fn known_codes_contract() -> Contract {
+    load_sys_contract("KnownCodesStorage")
+}
+
+/// Reads the deploy bytecode from an artifact: strips the `0x` prefix and hex-decodes.
+pub fn read_bytecode(path: impl AsRef<Path>) -> Vec<u8> {
+    let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into());
+    let artifact_path = Path::new(&zksync_home).join(path);
+    let artifact = read_file_to_json_value(artifact_path.clone());
+
+    let bytecode = artifact["bytecode"]
+        .as_str()
+        .unwrap_or_else(|| panic!("Bytecode not found in {:?}", artifact_path))
+        .strip_prefix("0x")
+        .unwrap_or_else(|| panic!("Bytecode in {:?} is not hex", artifact_path));
+
+    hex::decode(bytecode)
+        .unwrap_or_else(|err| panic!("Can't decode bytecode in {:?}: {}", artifact_path, err))
+}
+
+pub fn default_erc20_bytecode() -> Vec<u8> {
read_bytecode("etc/ERC20/artifacts-zk/contracts/ZkSyncERC20.sol/ZkSyncERC20.json")
+}
+
+pub fn read_sys_contract_bytecode(directory: &str, name: &str) -> Vec<u8> {
+    read_bytecode(format!(
+        "etc/system-contracts/artifacts-zk/cache-zk/solpp-generated-contracts/{0}{1}.sol/{1}.json",
+        directory, name
+    ))
+}
+
+pub fn read_bootloader_code(bootloader_type: &str) -> Vec<u8> {
+    read_zbin_bytecode(format!(
+        "etc/system-contracts/bootloader/build/artifacts/{}.yul/{}.yul.zbin",
+        bootloader_type, bootloader_type
+    ))
+}
+
+pub fn read_proved_block_bootloader_bytecode() -> Vec<u8> {
+    read_bootloader_code("proved_block")
+}
+
+pub fn read_playground_block_bootloader_bytecode() -> Vec<u8> {
+    read_bootloader_code("playground_block")
+}
+
+pub fn get_loadnext_test_contract_path(file_name: &str, contract_name: &str) -> String {
+    format!(
+        "core/tests/loadnext/test-contracts/loadnext_contract/artifacts/loadnext_contract.sol/{}.sol:{}.abi",
+        file_name, contract_name
+    )
+}
+
+pub fn get_loadnext_test_contract_bytecode(file_name: &str, contract_name: &str) -> String {
+    format!(
+        "core/tests/loadnext/test-contracts/loadnext_contract/artifacts/loadnext_contract.sol/{}.sol:{}.zbin",
+        file_name, contract_name
+    )
+}
+
+/// Reads raw `.zbin` bytecode, resolving relative to $ZKSYNC_HOME (or CWD if unset).
+pub fn read_zbin_bytecode(zbin_path: impl AsRef<Path>) -> Vec<u8> {
+    let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into());
+    let bytecode_path = Path::new(&zksync_home).join(zbin_path);
+    fs::read(&bytecode_path)
+        .unwrap_or_else(|err| panic!("Can't read .zbin bytecode at {:?}: {}", bytecode_path, err))
+}
+
+pub fn read_bootloader_bytecode() -> Vec<u8> {
+    read_zbin_bytecode("etc/system-contracts/bootloader/artifacts/bootloader/bootloader.yul.zbin")
+}
+
+/// Hash of code and code which consists of 32 bytes words
+#[derive(Debug)]
+pub struct SystemContractCode {
+    pub code: Vec<U256>,
+    pub hash: U256,
+}
+
+pub static PROVED_BLOCK_BOOTLOADER_CODE: Lazy<SystemContractCode> = Lazy::new(|| {
+    let bytecode = read_proved_block_bootloader_bytecode();
+    let hash = hash_bytecode(&bytecode);
+
+    SystemContractCode {
+        code: bytes_to_be_words(bytecode),
+        hash: h256_to_u256(hash),
+    }
+});
+
+pub static PLAYGROUND_BLOCK_BOOTLOADER_CODE: Lazy<SystemContractCode> = Lazy::new(|| {
+    let bytecode = read_playground_block_bootloader_bytecode();
+    let hash = hash_bytecode(&bytecode);
+
+    SystemContractCode {
+        code: bytes_to_be_words(bytecode),
+        hash: h256_to_u256(hash),
+    }
+});
+
+pub static ESTIMATE_FEE_BLOCK_CODE: Lazy<SystemContractCode> = Lazy::new(|| {
+    let bytecode = read_bootloader_code("fee_estimate");
+    let hash = hash_bytecode(&bytecode);
+
+    SystemContractCode {
+        code: bytes_to_be_words(bytecode),
+        hash: h256_to_u256(hash),
+    }
+});
+
+pub static DEFAULT_ACCOUNT_CODE: Lazy<SystemContractCode> = Lazy::new(|| {
+    let bytecode = read_sys_contract_bytecode("", "DefaultAccount");
+    let hash = hash_bytecode(&bytecode);
+
+    SystemContractCode {
+        code: bytes_to_be_words(bytecode),
+        hash: h256_to_u256(hash),
+    }
+});
diff --git a/core/lib/crypto/Cargo.toml b/core/lib/crypto/Cargo.toml
new file mode 100644
index 000000000000..f4d7e7c2b400
--- /dev/null
+++ b/core/lib/crypto/Cargo.toml
@@ -0,0 +1,30 @@
+[package]
+name = "zksync_crypto"
+version = "1.0.0"
+edition = "2018"
+authors = ["The Matter Labs Team <hello@matterlabs.dev>"]
+homepage = "https://zksync.io/"
+repository = "https://github.com/matter-labs/zksync-2"
+license = "Apache-2.0"
+keywords = ["blockchain", "zksync"]
+categories = ["cryptography"]
+readme = "README.md"
+
+[dependencies]
+#franklin-crypto = {git = "https://github.com/matter-labs/franklin-crypto", branch = "dev", features = ["multicore", "plonk"]}
+#recursive_aggregation_circuit = { version = "1.0.0", git = "https://github.com/matter-labs/recursive_aggregation_circuit.git"}
+#rescue_poseidon = { version = "0.4.0", git = "https://github.com/matter-labs/rescue-poseidon.git", branch="dev-dep" }
+rand = "0.4"
+
+zksync_basic_types = { path = "../basic_types", version = "1.0" }
+serde = "1.0"
+thiserror = "1.0"
+once_cell = "1.7"
+hex = "0.4"
+base64 = "0.13"
+sha2 = "0.9"
+blake2 = "0.10"
+
+[dev-dependencies]
+serde_json = "1.0" + diff --git a/core/lib/crypto/README.md b/core/lib/crypto/README.md new file mode 100644 index 000000000000..2a4f3d4b9c5c --- /dev/null +++ b/core/lib/crypto/README.md @@ -0,0 +1,10 @@ +# zkSync crypto. Essential cryptography primitives for the zkSync network + +`zksync_crypto` is a crate containing essential zkSync cryptographic primitives, such as private keys and hashers. + +## License + +`zksync_crypto` is a part of zkSync stack, which is distributed under the terms of both the MIT license and the Apache +License (Version 2.0). + +See [LICENSE-APACHE](../../LICENSE-APACHE), [LICENSE-MIT](../../LICENSE-MIT) for details. diff --git a/core/lib/crypto/src/convert.rs b/core/lib/crypto/src/convert.rs new file mode 100644 index 000000000000..9fb8ac5ba408 --- /dev/null +++ b/core/lib/crypto/src/convert.rs @@ -0,0 +1,101 @@ +// use crate::{ +// error::ConversionError, +// franklin_crypto::bellman::pairing::ff::{PrimeField, PrimeFieldRepr}, +// }; +// +// /// Extension trait denoting common conversion method for field elements. +// pub trait FeConvert: PrimeField { +// /// Converts the field element into a byte array. +// fn to_bytes(&self) -> Vec { +// let mut buf: Vec = Vec::with_capacity(32); +// self.into_repr().write_be(&mut buf).unwrap(); +// +// buf +// } +// +// /// Reads a field element from its byte sequence representation. +// fn from_bytes(value: &[u8]) -> Result { +// let mut repr = Self::Repr::default(); +// +// // `repr.as_ref()` converts `repr` to a list of `u64`. Each element has 8 bytes, +// // so to obtain size in bytes, we multiply the array size with the size of `u64`. 
+// let expected_input_size = repr.as_ref().len() * 8; +// if value.len() != expected_input_size { +// return Err(ConversionError::IncorrectInputSize { +// size: value.len(), +// expected_size: expected_input_size, +// }); +// } +// repr.read_be(value).map_err(ConversionError::ParsingError)?; +// Self::from_repr(repr).map_err(From::from) +// } +// +// /// Returns hex representation of the field element without `0x` prefix. +// fn to_hex(&self) -> String { +// let mut buf: Vec = Vec::with_capacity(32); +// self.into_repr().write_be(&mut buf).unwrap(); +// hex::encode(&buf) +// } +// +// /// Reads a field element from its hexadecimal representation. +// fn from_hex(value: &str) -> Result { +// let value = if let Some(value) = value.strip_prefix("0x") { +// value +// } else { +// value +// }; +// +// // Buffer is reversed and read as little endian, since we pad it with zeros to +// // match the expected length. +// let mut buf = hex::decode(&value)?; +// buf.reverse(); +// let mut repr = Self::Repr::default(); +// +// // `repr.as_ref()` converts `repr` to a list of `u64`. Each element has 8 bytes, +// // so to obtain size in bytes, we multiply the array size with the size of `u64`. +// buf.resize(repr.as_ref().len() * 8, 0); +// repr.read_le(&buf[..]) +// .map_err(ConversionError::ParsingError)?; +// Self::from_repr(repr).map_err(From::from) +// } +// } +// +// impl FeConvert for T where T: PrimeField {} +// +// #[cfg(test)] +// mod tests { +// use super::*; +// +// use crate::{ +// rand::{Rand, SeedableRng, XorShiftRng}, +// Fr, +// }; +// +// /// Checks that converting FE to the hex form and back results +// /// in the same FE. 
+// #[test] +// fn fe_hex_roundtrip() { +// let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]); +// +// let fr = Fr::rand(&mut rng); +// +// let encoded_fr = fr.to_hex(); +// let decoded_fr = Fr::from_hex(&encoded_fr).expect("Can't decode encoded fr"); +// +// assert_eq!(fr, decoded_fr); +// } +// +// /// Checks that converting FE to the bytes form and back results +// /// in the same FE. +// #[test] +// fn fe_bytes_roundtrip() { +// let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]); +// +// let fr = Fr::rand(&mut rng); +// +// let encoded_fr = fr.to_bytes(); +// let decoded_fr = Fr::from_bytes(&encoded_fr).expect("Can't decode encoded fr"); +// +// assert_eq!(fr, decoded_fr); +// } +// } diff --git a/core/lib/crypto/src/error.rs b/core/lib/crypto/src/error.rs new file mode 100644 index 000000000000..e7ecbd4c4ac3 --- /dev/null +++ b/core/lib/crypto/src/error.rs @@ -0,0 +1,21 @@ +// use crate::franklin_crypto::bellman::pairing::ff; +// use hex::FromHexError; +// use thiserror::Error; +// +// #[derive(Debug, Error, PartialEq)] +// pub enum PackingError { +// #[error("Input integer is too big for packing. Actual: {integer}, limit: {limit}")] +// IntegerTooBig { integer: u128, limit: u128 }, +// } +// +// #[derive(Debug, Error)] +// pub enum ConversionError { +// #[error("Incorrect input size. 
Actual: {size}, expected: {expected_size}")]
+//     IncorrectInputSize { size: usize, expected_size: usize },
+//     #[error("Cannot decode hex: {0}")]
+//     HexDecodingError(#[from] FromHexError),
+//     #[error("Cannot parse value {0}")]
+//     ParsingError(std::io::Error),
+//     #[error("Cannot convert into prime field value: {0}")]
+//     PrimeFieldDecodingError(#[from] ff::PrimeFieldDecodingError),
+// }
diff --git a/core/lib/crypto/src/hasher/blake2.rs b/core/lib/crypto/src/hasher/blake2.rs
new file mode 100644
index 000000000000..cb2d22be72ea
--- /dev/null
+++ b/core/lib/crypto/src/hasher/blake2.rs
@@ -0,0 +1,36 @@
+use crate::hasher::Hasher;
+use blake2::{Blake2s256, Digest};
+
+#[derive(Default, Clone, Debug)]
+pub struct Blake2Hasher;
+
+impl Hasher<Vec<u8>> for Blake2Hasher {
+    /// Gets the hash of the byte sequence.
+    fn hash_bytes<I: IntoIterator<Item = u8>>(&self, value: I) -> Vec<u8> {
+        let mut hasher = Blake2s256::new();
+        let value: Vec<u8> = value.into_iter().collect();
+        hasher.update(&value);
+
+        hasher.finalize().to_vec()
+    }
+
+    /// Get the hash of the hashes sequence.
+    fn hash_elements<I: IntoIterator<Item = Vec<u8>>>(&self, elements: I) -> Vec<u8> {
+        let elems: Vec<u8> = elements.into_iter().flatten().collect();
+
+        let mut hasher = Blake2s256::new();
+        hasher.update(&elems);
+        hasher.finalize().to_vec()
+    }
+
+    /// Merges two hashes into one.
+    fn compress(&self, lhs: &Vec<u8>, rhs: &Vec<u8>) -> Vec<u8> {
+        let mut elems = vec![];
+        elems.extend(lhs);
+        elems.extend(rhs);
+
+        let mut hasher = Blake2s256::new();
+        hasher.update(&elems);
+        hasher.finalize().to_vec()
+    }
+}
diff --git a/core/lib/crypto/src/hasher/keccak.rs b/core/lib/crypto/src/hasher/keccak.rs
new file mode 100644
index 000000000000..f3191cf1c03e
--- /dev/null
+++ b/core/lib/crypto/src/hasher/keccak.rs
@@ -0,0 +1,28 @@
+use crate::hasher::Hasher;
+use zksync_basic_types::web3::signing::keccak256;
+
+#[derive(Default, Clone, Debug)]
+pub struct KeccakHasher;
+
+impl Hasher<Vec<u8>> for KeccakHasher {
+    /// Gets the hash of the byte sequence.
+    fn hash_bytes<I: IntoIterator<Item = u8>>(&self, value: I) -> Vec<u8> {
+        let value: Vec<u8> = value.into_iter().collect();
+        keccak256(&value).to_vec()
+    }
+
+    /// Get the hash of the hashes sequence.
+    fn hash_elements<I: IntoIterator<Item = Vec<u8>>>(&self, elements: I) -> Vec<u8> {
+        let elems: Vec<u8> = elements.into_iter().flatten().collect();
+        keccak256(&elems).to_vec()
+    }
+
+    /// Merges two hashes into one.
+    fn compress(&self, lhs: &Vec<u8>, rhs: &Vec<u8>) -> Vec<u8> {
+        let mut elems = vec![];
+        elems.extend(lhs);
+        elems.extend(rhs);
+
+        keccak256(&elems).to_vec()
+    }
+}
diff --git a/core/lib/crypto/src/hasher/mod.rs b/core/lib/crypto/src/hasher/mod.rs
new file mode 100644
index 000000000000..c71e7cc267a1
--- /dev/null
+++ b/core/lib/crypto/src/hasher/mod.rs
@@ -0,0 +1,23 @@
+pub mod blake2;
+pub mod keccak;
+pub mod sha256;
+
+/// Definition of hasher suitable for calculating state hash.
+///
+/// # Panics
+///
+/// This structure expects input data to be correct, as it's main usage is the Merkle tree maintenance,
+/// which assumes the consistent state.
+/// It means that caller is responsible for checking that input values are actually valid, e.g. for `Vec<u8>`
+/// it must be checked that byte sequence can be deserialized to hash object expected by the chosen hasher
+/// implementation.
+///
+/// What it *actually* means, that is incorrect input data will cause the code to panic.
+pub trait Hasher<Hash> {
+    /// Gets the hash of the byte sequence.
+    fn hash_bytes<I: IntoIterator<Item = u8>>(&self, value: I) -> Hash;
+    /// Get the hash of the hashes sequence.
+    fn hash_elements<I: IntoIterator<Item = Hash>>(&self, elements: I) -> Hash;
+    /// Merges two hashes into one.
+    fn compress(&self, lhs: &Hash, rhs: &Hash) -> Hash;
+}
diff --git a/core/lib/crypto/src/hasher/sha256.rs b/core/lib/crypto/src/hasher/sha256.rs
new file mode 100644
index 000000000000..ef42ecc632c5
--- /dev/null
+++ b/core/lib/crypto/src/hasher/sha256.rs
@@ -0,0 +1,36 @@
+use crate::hasher::Hasher;
+use sha2::{Digest, Sha256};
+
+#[derive(Default, Clone, Debug)]
+pub struct Sha256Hasher;
+
+impl Hasher<Vec<u8>> for Sha256Hasher {
+    /// Gets the hash of the byte sequence.
+    fn hash_bytes<I: IntoIterator<Item = u8>>(&self, value: I) -> Vec<u8> {
+        let mut sha256 = Sha256::new();
+        let value: Vec<u8> = value.into_iter().collect();
+        sha256.update(&value);
+
+        sha256.finalize().to_vec()
+    }
+
+    /// Get the hash of the hashes sequence.
+    fn hash_elements<I: IntoIterator<Item = Vec<u8>>>(&self, elements: I) -> Vec<u8> {
+        let elems: Vec<u8> = elements.into_iter().flatten().collect();
+
+        let mut sha256 = Sha256::new();
+        sha256.update(&elems);
+        sha256.finalize().to_vec()
+    }
+
+    /// Merges two hashes into one.
+    fn compress(&self, lhs: &Vec<u8>, rhs: &Vec<u8>) -> Vec<u8> {
+        let mut elems = vec![];
+        elems.extend(lhs);
+        elems.extend(rhs);
+
+        let mut sha256 = Sha256::new();
+        sha256.update(&elems);
+        sha256.finalize().to_vec()
+    }
+}
diff --git a/core/lib/crypto/src/lib.rs b/core/lib/crypto/src/lib.rs
new file mode 100644
index 000000000000..e3955fddfd52
--- /dev/null
+++ b/core/lib/crypto/src/lib.rs
@@ -0,0 +1,52 @@
+//! `zksync_crypto` is a crate containing essential zkSync cryptographic primitives, such as private keys and hashers.
+ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +// use crate::franklin_crypto::{ +// bellman::{pairing::bn256, plonk::better_cs::cs::PlonkCsWidth4WithNextStepParams}, +// // eddsa::{PrivateKey as PrivateKeyImport, PublicKey as PublicKeyImport}, +// jubjub::JubjubEngine, +// }; + +mod crypto_exports { + // pub use crate::franklin_crypto::{ + // bellman, + // bellman::{pairing, pairing::ff}, + // }; + // pub use franklin_crypto; + pub use rand; + // pub use recursive_aggregation_circuit; +} + +pub use crypto_exports::*; + +pub mod convert; +pub mod error; +pub mod hasher; +pub mod primitives; +pub mod proof; +pub mod serialization; + +// pub use crypto_exports::*; +// +// pub type Engine = bn256::Bn256; +// pub type Fr = bn256::Fr; +// pub type Fs = ::Fs; +// pub type PlonkCS = PlonkCsWidth4WithNextStepParams; + +// pub type PrivateKey = PrivateKeyImport; +// pub type PublicKey = PublicKeyImport; + +// /// Decodes a private key from a field element. +// pub fn priv_key_from_fs(fs: Fs) -> PrivateKey { +// PrivateKeyImport(fs) +// } + +// /// Converts private key into a corresponding public key. 
+// pub fn public_key_from_private(pk: &PrivateKey) -> PublicKey { +// PublicKey::from_private( +// pk, +// FixedGenerators::SpendingKeyGenerator, +// ¶ms::JUBJUB_PARAMS, +// ) +// } diff --git a/core/lib/crypto/src/primitives.rs b/core/lib/crypto/src/primitives.rs new file mode 100644 index 000000000000..62f7a98b58e9 --- /dev/null +++ b/core/lib/crypto/src/primitives.rs @@ -0,0 +1,132 @@ +// // Built-in deps +// // External deps +// use crate::franklin_crypto::bellman::pairing::{ +// bn256::Bn256, +// ff::{PrimeField, PrimeFieldRepr, ScalarEngine}, +// CurveAffine, Engine, +// }; +// use zksync_basic_types::U256; +// // Workspace deps +// +// pub struct EthereumSerializer; +// +// impl EthereumSerializer { +// pub fn serialize_g1(point: &::G1Affine) -> (U256, U256) { +// if point.is_zero() { +// return (U256::zero(), U256::zero()); +// } +// let uncompressed = point.into_uncompressed(); +// +// let uncompressed_slice = uncompressed.as_ref(); +// +// // bellman serializes points as big endian and in the form x, y +// // ethereum expects the same order in memory +// let x = U256::from_big_endian(&uncompressed_slice[0..32]); +// let y = U256::from_big_endian(&uncompressed_slice[32..64]); +// +// (x, y) +// } +// +// pub fn serialize_g2(point: &::G2Affine) -> ((U256, U256), (U256, U256)) { +// let uncompressed = point.into_uncompressed(); +// +// let uncompressed_slice = uncompressed.as_ref(); +// +// // bellman serializes points as big endian and in the form x1*u, x0, y1*u, y0 +// // ethereum expects the same order in memory +// let x_1 = U256::from_big_endian(&uncompressed_slice[0..32]); +// let x_0 = U256::from_big_endian(&uncompressed_slice[32..64]); +// let y_1 = U256::from_big_endian(&uncompressed_slice[64..96]); +// let y_0 = U256::from_big_endian(&uncompressed_slice[96..128]); +// +// ((x_1, x_0), (y_1, y_0)) +// } +// +// pub fn serialize_fe(field_element: &::Fr) -> U256 { +// let mut be_bytes = [0u8; 32]; +// field_element +// .into_repr() +// .write_be(&mut 
be_bytes[..]) +// .expect("get new root BE bytes"); +// U256::from_big_endian(&be_bytes[..]) +// } +// } +// +// pub struct BitConvert; +// +// impl BitConvert { +// /// Converts a set of bits to a set of bytes in direct order. +// #[allow(clippy::wrong_self_convention)] +// pub fn into_bytes(bits: Vec) -> Vec { +// assert_eq!(bits.len() % 8, 0); +// let mut message_bytes: Vec = vec![]; +// +// let byte_chunks = bits.chunks(8); +// for byte_chunk in byte_chunks { +// let mut byte = 0u8; +// for (i, bit) in byte_chunk.iter().enumerate() { +// if *bit { +// byte |= 1 << i; +// } +// } +// message_bytes.push(byte); +// } +// +// message_bytes +// } +// +// /// Converts a set of bits to a set of bytes in reverse order for each byte. +// #[allow(clippy::wrong_self_convention)] +// pub fn into_bytes_ordered(bits: Vec) -> Vec { +// assert_eq!(bits.len() % 8, 0); +// let mut message_bytes: Vec = vec![]; +// +// let byte_chunks = bits.chunks(8); +// for byte_chunk in byte_chunks { +// let mut byte = 0u8; +// for (i, bit) in byte_chunk.iter().rev().enumerate() { +// if *bit { +// byte |= 1 << i; +// } +// } +// message_bytes.push(byte); +// } +// +// message_bytes +// } +// +// /// Converts a set of Big Endian bytes to a set of bits. 
+// pub fn from_be_bytes(bytes: &[u8]) -> Vec { +// let mut bits = vec![]; +// for byte in bytes { +// let mut temp = *byte; +// for _ in 0..8 { +// bits.push(temp & 0x80 == 0x80); +// temp <<= 1; +// } +// } +// bits +// } +// } +// +// #[cfg(test)] +// mod test { +// use super::*; +// +// #[test] +// fn test_bits_conversions() { +// let mut bits = vec![]; +// +// bits.extend(vec![true, false, false, true, true, false, true, false]); +// bits.extend(vec![false, false, true, true, false, true, true, false]); +// bits.extend(vec![false, false, false, false, false, false, false, true]); +// +// let bytes = BitConvert::into_bytes(bits.clone()); +// assert_eq!(bytes, vec![89, 108, 128]); +// +// let bytes = BitConvert::into_bytes_ordered(bits.clone()); +// assert_eq!(bytes, vec![154, 54, 1]); +// +// assert_eq!(BitConvert::from_be_bytes(&[154, 54, 1]), bits); +// } +// } diff --git a/core/lib/crypto/src/proof.rs b/core/lib/crypto/src/proof.rs new file mode 100644 index 000000000000..3af6228410af --- /dev/null +++ b/core/lib/crypto/src/proof.rs @@ -0,0 +1,59 @@ +use serde::{Deserialize, Serialize}; +use zksync_basic_types::{ethabi::Token, U256}; + +/// Encoded representation of the aggregated block proof. 
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+pub struct EncodedAggregatedProof {
+    pub aggregated_input: U256,
+    pub proof: Vec<U256>,
+    pub subproof_limbs: Vec<U256>,
+    pub individual_vk_inputs: Vec<U256>,
+    pub individual_vk_idxs: Vec<U256>,
+}
+
+impl EncodedAggregatedProof {
+    /// Packs the proof into the `(subproof_limbs, proof)` tuple expected by the L1 contract.
+    pub fn get_eth_tx_args(&self) -> Token {
+        let subproof_limbs = Token::Array(
+            self.subproof_limbs
+                .iter()
+                .map(|v| Token::Uint(*v))
+                .collect(),
+        );
+        let proof = Token::Array(
+            self.proof
+                .iter()
+                .map(|p| Token::Uint(U256::from(p)))
+                .collect(),
+        );
+
+        Token::Tuple(vec![subproof_limbs, proof])
+    }
+}
+
+impl Default for EncodedAggregatedProof {
+    fn default() -> Self {
+        Self {
+            aggregated_input: U256::default(),
+            proof: vec![U256::default(); 34],
+            subproof_limbs: vec![U256::default(); 16],
+            individual_vk_inputs: vec![U256::default(); 1],
+            individual_vk_idxs: vec![U256::default(); 1],
+        }
+    }
+}
+
+/// Encoded representation of the block proof.
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+pub struct EncodedSingleProof {
+    pub inputs: Vec<U256>,
+    pub proof: Vec<U256>,
+}
+
+impl Default for EncodedSingleProof {
+    fn default() -> Self {
+        Self {
+            inputs: vec![U256::default(); 1],
+            proof: vec![U256::default(); 33],
+        }
+    }
+}
diff --git a/core/lib/crypto/src/serialization.rs b/core/lib/crypto/src/serialization.rs
new file mode 100644
index 000000000000..41ab89fefcad
--- /dev/null
+++ b/core/lib/crypto/src/serialization.rs
@@ -0,0 +1,462 @@
+// //! Common serialization utilities.
+// //!
+// //! This module provides building blocks for serializing and deserializing
+// //! common `zksync` types.
+// +// use crate::{ +// bellman::plonk::{ +// better_better_cs::{cs::Circuit as NewCircuit, proof::Proof as NewProof}, +// better_cs::{cs::PlonkCsWidth4WithNextStepParams, keys::Proof as OldProof}, +// }, +// convert::FeConvert, +// primitives::EthereumSerializer, +// proof::EncodedSingleProof, +// recursive_aggregation_circuit::circuit::RecursiveAggregationCircuitBn256, +// Engine, Fr, +// }; +// use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; +// use zksync_basic_types::U256; +// +// /// Blanket structure implementing serializing/deserializing methods for `Fr`. +// /// +// /// This structure is required, since `Fr` does not originate in the current +// /// crate and we can't implement `serde` traits for it. +// /// +// /// ## Example: +// /// +// /// ``` +// /// use zksync_crypto::serialization::FrSerde; +// /// use zksync_crypto::Fr; +// /// use serde::{Serialize, Deserialize}; +// /// +// /// #[derive(Clone, Debug, Serialize, Deserialize)] +// /// pub struct SomeStructure { +// /// #[serde(with = "FrSerde")] +// /// pub some_data: Fr, +// /// } +// /// ``` +// pub struct FrSerde; +// +// impl FrSerde { +// pub fn serialize(value: &Fr, serializer: S) -> Result +// where +// S: Serializer, +// { +// // First, serialize `Fr` to hexadecimal string. +// let hex_value = value.to_hex(); +// +// // Then, serialize it using `Serialize` trait implementation for `String`. +// String::serialize(&hex_value, serializer) +// } +// +// pub fn deserialize<'de, D>(deserializer: D) -> Result +// where +// D: Deserializer<'de>, +// { +// // First, deserialize a string value. It is expected to be a +// // hexadecimal representation of `Fr`. +// let deserialized_string = String::deserialize(deserializer)?; +// +// // Then, parse hexadecimal string to obtain `Fr`. +// Fr::from_hex(&deserialized_string).map_err(de::Error::custom) +// } +// } +// +// /// Blanket structure implementing serializing/deserializing methods for `Option`. 
+// /// +// /// ## Example: +// /// +// /// ``` +// /// use zksync_crypto::serialization::OptionalFrSerde; +// /// use zksync_crypto::Fr; +// /// use serde::{Serialize, Deserialize}; +// /// +// /// #[derive(Clone, Debug, Serialize, Deserialize)] +// /// pub struct SomeStructure { +// /// #[serde(with = "OptionalFrSerde")] +// /// pub maybe_some_data: Option, +// /// } +// /// ``` +// pub struct OptionalFrSerde; +// +// impl OptionalFrSerde { +// pub fn serialize(value: &Option, serializer: S) -> Result +// where +// S: Serializer, +// { +// let optional_hex_value = value.map(|fr| fr.to_hex()); +// +// Option::serialize(&optional_hex_value, serializer) +// } +// +// pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> +// where +// D: Deserializer<'de>, +// { +// let optional_deserialized_string: Option = Option::deserialize(deserializer)?; +// +// // Apply `fe_from_hex` to the contents of `Option`, then transpose result to have +// // `Result, ..>` and adapt error to the expected format. +// optional_deserialized_string +// .map(|v| Fr::from_hex(&v)) +// .transpose() +// .map_err(de::Error::custom) +// } +// } +// +// /// Blanket structure implementing serializing/deserializing methods for `Vec>`. 
+// /// +// /// ## Example: +// /// +// /// ``` +// /// use zksync_crypto::serialization::VecOptionalFrSerde; +// /// use zksync_crypto::Fr; +// /// use serde::{Serialize, Deserialize}; +// /// +// /// #[derive(Clone, Debug, Serialize, Deserialize)] +// /// pub struct SomeStructure { +// /// #[serde(with = "VecOptionalFrSerde")] +// /// pub maybe_some_data: Vec>, +// /// } +// /// ``` +// pub struct VecOptionalFrSerde; +// +// impl VecOptionalFrSerde { +// pub fn serialize(operations: &[Option], ser: S) -> Result +// where +// S: Serializer, +// { +// let mut res = Vec::with_capacity(operations.len()); +// for value in operations.iter() { +// let v = value.map(|fr| fr.to_hex()); +// res.push(v); +// } +// Vec::serialize(&res, ser) +// } +// +// pub fn deserialize<'de, D>(deserializer: D) -> Result>, D::Error> +// where +// D: Deserializer<'de>, +// { +// let str_vec: Vec> = Vec::deserialize(deserializer)?; +// let mut res = Vec::with_capacity(str_vec.len()); +// for s in str_vec.into_iter() { +// if let Some(a) = s { +// let v = Fr::from_hex(&a).map_err(de::Error::custom)?; +// res.push(Some(v)); +// } else { +// res.push(None); +// } +// } +// Ok(res) +// } +// } +// +// /// Blanket structure implementing serializing/deserializing methods for `Vec`. 
+// /// +// /// ## Example: +// /// +// /// ``` +// /// use zksync_crypto::serialization::VecFrSerde; +// /// use zksync_crypto::Fr; +// /// use serde::{Serialize, Deserialize}; +// /// +// /// #[derive(Clone, Debug, Serialize, Deserialize)] +// /// pub struct SomeStructure { +// /// #[serde(with = "VecFrSerde")] +// /// pub vec_fr: Vec, +// /// } +// /// ``` +// pub struct VecFrSerde; +// +// impl VecFrSerde { +// pub fn serialize(operations: &[Fr], ser: S) -> Result +// where +// S: Serializer, +// { +// let mut res = Vec::with_capacity(operations.len()); +// for fr in operations.iter() { +// res.push(fr.to_hex()); +// } +// Vec::serialize(&res, ser) +// } +// +// pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> +// where +// D: Deserializer<'de>, +// { +// let str_vec: Vec = Vec::deserialize(deserializer)?; +// let mut res = Vec::with_capacity(str_vec.len()); +// for s in str_vec.into_iter() { +// let v = Fr::from_hex(&s).map_err(de::Error::custom)?; +// res.push(v); +// } +// Ok(res) +// } +// } +// +// pub struct SingleProofSerde; +// +// impl SingleProofSerde { +// pub fn serialize( +// value: &OldProof, +// serializer: S, +// ) -> Result +// where +// S: Serializer, +// { +// // First, serialize `SingleProof` to base64 string. +// let mut bytes = Vec::new(); +// value.write(&mut bytes).map_err(ser::Error::custom)?; +// let base64_value = base64::encode(&bytes); +// +// // Then, serialize it using `Serialize` trait implementation for `String`. +// String::serialize(&base64_value, serializer) +// } +// +// pub fn deserialize<'de, D>( +// deserializer: D, +// ) -> Result, D::Error> +// where +// D: Deserializer<'de>, +// { +// // First, deserialize a string value. It is expected to be a +// // base64 representation of `SingleProof`. +// let deserialized_string = String::deserialize(deserializer)?; +// let bytes = base64::decode(&deserialized_string).map_err(de::Error::custom)?; +// +// // Then, parse hexadecimal string to obtain `SingleProof`. 
+// OldProof::read(&*bytes).map_err(de::Error::custom) +// } +// } +// +// pub struct AggregatedProofSerde; +// +// impl AggregatedProofSerde { +// pub fn serialize( +// value: &NewProof>, +// serializer: S, +// ) -> Result +// where +// S: Serializer, +// { +// // First, serialize `AggregatedProof` to base64 string. +// let mut bytes = Vec::new(); +// value.write(&mut bytes).map_err(ser::Error::custom)?; +// let base64_value = base64::encode(&bytes); +// +// // Then, serialize it using `Serialize` trait implementation for `String`. +// String::serialize(&base64_value, serializer) +// } +// +// pub fn deserialize<'de, D>( +// deserializer: D, +// ) -> Result>, D::Error> +// where +// D: Deserializer<'de>, +// { +// // First, deserialize a string value. It is expected to be a +// // base64 representation of `AggregatedProof`. +// let deserialized_string = String::deserialize(deserializer)?; +// let bytes = base64::decode(&deserialized_string).map_err(de::Error::custom)?; +// +// // Then, parse hexadecimal string to obtain `SingleProof`. 
+// NewProof::read(&*bytes).map_err(de::Error::custom) +// } +// } +// +// pub fn serialize_new_proof>( +// proof: &NewProof, +// ) -> (Vec, Vec) { +// let mut inputs = vec![]; +// for input in proof.inputs.iter() { +// inputs.push(EthereumSerializer::serialize_fe(input)); +// } +// let mut serialized_proof = vec![]; +// +// for c in proof.state_polys_commitments.iter() { +// let (x, y) = EthereumSerializer::serialize_g1(c); +// serialized_proof.push(x); +// serialized_proof.push(y); +// } +// +// let (x, y) = EthereumSerializer::serialize_g1(&proof.copy_permutation_grand_product_commitment); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// for c in proof.quotient_poly_parts_commitments.iter() { +// let (x, y) = EthereumSerializer::serialize_g1(c); +// serialized_proof.push(x); +// serialized_proof.push(y); +// } +// +// for c in proof.state_polys_openings_at_z.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// for (_, _, c) in proof.state_polys_openings_at_dilations.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// assert_eq!(proof.gate_setup_openings_at_z.len(), 0); +// +// for (_, c) in proof.gate_selectors_openings_at_z.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// for c in proof.copy_permutation_polys_openings_at_z.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.copy_permutation_grand_product_opening_at_z_omega, +// )); +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.quotient_poly_opening_at_z, +// )); +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.linearization_poly_opening_at_z, +// )); +// +// let (x, y) = EthereumSerializer::serialize_g1(&proof.opening_proof_at_z); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// let (x, y) = 
EthereumSerializer::serialize_g1(&proof.opening_proof_at_z_omega); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// (inputs, serialized_proof) +// } +// +// pub fn serialize_single_proof( +// proof: &OldProof, +// ) -> EncodedSingleProof { +// let mut inputs = vec![]; +// for input in proof.input_values.iter() { +// let ser = EthereumSerializer::serialize_fe(input); +// inputs.push(ser); +// } +// let mut serialized_proof = vec![]; +// +// for c in proof.wire_commitments.iter() { +// let (x, y) = EthereumSerializer::serialize_g1(c); +// serialized_proof.push(x); +// serialized_proof.push(y); +// } +// +// let (x, y) = EthereumSerializer::serialize_g1(&proof.grand_product_commitment); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// for c in proof.quotient_poly_commitments.iter() { +// let (x, y) = EthereumSerializer::serialize_g1(c); +// serialized_proof.push(x); +// serialized_proof.push(y); +// } +// +// for c in proof.wire_values_at_z.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// for c in proof.wire_values_at_z_omega.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.grand_product_at_z_omega, +// )); +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.quotient_polynomial_at_z, +// )); +// serialized_proof.push(EthereumSerializer::serialize_fe( +// &proof.linearization_polynomial_at_z, +// )); +// +// for c in proof.permutation_polynomials_at_z.iter() { +// serialized_proof.push(EthereumSerializer::serialize_fe(c)); +// } +// +// let (x, y) = EthereumSerializer::serialize_g1(&proof.opening_at_z_proof); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// let (x, y) = EthereumSerializer::serialize_g1(&proof.opening_at_z_omega_proof); +// serialized_proof.push(x); +// serialized_proof.push(y); +// +// EncodedSingleProof { +// inputs, +// proof: 
serialized_proof, +// } +// } +// +// #[cfg(test)] +// mod tests { +// use super::*; +// use serde::{Deserialize, Serialize}; +// use serde_json::json; +// +// #[test] +// fn test_fr_serialize() { +// #[derive(Debug, Default, Serialize, Deserialize)] +// struct Reference { +// #[serde(with = "FrSerde")] +// value: Fr, +// } +// +// let value = Reference::default(); +// let serialized_fr = serde_json::to_string(&value).expect("Serialization failed"); +// let expected = json!({ +// "value": "0000000000000000000000000000000000000000000000000000000000000000" +// }); +// +// assert_eq!(serialized_fr, expected.to_string()); +// } +// +// #[test] +// fn test_optional_fr_serialize() { +// #[derive(Debug, Default, Serialize, Deserialize)] +// struct Reference { +// #[serde(with = "OptionalFrSerde")] +// value: Option, +// } +// +// // Check serialization of `None`. +// let value = Reference { value: None }; +// let serialized_fr = serde_json::to_string(&value).expect("Serialization failed"); +// let expected = json!({ "value": null }); +// +// assert_eq!(serialized_fr, expected.to_string()); +// +// // Check serialization of `Some`. 
+// let value = Reference { +// value: Some(Fr::default()), +// }; +// let serialized_fr = serde_json::to_string(&value).expect("Serialization failed"); +// let expected = json!({ +// "value": "0000000000000000000000000000000000000000000000000000000000000000" +// }); +// +// assert_eq!(serialized_fr, expected.to_string()); +// } +// +// #[test] +// fn test_vec_optional_fr_serialize() { +// #[derive(Debug, Default, Serialize, Deserialize)] +// struct Reference { +// #[serde(with = "VecOptionalFrSerde")] +// value: Vec>, +// } +// +// let value = Reference { +// value: vec![None, Some(Fr::default())], +// }; +// let serialized_fr = serde_json::to_string(&value).expect("Serialization failed"); +// let expected = json!({ +// "value": [null, "0000000000000000000000000000000000000000000000000000000000000000"] +// }); +// +// assert_eq!(serialized_fr, expected.to_string()); +// } +// } diff --git a/core/lib/dal/.gitignore b/core/lib/dal/.gitignore new file mode 100644 index 000000000000..4c49bd78f1d0 --- /dev/null +++ b/core/lib/dal/.gitignore @@ -0,0 +1 @@ +.env diff --git a/core/lib/dal/Cargo.toml b/core/lib/dal/Cargo.toml new file mode 100644 index 000000000000..acc26ad7bde3 --- /dev/null +++ b/core/lib/dal/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "zksync_dal" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_utils = { path = "../utils", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } +zksync_contracts = { path = "../contracts", version = "1.0" } +zksync_types = { path = "../types", version = "1.0" } +vm = { path = "../vm", version = "0.1.0" } +zksync_state = { path = "../state", version = "1.0" } +zksync_storage = { path = "../storage", version = "1.0" } +zksync_web3_decl = { path = "../web3_decl", version = 
"1.0" } +itertools = "0.10.1" +vlog = { path = "../../lib/vlog", version = "1.0" } +zksync_object_store = { path = "../object_store", version = "1.0" } +thiserror = "1.0" +anyhow = "1.0" +metrics = "0.20" +async-std = "1.12.0" +sqlx = { version = "0.5", default-features = false, features = [ + "runtime-async-std-native-tls", + "macros", + "postgres", + "bigdecimal", + "chrono", + "json", + "offline", + "migrate", + "ipnetwork" +] } +serde_json = "1.0" +bigdecimal = "0.2.0" +bincode = "1" + +num = { version = "0.3.1" } +hex = "0.4" +once_cell = "1.7" + +[dev-dependencies] +db_test_macro = { path = "../db_test_macro", version = "0.1.0" } +tokio = { version = "1", features = ["time"] } +#criterion = "0.3.0" diff --git a/core/lib/dal/migrations/20211026134308_init.down.sql b/core/lib/dal/migrations/20211026134308_init.down.sql new file mode 100644 index 000000000000..7cee93fe176b --- /dev/null +++ b/core/lib/dal/migrations/20211026134308_init.down.sql @@ -0,0 +1,39 @@ +DROP TABLE tokens; + +DROP INDEX transactions_block_number_idx; +DROP TABLE transactions; + +DROP TABLE storage; + +DROP INDEX contracts_block_number_idx; +DROP INDEX contracts_tx_hash_idx; +DROP TABLE contracts; + +DROP TABLE proof; +DROP TABLE aggregated_proof; + +DROP INDEX storage_logs_block_number_idx; +DROP INDEX storage_logs_raw_key_block_number_idx; +DROP TABLE storage_logs; + +DROP TABLE contract_sources; +DROP TABLE transaction_traces; + +DROP INDEX events_tx_location_idx; +DROP INDEX events_address_idx; +DROP INDEX events_topic1_idx; +DROP INDEX events_topic2_idx; +DROP INDEX events_topic3_idx; +DROP INDEX events_topic4_idx; +DROP INDEX events_tx_hash_idx; +DROP TABLE events; + +DROP TABLE factory_deps; + +DROP INDEX blocks_eth_commit_tx_id_idx; +DROP INDEX blocks_eth_execute_tx_id_idx; +DROP INDEX blocks_eth_prove_tx_id_idx; +DROP TABLE blocks; + +DROP TABLE eth_txs_history; +DROP TABLE eth_txs diff --git a/core/lib/dal/migrations/20211026134308_init.up.sql 
b/core/lib/dal/migrations/20211026134308_init.up.sql new file mode 100644 index 000000000000..624751c90ce0 --- /dev/null +++ b/core/lib/dal/migrations/20211026134308_init.up.sql @@ -0,0 +1,232 @@ +CREATE TABLE storage +( + raw_key BYTEA PRIMARY KEY, + value BYTEA NOT NULL, + tx_hash BYTEA NOT NULL, + address BYTEA NOT NULL, + key BYTEA NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +) + WITH (fillfactor = 50); + +CREATE TABLE eth_txs ( + id SERIAL NOT NULL PRIMARY KEY , + nonce BIGINT NOT NULL, + raw_tx BYTEA NOT NULL, + contract_address TEXT NOT NULL, + tx_type TEXT NOT NULL, + gas_price BIGINT, + confirmed_tx_hash TEXT, + confirmed_at TIMESTAMP, + gas_used BIGINT, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE eth_txs_history ( + id SERIAL NOT NULL PRIMARY KEY , + eth_tx_id SERIAL NOT NULL REFERENCES eth_txs (id) ON DELETE CASCADE, + gas_price BIGINT NOT NULL , + deadline_block INT NOT NULL, + tx_hash TEXT NOT NULL, + error TEXT, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE blocks +( + number BIGSERIAL PRIMARY KEY, + timestamp BIGINT NOT NULL, + is_finished BOOL NOT NULL, + priority_ops_complexity NUMERIC(80) NOT NULL, + l1_tx_count INT NOT NULL, + l2_tx_count INT NOT NULL, + fee_account_address BYTEA NOT NULL, + bloom BYTEA NOT NULL, + priority_ops_onchain_data BYTEA[] NOT NULL, + processable_onchain_ops BYTEA[] NOT NULL, + + hash BYTEA, + parent_hash BYTEA, + commitment BYTEA, + compressed_write_logs BYTEA, + compressed_contracts BYTEA, + eth_prove_tx_id INT REFERENCES eth_txs (id) ON DELETE SET NULL, + eth_commit_tx_id INT REFERENCES eth_txs (id) ON DELETE SET NULL, + eth_execute_tx_id INT REFERENCES eth_txs (id) ON DELETE SET NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX blocks_eth_commit_tx_id_idx ON blocks (eth_commit_tx_id); +CREATE INDEX blocks_eth_execute_tx_id_idx ON blocks (eth_execute_tx_id); 
+CREATE INDEX blocks_eth_prove_tx_id_idx ON blocks (eth_prove_tx_id); + +CREATE TABLE transactions +( + hash BYTEA PRIMARY KEY, + is_priority BOOLEAN NOT NULL, + full_fee NUMERIC(80), + layer_2_tip_fee NUMERIC(80), + initiator_address BYTEA NOT NULL, + nonce BIGINT NOT NULL, + signature BYTEA, + valid_from NUMERIC(20) NOT NULL, + valid_until NUMERIC(20) NOT NULL, + fee_token BYTEA, + input BYTEA, + data JSONB NOT NULL, + type VARCHAR NOT NULL, + received_at TIMESTAMP NOT NULL, + priority_op_id BIGINT, + + block_number BIGINT REFERENCES blocks (number) ON DELETE SET NULL, + index_in_block INT, + error VARCHAR, + + ergs_limit NUMERIC(80), + ergs_price_limit NUMERIC(80), + ergs_per_storage_limit NUMERIC(80), + ergs_per_pubdata_limit NUMERIC(80), + tx_format INTEGER, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX transactions_block_number_idx ON transactions (block_number); + +CREATE TABLE aggregated_proof ( + id SERIAL PRIMARY KEY, + from_block_number BIGINT REFERENCES blocks (number) ON DELETE CASCADE, + to_block_number BIGINT REFERENCES blocks (number) ON DELETE CASCADE, + proof BYTEA, + eth_prove_tx_id INT REFERENCES eth_txs (id) ON DELETE SET NULL, + + created_at TIMESTAMP NOT NULL +); + +CREATE TABLE proof ( + block_number BIGINT PRIMARY KEY REFERENCES blocks (number) ON DELETE CASCADE, + proof BYTEA, + + created_at TIMESTAMP NOT NULL +); + +CREATE TABLE contracts +( + address BYTEA PRIMARY KEY, + bytecode BYTEA NOT NULL, + tx_hash BYTEA NOT NULL, + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX contracts_block_number_idx ON contracts (block_number); +CREATE INDEX contracts_tx_hash_idx ON contracts (tx_hash); + +CREATE TABLE storage_logs +( + id BIGSERIAL PRIMARY KEY, + raw_key BYTEA NOT NULL, + address BYTEA NOT NULL, + key BYTEA NOT NULL, + value BYTEA NOT NULL, + operation_number INT NOT NULL, + tx_hash 
BYTEA NOT NULL, + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX storage_logs_block_number_idx ON storage_logs (block_number); +CREATE INDEX storage_logs_raw_key_block_number_idx ON storage_logs (raw_key, block_number DESC, operation_number DESC); + +CREATE TABLE contract_sources +( + address BYTEA PRIMARY KEY, + assembly_code TEXT NOT NULL, + pc_line_mapping JSONB NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE transaction_traces +( + tx_hash BYTEA PRIMARY KEY, + trace JSONB NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE events +( + id BIGSERIAL PRIMARY KEY, + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + tx_hash BYTEA NOT NULL, + tx_index_in_block INT NOT NULL, + address BYTEA NOT NULL, + + event_index_in_block INT NOT NULL, + event_index_in_tx INT NOT NULL, + + topic1 BYTEA NOT NULL, + topic2 BYTEA NOT NULL, + topic3 BYTEA NOT NULL, + topic4 BYTEA NOT NULL, + + value BYTEA NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX events_tx_location_idx ON events (block_number, tx_index_in_block); +CREATE INDEX events_address_idx ON events USING hash (address); +CREATE INDEX events_topic1_idx ON events USING hash (topic1); +CREATE INDEX events_topic2_idx ON events USING hash (topic2); +CREATE INDEX events_topic3_idx ON events USING hash (topic3); +CREATE INDEX events_topic4_idx ON events USING hash (topic4); +CREATE INDEX events_tx_hash_idx ON events USING hash (tx_hash); + +CREATE TABLE factory_deps +( + bytecode_hash BYTEA PRIMARY KEY, + bytecode BYTEA NOT NULL, + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE tokens ( + address BYTEA PRIMARY KEY, + name VARCHAR NOT 
NULL, + symbol VARCHAR NOT NULL, + decimals INT NOT NULL, + well_known BOOLEAN NOT NULL, + + token_list_name VARCHAR, + token_list_symbol VARCHAR, + token_list_decimals INT, + + usd_price NUMERIC, + usd_price_updated_at TIMESTAMP, + + market_volume NUMERIC, + market_volume_updated_at TIMESTAMP, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +) diff --git a/core/lib/dal/migrations/20220120160234_add_tx_execution_info.down.sql b/core/lib/dal/migrations/20220120160234_add_tx_execution_info.down.sql new file mode 100644 index 000000000000..291607f9e445 --- /dev/null +++ b/core/lib/dal/migrations/20220120160234_add_tx_execution_info.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN execution_info; diff --git a/core/lib/dal/migrations/20220120160234_add_tx_execution_info.up.sql b/core/lib/dal/migrations/20220120160234_add_tx_execution_info.up.sql new file mode 100644 index 000000000000..9958a482543f --- /dev/null +++ b/core/lib/dal/migrations/20220120160234_add_tx_execution_info.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ADD COLUMN execution_info JSONB NOT NULL DEFAULT '{}'; diff --git a/core/lib/dal/migrations/20220127113853_tx_received_at_index.down.sql b/core/lib/dal/migrations/20220127113853_tx_received_at_index.down.sql new file mode 100644 index 000000000000..6d3893ff2dd6 --- /dev/null +++ b/core/lib/dal/migrations/20220127113853_tx_received_at_index.down.sql @@ -0,0 +1 @@ +DROP INDEX transactions_received_at_idx; diff --git a/core/lib/dal/migrations/20220127113853_tx_received_at_index.up.sql b/core/lib/dal/migrations/20220127113853_tx_received_at_index.up.sql new file mode 100644 index 000000000000..6c95825e9928 --- /dev/null +++ b/core/lib/dal/migrations/20220127113853_tx_received_at_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX transactions_received_at_idx ON transactions(received_at); diff --git a/core/lib/dal/migrations/20220204131627_add_merkle_root.down.sql b/core/lib/dal/migrations/20220204131627_add_merkle_root.down.sql new 
file mode 100644 index 000000000000..bbfd110c795e --- /dev/null +++ b/core/lib/dal/migrations/20220204131627_add_merkle_root.down.sql @@ -0,0 +1 @@ +ALTER TABLE blocks DROP COLUMN merkle_root_hash; diff --git a/core/lib/dal/migrations/20220204131627_add_merkle_root.up.sql b/core/lib/dal/migrations/20220204131627_add_merkle_root.up.sql new file mode 100644 index 000000000000..b0443818ac5b --- /dev/null +++ b/core/lib/dal/migrations/20220204131627_add_merkle_root.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocks ADD COLUMN merkle_root_hash BYTEA; +UPDATE blocks SET merkle_root_hash = hash; diff --git a/core/lib/dal/migrations/20220204223302_transactions_new_mempool.down.sql b/core/lib/dal/migrations/20220204223302_transactions_new_mempool.down.sql new file mode 100644 index 000000000000..f2395ca2af77 --- /dev/null +++ b/core/lib/dal/migrations/20220204223302_transactions_new_mempool.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE transactions DROP COLUMN mempool_preceding_nonce_gaps; +ALTER TABLE transactions ALTER COLUMN nonce SET NOT NULL; + +DROP INDEX transactions_initiator_address_nonce; diff --git a/core/lib/dal/migrations/20220204223302_transactions_new_mempool.up.sql b/core/lib/dal/migrations/20220204223302_transactions_new_mempool.up.sql new file mode 100644 index 000000000000..b7efd43da471 --- /dev/null +++ b/core/lib/dal/migrations/20220204223302_transactions_new_mempool.up.sql @@ -0,0 +1,9 @@ +ALTER TABLE transactions ADD COLUMN mempool_preceding_nonce_gaps SMALLINT; +ALTER TABLE transactions ALTER COLUMN nonce DROP NOT NULL; + +UPDATE transactions SET nonce = NULL where transactions.is_priority = TRUE; +UPDATE transactions SET mempool_preceding_nonce_gaps = 0 where transactions.is_priority = FALSE; + +DELETE FROM transactions WHERE block_number IS NULL and error IS NOT NULL; + +CREATE UNIQUE INDEX transactions_initiator_address_nonce ON transactions (initiator_address, nonce); diff --git a/core/lib/dal/migrations/20220304160112_drop_events_indices.down.sql
b/core/lib/dal/migrations/20220304160112_drop_events_indices.down.sql new file mode 100644 index 000000000000..ea5562600c55 --- /dev/null +++ b/core/lib/dal/migrations/20220304160112_drop_events_indices.down.sql @@ -0,0 +1,5 @@ +CREATE INDEX events_address_idx ON events USING hash (address); +CREATE INDEX events_topic1_idx ON events USING hash (topic1); +CREATE INDEX events_topic2_idx ON events USING hash (topic2); +CREATE INDEX events_topic3_idx ON events USING hash (topic3); +CREATE INDEX events_topic4_idx ON events USING hash (topic4); diff --git a/core/lib/dal/migrations/20220304160112_drop_events_indices.up.sql b/core/lib/dal/migrations/20220304160112_drop_events_indices.up.sql new file mode 100644 index 000000000000..d854066cd682 --- /dev/null +++ b/core/lib/dal/migrations/20220304160112_drop_events_indices.up.sql @@ -0,0 +1,5 @@ +DROP INDEX events_address_idx; +DROP INDEX events_topic1_idx; +DROP INDEX events_topic2_idx; +DROP INDEX events_topic3_idx; +DROP INDEX events_topic4_idx; diff --git a/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.down.sql b/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.down.sql new file mode 100644 index 000000000000..a4ecab9c7a25 --- /dev/null +++ b/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocks DROP COLUMN l2_to_l1_logs; +ALTER TABLE blocks DROP COLUMN l2_to_l1_messages; diff --git a/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.up.sql b/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.up.sql new file mode 100644 index 000000000000..415d43f116bd --- /dev/null +++ b/core/lib/dal/migrations/20220308124416_add_l2_to_l1_communication.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocks ADD COLUMN l2_to_l1_logs BYTEA[] NOT NULL DEFAULT '{}'; +ALTER TABLE blocks ADD COLUMN l2_to_l1_messages BYTEA[] NOT NULL DEFAULT '{}'; diff --git a/core/lib/dal/migrations/20220315095541_txs_priority_id.down.sql 
b/core/lib/dal/migrations/20220315095541_txs_priority_id.down.sql new file mode 100644 index 000000000000..a9ed17e06ad3 --- /dev/null +++ b/core/lib/dal/migrations/20220315095541_txs_priority_id.down.sql @@ -0,0 +1 @@ +DROP INDEX transactions_priority_op_id_idx; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220315095541_txs_priority_id.up.sql b/core/lib/dal/migrations/20220315095541_txs_priority_id.up.sql new file mode 100644 index 000000000000..968b62571c48 --- /dev/null +++ b/core/lib/dal/migrations/20220315095541_txs_priority_id.up.sql @@ -0,0 +1 @@ +CREATE INDEX ON "transactions" (priority_op_id) WHERE priority_op_id IS NOT NULL; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.down.sql b/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.down.sql new file mode 100644 index 000000000000..f386119447c1 --- /dev/null +++ b/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions ADD COLUMN valid_from NUMERIC(20) NOT NULL DEFAULT 0; +ALTER TABLE transactions ADD COLUMN valid_until NUMERIC(20) NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.up.sql b/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.up.sql new file mode 100644 index 000000000000..9cd1e4749f8d --- /dev/null +++ b/core/lib/dal/migrations/20220315144416_remove_tx_valid_in.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions DROP COLUMN valid_from; +ALTER TABLE transactions DROP COLUMN valid_until; diff --git a/core/lib/dal/migrations/20220321131756_change_events_loc_index.down.sql b/core/lib/dal/migrations/20220321131756_change_events_loc_index.down.sql new file mode 100644 index 000000000000..7395817f6605 --- /dev/null +++ b/core/lib/dal/migrations/20220321131756_change_events_loc_index.down.sql @@ -0,0 +1,2 @@ +CREATE INDEX events_tx_location_idx ON events (block_number, tx_index_in_block); +DROP INDEX events_block_idx; diff 
--git a/core/lib/dal/migrations/20220321131756_change_events_loc_index.up.sql b/core/lib/dal/migrations/20220321131756_change_events_loc_index.up.sql new file mode 100644 index 000000000000..47a6cdfcf19d --- /dev/null +++ b/core/lib/dal/migrations/20220321131756_change_events_loc_index.up.sql @@ -0,0 +1,2 @@ +DROP INDEX events_tx_location_idx; +CREATE INDEX events_block_idx ON events (block_number); diff --git a/core/lib/dal/migrations/20220325153146_remove_events_id.down.sql b/core/lib/dal/migrations/20220325153146_remove_events_id.down.sql new file mode 100644 index 000000000000..ee107ddabf59 --- /dev/null +++ b/core/lib/dal/migrations/20220325153146_remove_events_id.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE events DROP CONSTRAINT events_pkey; +ALTER TABLE events ADD COLUMN id SERIAL PRIMARY KEY; + +CREATE INDEX events_block_idx ON events (block_number); diff --git a/core/lib/dal/migrations/20220325153146_remove_events_id.up.sql b/core/lib/dal/migrations/20220325153146_remove_events_id.up.sql new file mode 100644 index 000000000000..e5ba475ea8d4 --- /dev/null +++ b/core/lib/dal/migrations/20220325153146_remove_events_id.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE events DROP CONSTRAINT events_pkey; +ALTER TABLE events DROP COLUMN id; +ALTER TABLE events ADD PRIMARY KEY (block_number, event_index_in_block); + +DROP INDEX events_block_idx; diff --git a/core/lib/dal/migrations/20220401114554_storage_tables_migration.down.sql b/core/lib/dal/migrations/20220401114554_storage_tables_migration.down.sql new file mode 100644 index 000000000000..2e50078a1fb4 --- /dev/null +++ b/core/lib/dal/migrations/20220401114554_storage_tables_migration.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE storage RENAME COLUMN hashed_key TO raw_key; + +ALTER TABLE storage_logs DROP CONSTRAINT storage_logs_pkey; +ALTER TABLE storage_logs RENAME COLUMN hashed_key TO raw_key; +ALTER TABLE storage_logs ADD COLUMN id SERIAL PRIMARY KEY; +CREATE INDEX storage_logs_raw_key_block_number_idx ON storage_logs (raw_key, 
block_number DESC, operation_number DESC); diff --git a/core/lib/dal/migrations/20220401114554_storage_tables_migration.up.sql b/core/lib/dal/migrations/20220401114554_storage_tables_migration.up.sql new file mode 100644 index 000000000000..91a58b8089f0 --- /dev/null +++ b/core/lib/dal/migrations/20220401114554_storage_tables_migration.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE storage RENAME COLUMN raw_key TO hashed_key; + +DROP INDEX storage_logs_raw_key_block_number_idx; +ALTER TABLE storage_logs DROP CONSTRAINT storage_logs_pkey; +ALTER TABLE storage_logs DROP COLUMN id; +ALTER TABLE storage_logs RENAME COLUMN raw_key TO hashed_key; +ALTER TABLE storage_logs ADD PRIMARY KEY (hashed_key, block_number, operation_number); diff --git a/core/lib/dal/migrations/20220404102332_eth_tx_index.down.sql b/core/lib/dal/migrations/20220404102332_eth_tx_index.down.sql new file mode 100644 index 000000000000..eb96686583d4 --- /dev/null +++ b/core/lib/dal/migrations/20220404102332_eth_tx_index.down.sql @@ -0,0 +1 @@ +DROP INDEX eth_txs_history_eth_tx_id_idx; diff --git a/core/lib/dal/migrations/20220404102332_eth_tx_index.up.sql b/core/lib/dal/migrations/20220404102332_eth_tx_index.up.sql new file mode 100644 index 000000000000..33d67b475c3d --- /dev/null +++ b/core/lib/dal/migrations/20220404102332_eth_tx_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX ON "eth_txs_history" (eth_tx_id); diff --git a/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.down.sql b/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.down.sql new file mode 100644 index 000000000000..4036955ee6e5 --- /dev/null +++ b/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.down.sql @@ -0,0 +1 @@ +ALTER TABLE eth_txs DROP COLUMN has_failed; diff --git a/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.up.sql b/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.up.sql new file mode 100644 index 000000000000..e878892f0634 --- /dev/null +++ 
b/core/lib/dal/migrations/20220406085905_eth_tx_has_failed_status.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE eth_txs ADD COLUMN has_failed BOOLEAN NOT NULL default false; +CREATE INDEX eth_txs_has_failed_idx ON eth_txs(has_failed) WHERE has_failed = TRUE; diff --git a/core/lib/dal/migrations/20220412142956_add_block_hash_index.down.sql b/core/lib/dal/migrations/20220412142956_add_block_hash_index.down.sql new file mode 100644 index 000000000000..ec02fec513de --- /dev/null +++ b/core/lib/dal/migrations/20220412142956_add_block_hash_index.down.sql @@ -0,0 +1 @@ +DROP INDEX blocks_hash; diff --git a/core/lib/dal/migrations/20220412142956_add_block_hash_index.up.sql b/core/lib/dal/migrations/20220412142956_add_block_hash_index.up.sql new file mode 100644 index 000000000000..80b6d1ecbad6 --- /dev/null +++ b/core/lib/dal/migrations/20220412142956_add_block_hash_index.up.sql @@ -0,0 +1 @@ +CREATE INDEX blocks_hash ON blocks USING hash (hash); diff --git a/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.down.sql b/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.down.sql new file mode 100644 index 000000000000..5b427a45cd4d --- /dev/null +++ b/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.down.sql @@ -0,0 +1,3 @@ +UPDATE transactions +SET data = data || '{"to_porter_authorized": false}'::jsonb +WHERE type = 'deposit' diff --git a/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.up.sql b/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.up.sql new file mode 100644 index 000000000000..4bef42f30da5 --- /dev/null +++ b/core/lib/dal/migrations/20220421125432_drop_to_porter_authorized.up.sql @@ -0,0 +1,3 @@ +UPDATE transactions +SET data = data - 'to_porter_authorized' +WHERE type = 'deposit' diff --git a/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.down.sql b/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.down.sql new file mode 100644 index 000000000000..907b2ff9bdae --- /dev/null 
+++ b/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ADD COLUMN mempool_preceding_nonce_gaps SMALLINT; diff --git a/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.up.sql b/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.up.sql new file mode 100644 index 000000000000..76a1703bd880 --- /dev/null +++ b/core/lib/dal/migrations/20220504154136_remove_nonce_gaps.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN mempool_preceding_nonce_gaps; diff --git a/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.down.sql b/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.down.sql new file mode 100644 index 000000000000..40f4d74714f4 --- /dev/null +++ b/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE eth_txs DROP COLUMN base_fee_per_gas; +ALTER TABLE eth_txs DROP COLUMN priority_fee_per_gas; +ALTER TABLE eth_txs ALTER COLUMN gas_price SET NOT NULL; +ALTER TABLE eth_txs_history DROP COLUMN base_fee_per_gas; +ALTER TABLE eth_txs_history DROP COLUMN priority_fee_per_gas; +ALTER TABLE eth_txs_history ALTER COLUMN gas_price SET NOT NULL; diff --git a/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.up.sql b/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.up.sql new file mode 100644 index 000000000000..8a57c8e75941 --- /dev/null +++ b/core/lib/dal/migrations/20220509214600_using_base_fee_and_priority_fee.up.sql @@ -0,0 +1,6 @@ +ALTER TABLE eth_txs ADD COLUMN base_fee_per_gas BIGINT; +ALTER TABLE eth_txs ADD COLUMN priority_fee_per_gas BIGINT; +ALTER TABLE eth_txs ALTER COLUMN gas_price DROP NOT NULL; +ALTER TABLE eth_txs_history ADD COLUMN base_fee_per_gas BIGINT; +ALTER TABLE eth_txs_history ADD COLUMN priority_fee_per_gas BIGINT; +ALTER TABLE eth_txs_history ALTER COLUMN gas_price DROP NOT NULL; diff --git 
a/core/lib/dal/migrations/20220517135000_add_contract_address_column.down.sql b/core/lib/dal/migrations/20220517135000_add_contract_address_column.down.sql new file mode 100644 index 000000000000..12da9bc24ddb --- /dev/null +++ b/core/lib/dal/migrations/20220517135000_add_contract_address_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN IF EXISTS contract_address; diff --git a/core/lib/dal/migrations/20220517135000_add_contract_address_column.up.sql b/core/lib/dal/migrations/20220517135000_add_contract_address_column.up.sql new file mode 100644 index 000000000000..ca2661970b58 --- /dev/null +++ b/core/lib/dal/migrations/20220517135000_add_contract_address_column.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS contract_address BYTEA; +CREATE INDEX IF NOT EXISTS transactions_contract_address_idx ON transactions (contract_address); diff --git a/core/lib/dal/migrations/20220519101248_btree_events_indices.down.sql b/core/lib/dal/migrations/20220519101248_btree_events_indices.down.sql new file mode 100644 index 000000000000..dda05a23f17f --- /dev/null +++ b/core/lib/dal/migrations/20220519101248_btree_events_indices.down.sql @@ -0,0 +1,5 @@ +DROP INDEX IF EXISTS events_address_idx; +DROP INDEX IF EXISTS events_topic1_idx; +DROP INDEX IF EXISTS events_topic2_idx; +DROP INDEX IF EXISTS events_topic3_idx; +DROP INDEX IF EXISTS events_topic4_idx; diff --git a/core/lib/dal/migrations/20220519101248_btree_events_indices.up.sql b/core/lib/dal/migrations/20220519101248_btree_events_indices.up.sql new file mode 100644 index 000000000000..9d7bb93899f1 --- /dev/null +++ b/core/lib/dal/migrations/20220519101248_btree_events_indices.up.sql @@ -0,0 +1,5 @@ +CREATE INDEX IF NOT EXISTS events_address_idx ON events USING btree (address); +CREATE INDEX IF NOT EXISTS events_topic1_idx ON events USING btree (topic1); +CREATE INDEX IF NOT EXISTS events_topic2_idx ON events USING btree (topic2); +CREATE INDEX IF NOT EXISTS events_topic3_idx ON 
events USING btree (topic3); +CREATE INDEX IF NOT EXISTS events_topic4_idx ON events USING btree (topic4); diff --git a/core/lib/dal/migrations/20220519103453_in_mempool.down.sql b/core/lib/dal/migrations/20220519103453_in_mempool.down.sql new file mode 100644 index 000000000000..adc7f9b0a357 --- /dev/null +++ b/core/lib/dal/migrations/20220519103453_in_mempool.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN IF EXISTS in_mempool; diff --git a/core/lib/dal/migrations/20220519103453_in_mempool.up.sql b/core/lib/dal/migrations/20220519103453_in_mempool.up.sql new file mode 100644 index 000000000000..8b81b721eba1 --- /dev/null +++ b/core/lib/dal/migrations/20220519103453_in_mempool.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS in_mempool BOOLEAN NOT NULL default false; +CREATE INDEX IF NOT EXISTS transactions_in_mempool_idx ON transactions (in_mempool) WHERE in_mempool = TRUE; diff --git a/core/lib/dal/migrations/20220519150925_eth_sender_refactor.down.sql b/core/lib/dal/migrations/20220519150925_eth_sender_refactor.down.sql new file mode 100644 index 000000000000..c5a2ceebdf71 --- /dev/null +++ b/core/lib/dal/migrations/20220519150925_eth_sender_refactor.down.sql @@ -0,0 +1,16 @@ +ALTER TABLE eth_txs ADD COLUMN gas_price BIGINT; +ALTER TABLE eth_txs ADD COLUMN priority_fee_per_gas BIGINT; +ALTER TABLE eth_txs ADD COLUMN base_fee_per_gas BIGINT; +ALTER TABLE eth_txs ADD COLUMN confirmed_tx_hash TEXT; +ALTER TABLE eth_txs ADD COLUMN confirmed_at TIMESTAMP NOT NULL; +ALTER TABLE eth_txs DROP COLUMN sent_at_block; + +ALTER TABLE eth_txs DROP COLUMN confirmed_eth_tx_history_id; + +ALTER TABLE eth_txs_history ALTER COLUMN priority_fee_per_gas DROP NOT NULL; +ALTER TABLE eth_txs_history ALTER COLUMN base_fee_per_gas DROP NOT NULL; +ALTER TABLE eth_txs_history ADD COLUMN deadline_block INT; +ALTER TABLE eth_txs_history ADD COLUMN error TEXT; +ALTER TABLE eth_txs_history ADD COLUMN gas_price BIGINT; +ALTER TABLE eth_txs_history DROP 
COLUMN confirmed_at; + diff --git a/core/lib/dal/migrations/20220519150925_eth_sender_refactor.up.sql b/core/lib/dal/migrations/20220519150925_eth_sender_refactor.up.sql new file mode 100644 index 000000000000..03a4fc4238c0 --- /dev/null +++ b/core/lib/dal/migrations/20220519150925_eth_sender_refactor.up.sql @@ -0,0 +1,17 @@ +ALTER TABLE eth_txs DROP COLUMN gas_price; +ALTER TABLE eth_txs DROP COLUMN priority_fee_per_gas; +ALTER TABLE eth_txs DROP COLUMN base_fee_per_gas; +ALTER TABLE eth_txs DROP COLUMN confirmed_tx_hash; +ALTER TABLE eth_txs DROP COLUMN confirmed_at; +ALTER TABLE eth_txs ADD COLUMN sent_at_block INT NOT NULL; + +ALTER TABLE eth_txs ADD COLUMN confirmed_eth_tx_history_id INT REFERENCES eth_txs_history(id) ON DELETE SET NULL; +CREATE INDEX inflight_eth_txs ON eth_txs ((confirmed_eth_tx_history_id IS NULL)); + +ALTER TABLE eth_txs_history ALTER COLUMN priority_fee_per_gas SET NOT NULL; +ALTER TABLE eth_txs_history ALTER COLUMN base_fee_per_gas SET NOT NULL; +ALTER TABLE eth_txs_history DROP COLUMN deadline_block; +ALTER TABLE eth_txs_history DROP COLUMN error; +ALTER TABLE eth_txs_history DROP COLUMN gas_price; +ALTER TABLE eth_txs_history ADD COLUMN confirmed_at TIMESTAMP; + diff --git a/core/lib/dal/migrations/20220519201431_change_block_commitment_model.down.sql b/core/lib/dal/migrations/20220519201431_change_block_commitment_model.down.sql new file mode 100644 index 000000000000..68d6a3f5b4d9 --- /dev/null +++ b/core/lib/dal/migrations/20220519201431_change_block_commitment_model.down.sql @@ -0,0 +1,2 @@ +-- Add down migration script here +ALTER TABLE blocks ADD COLUMN processable_onchain_ops BYTEA[] NOT NULL; diff --git a/core/lib/dal/migrations/20220519201431_change_block_commitment_model.up.sql b/core/lib/dal/migrations/20220519201431_change_block_commitment_model.up.sql new file mode 100644 index 000000000000..497dd31c1472 --- /dev/null +++ b/core/lib/dal/migrations/20220519201431_change_block_commitment_model.up.sql @@ -0,0 +1 @@ +ALTER 
TABLE blocks DROP COLUMN processable_onchain_ops; diff --git a/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.down.sql b/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.down.sql new file mode 100644 index 000000000000..658101320c98 --- /dev/null +++ b/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.down.sql @@ -0,0 +1,2 @@ +DROP INDEX IF EXISTS transactions_block_number_tx_index; +DROP INDEX IF EXISTS events_block_number_tx_index; diff --git a/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.up.sql b/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.up.sql new file mode 100644 index 000000000000..b126b00da9bf --- /dev/null +++ b/core/lib/dal/migrations/20220527103820_block_number_tx_index_indices.up.sql @@ -0,0 +1,2 @@ +CREATE INDEX IF NOT EXISTS transactions_block_number_tx_index ON transactions (block_number, index_in_block); +CREATE INDEX IF NOT EXISTS events_block_number_tx_index ON events (block_number, tx_index_in_block); diff --git a/core/lib/dal/migrations/20220601105448_contract_verification.down.sql b/core/lib/dal/migrations/20220601105448_contract_verification.down.sql new file mode 100644 index 000000000000..6ee9df075b05 --- /dev/null +++ b/core/lib/dal/migrations/20220601105448_contract_verification.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE contracts DROP COLUMN IF EXISTS verification_info; +DROP TABLE IF EXISTS contract_verification_requests; +DROP INDEX IF EXISTS contract_verification_requests_queued_idx; diff --git a/core/lib/dal/migrations/20220601105448_contract_verification.up.sql b/core/lib/dal/migrations/20220601105448_contract_verification.up.sql new file mode 100644 index 000000000000..26f3ed1c28e5 --- /dev/null +++ b/core/lib/dal/migrations/20220601105448_contract_verification.up.sql @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT EXISTS contract_verification_requests ( + id BIGSERIAL PRIMARY KEY, + contract_address BYTEA NOT NULL, + source_code TEXT NOT 
NULL, + contract_name TEXT NOT NULL, + compiler_version TEXT NOT NULL, + optimization_used BOOLEAN NOT NULL, + constructor_arguments BYTEA NOT NULL, + + status TEXT NOT NULL, + error TEXT, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +ALTER TABLE contracts ADD COLUMN IF NOT EXISTS verification_info JSONB; +CREATE INDEX IF NOT EXISTS contract_verification_requests_queued_idx + ON contract_verification_requests (created_at) WHERE status = 'queued'; diff --git a/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.down.sql b/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.down.sql new file mode 100644 index 000000000000..bf2b398dd371 --- /dev/null +++ b/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE blocks DROP COLUMN IF EXISTS predicted_commit_gas_cost; +ALTER TABLE blocks DROP COLUMN IF EXISTS predicted_prove_gas_cost; +ALTER TABLE blocks DROP COLUMN IF EXISTS predicted_execute_gas_cost; + +ALTER TABLE eth_txs DROP COLUMN IF EXISTS predicted_gas_cost; diff --git a/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.up.sql b/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.up.sql new file mode 100644 index 000000000000..0dd40d70a859 --- /dev/null +++ b/core/lib/dal/migrations/20220610121550_add_predicted_gas_columns.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS predicted_commit_gas_cost BIGINT NOT NULL DEFAULT 0; +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS predicted_prove_gas_cost BIGINT NOT NULL DEFAULT 0; +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS predicted_execute_gas_cost BIGINT NOT NULL DEFAULT 0; + +ALTER TABLE eth_txs ADD COLUMN IF NOT EXISTS predicted_gas_cost BIGINT NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20220611134715_token-bridging.down.sql b/core/lib/dal/migrations/20220611134715_token-bridging.down.sql new file mode 100644 index 000000000000..0c53092f238c --- /dev/null +++ 
b/core/lib/dal/migrations/20220611134715_token-bridging.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE tokens DROP COLUMN l1_address; +ALTER TABLE tokens DROP COLUMN l2_address; +ALTER TABLE tokens ADD COLUMN address BYTEA; diff --git a/core/lib/dal/migrations/20220611134715_token-bridging.up.sql b/core/lib/dal/migrations/20220611134715_token-bridging.up.sql new file mode 100644 index 000000000000..e4eac95c7a73 --- /dev/null +++ b/core/lib/dal/migrations/20220611134715_token-bridging.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE tokens ADD COLUMN l1_address BYTEA NOT NULL; +ALTER TABLE tokens ADD COLUMN l2_address BYTEA NOT NULL; +ALTER TABLE tokens DROP COLUMN address; +ALTER TABLE tokens ADD PRIMARY KEY (l1_address); +CREATE UNIQUE INDEX l2_address_index on tokens (l2_address); diff --git a/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.down.sql b/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.down.sql new file mode 100644 index 000000000000..ff31f8f637e9 --- /dev/null +++ b/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + DROP COLUMN IF EXISTS compilation_errors; diff --git a/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.up.sql b/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.up.sql new file mode 100644 index 000000000000..d90f909f14a9 --- /dev/null +++ b/core/lib/dal/migrations/20220621103309_contract_verification_compilation_errors.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ADD COLUMN IF NOT EXISTS compilation_errors JSONB; diff --git a/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.down.sql b/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.down.sql new file mode 100644 index 000000000000..4072d0059fd0 --- /dev/null +++ 
b/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN l1_block_number; diff --git a/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.up.sql b/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.up.sql new file mode 100644 index 000000000000..c2c2aeeccd05 --- /dev/null +++ b/core/lib/dal/migrations/20220630134601_track_l1_block_number_for_priority_ops.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ADD COLUMN l1_block_number INT; diff --git a/core/lib/dal/migrations/20220704121755_witness_inputs.down.sql b/core/lib/dal/migrations/20220704121755_witness_inputs.down.sql new file mode 100644 index 000000000000..a380962523da --- /dev/null +++ b/core/lib/dal/migrations/20220704121755_witness_inputs.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS witness_inputs; diff --git a/core/lib/dal/migrations/20220704121755_witness_inputs.up.sql b/core/lib/dal/migrations/20220704121755_witness_inputs.up.sql new file mode 100644 index 000000000000..ed0baafcf064 --- /dev/null +++ b/core/lib/dal/migrations/20220704121755_witness_inputs.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS witness_inputs +( + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + merkle_tree_paths BYTEA, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX IF NOT EXISTS witness_inputs_block_number_idx ON witness_inputs (block_number); diff --git a/core/lib/dal/migrations/20220705133822_add_value_column.down.sql b/core/lib/dal/migrations/20220705133822_add_value_column.down.sql new file mode 100644 index 000000000000..69e6ed1e09e2 --- /dev/null +++ b/core/lib/dal/migrations/20220705133822_add_value_column.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN IF EXISTS value; diff --git a/core/lib/dal/migrations/20220705133822_add_value_column.up.sql 
b/core/lib/dal/migrations/20220705133822_add_value_column.up.sql new file mode 100644 index 000000000000..82aaa1e4f6dd --- /dev/null +++ b/core/lib/dal/migrations/20220705133822_add_value_column.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS value NUMERIC(80) NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20220707151034_storage_logs_dedup.down.sql b/core/lib/dal/migrations/20220707151034_storage_logs_dedup.down.sql new file mode 100644 index 000000000000..bb548e7b5e4b --- /dev/null +++ b/core/lib/dal/migrations/20220707151034_storage_logs_dedup.down.sql @@ -0,0 +1 @@ +drop table storage_logs_dedup; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220707151034_storage_logs_dedup.up.sql b/core/lib/dal/migrations/20220707151034_storage_logs_dedup.up.sql new file mode 100644 index 000000000000..1ff369c507ed --- /dev/null +++ b/core/lib/dal/migrations/20220707151034_storage_logs_dedup.up.sql @@ -0,0 +1,21 @@ +create table storage_logs_dedup +( + hashed_key bytea not null, + address bytea not null, + key bytea not null, + value_read bytea not null, + value_written bytea not null, + is_write boolean not null, + operation_number integer not null, + block_number bigint not null + constraint storage_logs_dedup_block_number_fkey + references blocks + on delete cascade, + created_at timestamp not null, + constraint storage_logs_dedup_pkey + primary key (hashed_key, block_number, operation_number) +); + +create index storage_logs_dedup_block_number_idx + on storage_logs_dedup (block_number); + diff --git a/core/lib/dal/migrations/20220708093726_make-type-not-null.down.sql b/core/lib/dal/migrations/20220708093726_make-type-not-null.down.sql new file mode 100644 index 000000000000..49efa0b5277d --- /dev/null +++ b/core/lib/dal/migrations/20220708093726_make-type-not-null.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ALTER COLUMN type SET NOT NULL; diff --git a/core/lib/dal/migrations/20220708093726_make-type-not-null.up.sql 
b/core/lib/dal/migrations/20220708093726_make-type-not-null.up.sql new file mode 100644 index 000000000000..cd815697b4c3 --- /dev/null +++ b/core/lib/dal/migrations/20220708093726_make-type-not-null.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ALTER COLUMN type DROP NOT NULL; diff --git a/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.down.sql b/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.down.sql new file mode 100644 index 000000000000..24458858a004 --- /dev/null +++ b/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.down.sql @@ -0,0 +1 @@ +ALTER TABLE blocks DROP COLUMN IF EXISTS initial_bootloader_heap_content; diff --git a/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.up.sql b/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.up.sql new file mode 100644 index 000000000000..6373cabfbd83 --- /dev/null +++ b/core/lib/dal/migrations/20220711132020_save_initial_bootloader_content.up.sql @@ -0,0 +1 @@ +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS initial_bootloader_heap_content JSONB NOT NULL; diff --git a/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.down.sql b/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.down.sql new file mode 100644 index 000000000000..cd73be4906f5 --- /dev/null +++ b/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.down.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs DROP COLUMN status; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.up.sql b/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.up.sql new file mode 100644 index 000000000000..2fe61d3a6556 --- /dev/null +++ b/core/lib/dal/migrations/20220712073626_add_columns_to_witness_inputs.up.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs ADD COLUMN status TEXT NOT NULL; diff --git 
a/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.down.sql b/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.down.sql new file mode 100644 index 000000000000..feaed7a47aac --- /dev/null +++ b/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocks DROP COLUMN used_contract_hashes; + diff --git a/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.up.sql b/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.up.sql new file mode 100644 index 000000000000..904bd8154ab9 --- /dev/null +++ b/core/lib/dal/migrations/20220712093408_add_used_contracts_to_blocks.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE blocks ADD COLUMN used_contract_hashes JSONB NOT NULL; + diff --git a/core/lib/dal/migrations/20220713090226_remove-priority-mode.down.sql b/core/lib/dal/migrations/20220713090226_remove-priority-mode.down.sql new file mode 100644 index 000000000000..71ace2012566 --- /dev/null +++ b/core/lib/dal/migrations/20220713090226_remove-priority-mode.down.sql @@ -0,0 +1 @@ +ALTER TABLE blocks ADD COLUMN priority_ops_complexity NUMERIC(80) NOT NULL; diff --git a/core/lib/dal/migrations/20220713090226_remove-priority-mode.up.sql b/core/lib/dal/migrations/20220713090226_remove-priority-mode.up.sql new file mode 100644 index 000000000000..ddb6cece9ae6 --- /dev/null +++ b/core/lib/dal/migrations/20220713090226_remove-priority-mode.up.sql @@ -0,0 +1 @@ +ALTER TABLE blocks DROP COLUMN priority_ops_complexity; diff --git a/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.down.sql b/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.down.sql new file mode 100644 index 000000000000..3c7e1b9beef8 --- /dev/null +++ b/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + DROP COLUMN IF EXISTS processing_started_at; diff --git 
a/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.up.sql b/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.up.sql new file mode 100644 index 000000000000..3b5d6a2a8fab --- /dev/null +++ b/core/lib/dal/migrations/20220729125750_add-contract-verification-started-at.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests + ADD COLUMN IF NOT EXISTS processing_started_at TIMESTAMP; diff --git a/core/lib/dal/migrations/20220801131413_add_paymaster_data.down.sql b/core/lib/dal/migrations/20220801131413_add_paymaster_data.down.sql new file mode 100644 index 000000000000..fcb97ed55888 --- /dev/null +++ b/core/lib/dal/migrations/20220801131413_add_paymaster_data.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions DROP COLUMN IF EXISTS paymaster; +ALTER TABLE transactions DROP COLUMN IF EXISTS paymaster_input; diff --git a/core/lib/dal/migrations/20220801131413_add_paymaster_data.up.sql b/core/lib/dal/migrations/20220801131413_add_paymaster_data.up.sql new file mode 100644 index 000000000000..80d4aaf1f70a --- /dev/null +++ b/core/lib/dal/migrations/20220801131413_add_paymaster_data.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS paymaster BYTEA NOT NULL; +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS paymaster_input BYTEA NOT NULL; diff --git a/core/lib/dal/migrations/20220807172230_block_metadata.down.sql b/core/lib/dal/migrations/20220807172230_block_metadata.down.sql new file mode 100644 index 000000000000..f38b676e3e0c --- /dev/null +++ b/core/lib/dal/migrations/20220807172230_block_metadata.down.sql @@ -0,0 +1,11 @@ +ALTER TABLE blocks DROP compressed_repeated_writes; +ALTER TABLE blocks DROP compressed_initial_writes; +ALTER TABLE blocks DROP l2_l1_compressed_messages; +ALTER TABLE blocks DROP l2_l1_merkle_root; +ALTER TABLE blocks DROP l2_l1_linear_hash; +ALTER TABLE blocks DROP ergs_per_pubdata_byte_in_block; +ALTER TABLE blocks DROP ergs_per_code_decommittment_word; +ALTER 
TABLE blocks DROP rollup_last_leaf_index; +ALTER TABLE blocks DROP zkporter_is_available; +ALTER TABLE blocks DROP bootloader_code_hash; +ALTER TABLE blocks DROP default_aa_code_hash; diff --git a/core/lib/dal/migrations/20220807172230_block_metadata.up.sql b/core/lib/dal/migrations/20220807172230_block_metadata.up.sql new file mode 100644 index 000000000000..3467c0af2439 --- /dev/null +++ b/core/lib/dal/migrations/20220807172230_block_metadata.up.sql @@ -0,0 +1,10 @@ +ALTER TABLE blocks ADD compressed_initial_writes BYTEA; +ALTER TABLE blocks ADD compressed_repeated_writes BYTEA; +ALTER TABLE blocks ADD l2_l1_compressed_messages BYTEA; +ALTER TABLE blocks ADD l2_l1_merkle_root BYTEA; +ALTER TABLE blocks ADD ergs_per_pubdata_byte_in_block INT; +ALTER TABLE blocks ADD ergs_per_code_decommittment_word INT; +ALTER TABLE blocks ADD rollup_last_leaf_index BIGINT; +ALTER TABLE blocks ADD zkporter_is_available BOOL; +ALTER TABLE blocks ADD bootloader_code_hash BYTEA; +ALTER TABLE blocks ADD default_aa_code_hash BYTEA; diff --git a/core/lib/dal/migrations/20220815130828_support_eip1559.down.sql b/core/lib/dal/migrations/20220815130828_support_eip1559.down.sql new file mode 100644 index 000000000000..a7c11e581583 --- /dev/null +++ b/core/lib/dal/migrations/20220815130828_support_eip1559.down.sql @@ -0,0 +1,7 @@ +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS ergs_price_limit NUMERIC(80); + +ALTER TABLE transactions DROP COLUMN IF EXISTS max_fee_per_erg; +ALTER TABLE transactions DROP COLUMN IF EXISTS max_priority_fee_per_erg; +ALTER TABLE transactions DROP COLUMN IF EXISTS effective_gas_price NUMERIC(80); + +ALTER TABLE blocks DROP COLUMN IF EXISTS base_fee_per_erg; diff --git a/core/lib/dal/migrations/20220815130828_support_eip1559.up.sql b/core/lib/dal/migrations/20220815130828_support_eip1559.up.sql new file mode 100644 index 000000000000..879f894a6284 --- /dev/null +++ b/core/lib/dal/migrations/20220815130828_support_eip1559.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE 
transactions DROP COLUMN IF EXISTS ergs_price_limit; + +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS max_fee_per_erg NUMERIC(80); +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS max_priority_fee_per_erg NUMERIC(80); +ALTER TABLE transactions ADD COLUMN IF NOT EXISTS effective_gas_price NUMERIC(80); + +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS base_fee_per_erg NUMERIC(80) NOT NULL DEFAULT 1; diff --git a/core/lib/dal/migrations/20220816141002_add_prover_jobs.down.sql b/core/lib/dal/migrations/20220816141002_add_prover_jobs.down.sql new file mode 100644 index 000000000000..c264d19e9440 --- /dev/null +++ b/core/lib/dal/migrations/20220816141002_add_prover_jobs.down.sql @@ -0,0 +1 @@ +DROP TABLE prover_jobs; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220816141002_add_prover_jobs.up.sql b/core/lib/dal/migrations/20220816141002_add_prover_jobs.up.sql new file mode 100644 index 000000000000..16a89116727f --- /dev/null +++ b/core/lib/dal/migrations/20220816141002_add_prover_jobs.up.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS prover_jobs +( + id BIGSERIAL PRIMARY KEY, + block_number BIGINT NOT NULL REFERENCES blocks (number) ON DELETE CASCADE, + circuit_type TEXT NOT NULL, + prover_input BYTEA NOT NULL, + + status TEXT NOT NULL, + error TEXT, + + processing_started_at TIMESTAMP, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); diff --git a/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.down.sql b/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.down.sql new file mode 100644 index 000000000000..41a43919e3d4 --- /dev/null +++ b/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.down.sql @@ -0,0 +1 @@ +ALTER TABLE blocks DROP COLUMN IF EXISTS ergs_per_pubdata_limit; diff --git a/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.up.sql b/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.up.sql new file 
mode 100644 index 000000000000..c75165477a0a --- /dev/null +++ b/core/lib/dal/migrations/20220826100615_save_block_ergs_per_pubdata_limit.up.sql @@ -0,0 +1 @@ +ALTER TABLE blocks ADD COLUMN IF NOT EXISTS ergs_per_pubdata_limit BIGINT NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20220827110416_miniblocks.down.sql b/core/lib/dal/migrations/20220827110416_miniblocks.down.sql new file mode 100644 index 000000000000..459fd00e5711 --- /dev/null +++ b/core/lib/dal/migrations/20220827110416_miniblocks.down.sql @@ -0,0 +1,46 @@ +ALTER TABLE l1_batches RENAME TO blocks; + +ALTER TABLE proof RENAME CONSTRAINT proof_l1_batch_number_fkey TO proof_block_number_fkey; +ALTER TABLE proof RENAME COLUMN l1_batch_number TO block_number; + +ALTER TABLE prover_jobs RENAME CONSTRAINT prover_jobs_l1_batch_number_fkey TO prover_jobs_block_number_fkey; +ALTER TABLE prover_jobs RENAME COLUMN l1_batch_number TO block_number; + +ALTER TABLE witness_inputs RENAME CONSTRAINT witness_inputs_l1_batch_number_fkey TO witness_inputs_block_number_fkey; +ALTER TABLE witness_inputs RENAME COLUMN l1_batch_number TO block_number; + +ALTER TABLE storage_logs_dedup RENAME CONSTRAINT storage_logs_dedup_l1_batch_number_fkey TO storage_logs_dedup_block_number_fkey; +ALTER TABLE storage_logs_dedup RENAME COLUMN l1_batch_number TO block_number; + +ALTER TABLE storage_logs DROP CONSTRAINT storage_logs_miniblock_number_fkey; +ALTER TABLE storage_logs RENAME COLUMN miniblock_number TO block_number; +ALTER TABLE storage_logs ADD CONSTRAINT storage_logs_block_number_fkey + FOREIGN KEY (block_number) REFERENCES blocks (number); + +ALTER TABLE factory_deps DROP CONSTRAINT factory_deps_miniblock_number_fkey; +ALTER TABLE factory_deps RENAME COLUMN miniblock_number TO block_number; +ALTER TABLE factory_deps ADD CONSTRAINT factory_deps_block_number_fkey + FOREIGN KEY (block_number) REFERENCES blocks (number); + +ALTER TABLE events DROP CONSTRAINT events_miniblock_number_fkey; +ALTER TABLE events RENAME COLUMN 
miniblock_number TO block_number; +ALTER TABLE events ADD CONSTRAINT events_block_number_fkey + FOREIGN KEY (block_number) REFERENCES blocks (number); + +ALTER TABLE contracts DROP CONSTRAINT contracts_miniblock_number_fkey; +ALTER TABLE contracts RENAME COLUMN miniblock_number TO block_number; +ALTER TABLE contracts ADD CONSTRAINT contracts_block_number_fkey + FOREIGN KEY (block_number) REFERENCES blocks (number); + +ALTER TABLE transactions RENAME CONSTRAINT transactions_l1_batch_number_fkey TO transactions_block_number_fkey; +ALTER TABLE transactions RENAME COLUMN l1_batch_number TO block_number; + +DROP INDEX transactions_miniblock_number_tx_index_idx; +ALTER INDEX transactions_l1_batch_number_idx RENAME TO transactions_block_number_idx; +CREATE INDEX transactions_block_number_tx_index ON transactions (block_number, index_in_block); + +ALTER TABLE transactions DROP CONSTRAINT transactions_miniblock_number_fkey; +ALTER TABLE transactions DROP COLUMN miniblock_number; + +DROP INDEX miniblocks_l1_batch_number_idx; +DROP TABLE miniblocks; diff --git a/core/lib/dal/migrations/20220827110416_miniblocks.up.sql b/core/lib/dal/migrations/20220827110416_miniblocks.up.sql new file mode 100644 index 000000000000..60f2d2fa6d2e --- /dev/null +++ b/core/lib/dal/migrations/20220827110416_miniblocks.up.sql @@ -0,0 +1,68 @@ +CREATE TABLE miniblocks ( + number BIGSERIAL PRIMARY KEY, + l1_batch_number BIGINT, + timestamp BIGINT NOT NULL, + hash BYTEA NOT NULL, + + l1_tx_count INT NOT NULL, + l2_tx_count INT NOT NULL, + + base_fee_per_erg NUMERIC(80) NOT NULL, + ergs_per_pubdata_limit BIGINT NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); +CREATE INDEX miniblocks_l1_batch_number_idx ON miniblocks (l1_batch_number); + +INSERT INTO miniblocks + (number, l1_batch_number, timestamp, hash, l1_tx_count, l2_tx_count, base_fee_per_erg, ergs_per_pubdata_limit, created_at, updated_at) +SELECT number, number as l1_batch_number, timestamp, '\x' as hash, 
l1_tx_count, l2_tx_count, base_fee_per_erg, ergs_per_pubdata_limit, + now() as created_at, now() as updated_at +FROM blocks; + +ALTER TABLE transactions ADD COLUMN miniblock_number BIGINT; +ALTER TABLE transactions ADD CONSTRAINT transactions_miniblock_number_fkey + FOREIGN KEY (miniblock_number) REFERENCES miniblocks (number); +UPDATE transactions SET miniblock_number = block_number; + +DROP INDEX transactions_block_number_tx_index; +ALTER INDEX transactions_block_number_idx RENAME TO transactions_l1_batch_number_idx; +CREATE INDEX transactions_miniblock_number_tx_index_idx ON transactions (miniblock_number, index_in_block); + +ALTER TABLE transactions RENAME COLUMN block_number TO l1_batch_number; +ALTER TABLE transactions RENAME CONSTRAINT transactions_block_number_fkey to transactions_l1_batch_number_fkey; + +ALTER TABLE contracts DROP CONSTRAINT contracts_block_number_fkey; +ALTER TABLE contracts RENAME COLUMN block_number TO miniblock_number; +ALTER TABLE contracts ADD CONSTRAINT contracts_miniblock_number_fkey + FOREIGN KEY (miniblock_number) REFERENCES miniblocks (number); + +ALTER TABLE events DROP CONSTRAINT events_block_number_fkey; +ALTER TABLE events RENAME COLUMN block_number TO miniblock_number; +ALTER TABLE events ADD CONSTRAINT events_miniblock_number_fkey + FOREIGN KEY (miniblock_number) REFERENCES miniblocks (number); + +ALTER TABLE factory_deps DROP CONSTRAINT factory_deps_block_number_fkey; +ALTER TABLE factory_deps RENAME COLUMN block_number TO miniblock_number; +ALTER TABLE factory_deps ADD CONSTRAINT factory_deps_miniblock_number_fkey + FOREIGN KEY (miniblock_number) REFERENCES miniblocks (number); + +ALTER TABLE storage_logs DROP CONSTRAINT storage_logs_block_number_fkey; +ALTER TABLE storage_logs RENAME COLUMN block_number TO miniblock_number; +ALTER TABLE storage_logs ADD CONSTRAINT storage_logs_miniblock_number_fkey + FOREIGN KEY (miniblock_number) REFERENCES miniblocks (number); + +ALTER TABLE storage_logs_dedup RENAME COLUMN 
block_number TO l1_batch_number; +ALTER TABLE storage_logs_dedup RENAME CONSTRAINT storage_logs_dedup_block_number_fkey TO storage_logs_dedup_l1_batch_number_fkey; + +ALTER TABLE witness_inputs RENAME COLUMN block_number TO l1_batch_number; +ALTER TABLE witness_inputs RENAME CONSTRAINT witness_inputs_block_number_fkey TO witness_inputs_l1_batch_number_fkey; + +ALTER TABLE prover_jobs RENAME COLUMN block_number TO l1_batch_number; +ALTER TABLE prover_jobs RENAME CONSTRAINT prover_jobs_block_number_fkey TO prover_jobs_l1_batch_number_fkey; + +ALTER TABLE proof RENAME COLUMN block_number TO l1_batch_number; +ALTER TABLE proof RENAME CONSTRAINT proof_block_number_fkey TO proof_l1_batch_number_fkey; + +ALTER TABLE blocks RENAME TO l1_batches; diff --git a/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.down.sql b/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.down.sql new file mode 100644 index 000000000000..cb4e7ca2e45a --- /dev/null +++ b/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests DROP COLUMN compiler_solc_version; +ALTER TABLE contract_verification_requests RENAME COLUMN compiler_zksolc_version TO compiler_version; diff --git a/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.up.sql b/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.up.sql new file mode 100644 index 000000000000..be971ed74159 --- /dev/null +++ b/core/lib/dal/migrations/20220902115015_add-solc-version-to-contract-verifier.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE contract_verification_requests RENAME COLUMN compiler_version TO compiler_zksolc_version; +ALTER TABLE contract_verification_requests ADD COLUMN compiler_solc_version TEXT NOT NULL DEFAULT '0.8.16'; diff --git a/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.down.sql 
b/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.down.sql new file mode 100644 index 000000000000..b5716792dc82 --- /dev/null +++ b/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.down.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs DROP COLUMN IF EXISTS time_taken; diff --git a/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.up.sql b/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.up.sql new file mode 100644 index 000000000000..383a6c85a13c --- /dev/null +++ b/core/lib/dal/migrations/20220902124458_add_time_taken_column_in_witness_inputs.up.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs ADD COLUMN IF NOT EXISTS time_taken TIME NOT NULL DEFAULT '00:00:00'; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.down.sql b/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.down.sql new file mode 100644 index 000000000000..f4aed688e8f1 --- /dev/null +++ b/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE witness_inputs + DROP COLUMN IF EXISTS processing_started_at; diff --git a/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.up.sql b/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.up.sql new file mode 100644 index 000000000000..ed6f2047ebac --- /dev/null +++ b/core/lib/dal/migrations/20220902190932_add_processing_started_at_to_witness_inputs.sql.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE witness_inputs + ADD COLUMN IF NOT EXISTS processing_started_at TIMESTAMP; diff --git a/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.down.sql b/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.down.sql new file mode 100644 index 000000000000..bfa90683d29b 
--- /dev/null +++ b/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.down.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs DROP COLUMN IF EXISTS time_taken; diff --git a/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.up.sql b/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.up.sql new file mode 100644 index 000000000000..693bcbba101e --- /dev/null +++ b/core/lib/dal/migrations/20220904161256_add_time_taken_column_in_prover_jobs.up.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs ADD COLUMN IF NOT EXISTS time_taken TIME NOT NULL DEFAULT '00:00:00'; \ No newline at end of file diff --git a/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.down.sql b/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.down.sql new file mode 100644 index 000000000000..8fa4aa78b46a --- /dev/null +++ b/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE witness_inputs DROP COLUMN IF EXISTS error; +ALTER TABLE prover_jobs DROP COLUMN IF EXISTS error; + diff --git a/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.up.sql b/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.up.sql new file mode 100644 index 000000000000..8f7edcdb0421 --- /dev/null +++ b/core/lib/dal/migrations/20220908094615_add_error_field_to_queues.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE witness_inputs ADD COLUMN IF NOT EXISTS error VARCHAR; +ALTER TABLE prover_jobs ADD COLUMN IF NOT EXISTS error VARCHAR; + diff --git a/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.down.sql b/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.down.sql new file mode 100644 index 000000000000..e07fe59a28d0 --- /dev/null +++ b/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.down.sql @@ -0,0 +1,2 @@ +DROP TABLE contract_verification_zksolc_versions; +DROP TABLE contract_verification_solc_versions; diff --git 
a/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.up.sql b/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.up.sql new file mode 100644 index 000000000000..9c87640a04f2 --- /dev/null +++ b/core/lib/dal/migrations/20220908103955_verifier-compilers-versions.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE contract_verification_zksolc_versions ( + version TEXT NOT NULL, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); +CREATE TABLE contract_verification_solc_versions ( + version TEXT NOT NULL, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); diff --git a/core/lib/dal/migrations/20220908145203_remove-redundant-fields.down.sql b/core/lib/dal/migrations/20220908145203_remove-redundant-fields.down.sql new file mode 100644 index 000000000000..50f09af0f2e2 --- /dev/null +++ b/core/lib/dal/migrations/20220908145203_remove-redundant-fields.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions ADD column type TEXT; +ALTER TABLE transactions ADD column fee_token BYTEA; diff --git a/core/lib/dal/migrations/20220908145203_remove-redundant-fields.up.sql b/core/lib/dal/migrations/20220908145203_remove-redundant-fields.up.sql new file mode 100644 index 000000000000..932a7bb975d5 --- /dev/null +++ b/core/lib/dal/migrations/20220908145203_remove-redundant-fields.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE transactions DROP column type; +ALTER TABLE transactions DROP column fee_token; diff --git a/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.down.sql b/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.down.sql new file mode 100644 index 000000000000..f7b780d1a6a4 --- /dev/null +++ b/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.down.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs DROP CONSTRAINT unique_witnesses; diff --git a/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.up.sql 
b/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.up.sql new file mode 100644 index 000000000000..4d6d614639b6 --- /dev/null +++ b/core/lib/dal/migrations/20220914102048_unique-blocknumber-for-witnesses.up.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs ADD CONSTRAINT unique_witnesses UNIQUE (l1_batch_number); diff --git a/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.down.sql b/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.down.sql new file mode 100644 index 000000000000..ca08b687cc96 --- /dev/null +++ b/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.down.sql @@ -0,0 +1 @@ +ALTER TABLE transactions DROP COLUMN l1_batch_tx_index; diff --git a/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.up.sql b/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.up.sql new file mode 100644 index 000000000000..b184dd44f8a1 --- /dev/null +++ b/core/lib/dal/migrations/20220915125943_l1_batch_tx_number.up.sql @@ -0,0 +1 @@ +ALTER TABLE transactions ADD COLUMN l1_batch_tx_index INT; diff --git a/core/lib/dal/migrations/20220930085018_add_proof_aggregations.down.sql b/core/lib/dal/migrations/20220930085018_add_proof_aggregations.down.sql new file mode 100644 index 000000000000..ec73b43cabdc --- /dev/null +++ b/core/lib/dal/migrations/20220930085018_add_proof_aggregations.down.sql @@ -0,0 +1,4 @@ +DROP TABLE IF EXISTS leaf_aggregation_witness_jobs; +DROP TABLE IF EXISTS node_aggregation_witness_jobs; +ALTER TABLE prover_jobs DROP COLUMN aggregation_round; + diff --git a/core/lib/dal/migrations/20220930085018_add_proof_aggregations.up.sql b/core/lib/dal/migrations/20220930085018_add_proof_aggregations.up.sql new file mode 100644 index 000000000000..357118b4ba30 --- /dev/null +++ b/core/lib/dal/migrations/20220930085018_add_proof_aggregations.up.sql @@ -0,0 +1,35 @@ +CREATE TABLE IF NOT EXISTS leaf_aggregation_witness_jobs +( + l1_batch_number BIGINT PRIMARY KEY REFERENCES l1_batches (number) ON DELETE CASCADE, + 
+ basic_circuits BYTEA NOT NULL, + basic_circuits_inputs BYTEA NOT NULL, + + number_of_basic_circuits INT NOT NULL, + status TEXT NOT NULL, + processing_started_at TIMESTAMP, + time_taken TIME, + error TEXT, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE TABLE IF NOT EXISTS node_aggregation_witness_jobs +( + l1_batch_number BIGINT PRIMARY KEY REFERENCES l1_batches (number) ON DELETE CASCADE, + + leaf_layer_subqueues BYTEA NOT NULL, + aggregation_outputs BYTEA NOT NULL, + + number_of_leaf_circuits INT NOT NULL, + status TEXT NOT NULL, + processing_started_at TIMESTAMP, + time_taken TIME, + error TEXT, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +-- 0 for basic, 1 for leaf, 2 for node, 3 for scheduler +ALTER TABLE prover_jobs ADD COLUMN aggregation_round INT NOT NULL DEFAULT 0; + diff --git a/core/lib/dal/migrations/20221001090302_add_proof_result.down.sql b/core/lib/dal/migrations/20221001090302_add_proof_result.down.sql new file mode 100644 index 000000000000..becad2d913a2 --- /dev/null +++ b/core/lib/dal/migrations/20221001090302_add_proof_result.down.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs DROP COLUMN result; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221001090302_add_proof_result.up.sql b/core/lib/dal/migrations/20221001090302_add_proof_result.up.sql new file mode 100644 index 000000000000..87e20d2806cc --- /dev/null +++ b/core/lib/dal/migrations/20221001090302_add_proof_result.up.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs ADD COLUMN result BYTEA; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.down.sql b/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.down.sql new file mode 100644 index 000000000000..d82bc5d9de0c --- /dev/null +++ b/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.down.sql @@ -0,0 +1 @@ +DROP TABLE static_artifact_storage; \ No newline at end of file diff --git 
a/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.up.sql b/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.up.sql new file mode 100644 index 000000000000..845ea1756aa6 --- /dev/null +++ b/core/lib/dal/migrations/20221001131821_add_static_artifact_storage.up.sql @@ -0,0 +1,7 @@ +CREATE TABLE static_artifact_storage +( + key VARCHAR NOT NULL PRIMARY KEY, + value BYTEA NOT NULL, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); \ No newline at end of file diff --git a/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.down.sql b/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.down.sql new file mode 100644 index 000000000000..99f7e93e8c04 --- /dev/null +++ b/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.down.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs DROP COLUMN sequence_number; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.up.sql b/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.up.sql new file mode 100644 index 000000000000..3c2f55766e64 --- /dev/null +++ b/core/lib/dal/migrations/20221002190817_add_circuit_id_to_proofs.up.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs ADD COLUMN IF NOT EXISTS sequence_number INT NOT NULL default 0; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.down.sql b/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.down.sql new file mode 100644 index 000000000000..50584ec7ceb0 --- /dev/null +++ b/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE l1_batches DROP COLUMN aux_data_hash; +ALTER TABLE l1_batches DROP COLUMN pass_through_data_hash; +ALTER TABLE l1_batches DROP COLUMN meta_parameters_hash; diff --git a/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.up.sql 
b/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.up.sql new file mode 100644 index 000000000000..c09ae8b77dc4 --- /dev/null +++ b/core/lib/dal/migrations/20221003090515_add-commitment-hash-fields.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE l1_batches ADD COLUMN aux_data_hash BYTEA; +ALTER TABLE l1_batches ADD COLUMN pass_through_data_hash BYTEA; +ALTER TABLE l1_batches ADD COLUMN meta_parameters_hash BYTEA; diff --git a/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.down.sql b/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.down.sql new file mode 100644 index 000000000000..f240e7732846 --- /dev/null +++ b/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS scheduler_witness_jobs; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.up.sql b/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.up.sql new file mode 100644 index 000000000000..143d80e7f3f2 --- /dev/null +++ b/core/lib/dal/migrations/20221004114549_add_scheduler_jobs_table.up.sql @@ -0,0 +1,18 @@ +CREATE TABLE IF NOT EXISTS scheduler_witness_jobs +( + l1_batch_number BIGINT PRIMARY KEY REFERENCES l1_batches (number) ON DELETE CASCADE, + + scheduler_witness BYTEA NOT NULL, + final_node_aggregations BYTEA, + + status TEXT NOT NULL, + processing_started_at TIMESTAMP, + time_taken TIME, + error TEXT, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +ALTER TABLE node_aggregation_witness_jobs ALTER COLUMN leaf_layer_subqueues DROP NOT NULL; +ALTER TABLE node_aggregation_witness_jobs ALTER COLUMN aggregation_outputs DROP NOT NULL; +ALTER TABLE node_aggregation_witness_jobs ALTER COLUMN number_of_leaf_circuits DROP NOT NULL; diff --git a/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.down.sql b/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.down.sql new file mode 100644 index 
000000000000..98fc878a6949 --- /dev/null +++ b/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.down.sql @@ -0,0 +1,6 @@ +ALTER TABLE prover_jobs DROP COLUMN attempts; +ALTER TABLE witness_inputs DROP COLUMN attempts; +ALTER TABLE leaf_aggregation_witness_jobs DROP COLUMN attempts; +ALTER TABLE node_aggregation_witness_jobs DROP COLUMN attempts; +ALTER TABLE scheduler_witness_jobs DROP COLUMN attempts; +ALTER TABLE contract_verification_requests DROP COLUMN attempts; diff --git a/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.up.sql b/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.up.sql new file mode 100644 index 000000000000..62587d84b4cd --- /dev/null +++ b/core/lib/dal/migrations/20221007085909_add-attempts-for-jobs.up.sql @@ -0,0 +1,6 @@ +ALTER TABLE prover_jobs ADD COLUMN attempts INT NOT NULL DEFAULT 0; +ALTER TABLE witness_inputs ADD COLUMN attempts INT NOT NULL DEFAULT 0; +ALTER TABLE leaf_aggregation_witness_jobs ADD COLUMN attempts INT NOT NULL DEFAULT 0; +ALTER TABLE node_aggregation_witness_jobs ADD COLUMN attempts INT NOT NULL DEFAULT 0; +ALTER TABLE scheduler_witness_jobs ADD COLUMN attempts INT NOT NULL DEFAULT 0; +ALTER TABLE contract_verification_requests ADD COLUMN attempts INT NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.down.sql b/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.down.sql new file mode 100644 index 000000000000..36e379675cd4 --- /dev/null +++ b/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.down.sql @@ -0,0 +1 @@ +ALTER TABLE l1_batches ADD COLUMN ergs_per_code_decommittment_word INT NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.up.sql b/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.up.sql new file mode 100644 index 000000000000..e56e2c327fcc --- /dev/null +++ 
b/core/lib/dal/migrations/20221010145858_remove_ergs_per_code_decommittment_word.up.sql @@ -0,0 +1 @@ +ALTER TABLE l1_batches DROP COLUMN ergs_per_code_decommittment_word; diff --git a/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.down.sql b/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.down.sql new file mode 100644 index 000000000000..8fe097d874e0 --- /dev/null +++ b/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.down.sql @@ -0,0 +1 @@ +ALTER TABLE scheduler_witness_jobs DROP COLUMN aggregation_result_coords; diff --git a/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.up.sql b/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.up.sql new file mode 100644 index 000000000000..c0ba3c13dfdc --- /dev/null +++ b/core/lib/dal/migrations/20221013104735_add_aggregation_result_coords_to_scheduler_jobs.up.sql @@ -0,0 +1 @@ +ALTER TABLE scheduler_witness_jobs ADD COLUMN aggregation_result_coords BYTEA; diff --git a/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.down.sql b/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.down.sql new file mode 100644 index 000000000000..918107edd9db --- /dev/null +++ b/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.down.sql @@ -0,0 +1 @@ +ALTER TABLE contract_verification_requests DROP COLUMN panic_message; diff --git a/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.up.sql b/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.up.sql new file mode 100644 index 000000000000..278dfb8977af --- /dev/null +++ b/core/lib/dal/migrations/20221018114513_verification-requests-panic-messages.up.sql @@ -0,0 +1 @@ +ALTER TABLE contract_verification_requests ADD COLUMN panic_message TEXT; diff --git 
a/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.down.sql b/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.down.sql new file mode 100644 index 000000000000..446f6bbdc634 --- /dev/null +++ b/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.down.sql @@ -0,0 +1,2 @@ +DROP INDEX l2_to_l1_logs_tx_hash_index; +DROP TABLE l2_to_l1_logs; diff --git a/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.up.sql b/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.up.sql new file mode 100644 index 000000000000..0c5f29dc89e9 --- /dev/null +++ b/core/lib/dal/migrations/20221019112725_l2_to_l1_logs_table.up.sql @@ -0,0 +1,22 @@ +CREATE TABLE l2_to_l1_logs ( + miniblock_number BIGINT NOT NULL REFERENCES miniblocks (number) ON DELETE CASCADE, + log_index_in_miniblock INT NOT NULL, + log_index_in_tx INT NOT NULL, + + tx_hash BYTEA NOT NULL, + + shard_id INT NOT NULL, + is_service BOOLEAN NOT NULL, + tx_index_in_miniblock INT NOT NULL, + tx_index_in_l1_batch INT NOT NULL, + sender BYTEA NOT NULL, + key BYTEA NOT NULL, + value BYTEA NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL, + + PRIMARY KEY (miniblock_number, log_index_in_miniblock) +); + +CREATE INDEX l2_to_l1_logs_tx_hash_index ON l2_to_l1_logs USING hash (tx_hash); diff --git a/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.down.sql b/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.down.sql new file mode 100644 index 000000000000..6c6eb6db94e8 --- /dev/null +++ b/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.down.sql @@ -0,0 +1 @@ +DROP INDEX miniblocks_hash; diff --git a/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.up.sql b/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.up.sql new file mode 100644 index 000000000000..54178c9184c3 --- /dev/null +++ b/core/lib/dal/migrations/20221031110209_miniblocks-hash-index.up.sql @@ -0,0 +1 @@ +CREATE INDEX miniblocks_hash ON miniblocks 
USING hash (hash); diff --git a/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.down.sql b/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.down.sql new file mode 100644 index 000000000000..49dd6ccac2e6 --- /dev/null +++ b/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.down.sql @@ -0,0 +1,4 @@ +ALTER TABLE eth_txs_history DROP COLUMN signed_raw_tx; +ALTER table eth_txs ALTER COLUMN sent_at_block SET NOT NULL; +ALTER table eth_txs_history DROP COLUMN sent_at_block; +ALTER table eth_txs_history DROP COLUMN sent_at; diff --git a/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.up.sql b/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.up.sql new file mode 100644 index 000000000000..bbc8c17dec4f --- /dev/null +++ b/core/lib/dal/migrations/20221103104136_add-signed-raw-tx.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE eth_txs_history ADD COLUMN signed_raw_tx BYTEA; +-- Deprecated column +ALTER table eth_txs ALTER COLUMN sent_at_block DROP NOT NULL; +ALTER table eth_txs_history ADD COLUMN sent_at_block INT; +ALTER table eth_txs_history ADD COLUMN sent_at TIMESTAMP; diff --git a/core/lib/dal/migrations/20221108190838_set-primary-keys.down.sql b/core/lib/dal/migrations/20221108190838_set-primary-keys.down.sql new file mode 100644 index 000000000000..0a259e655e98 --- /dev/null +++ b/core/lib/dal/migrations/20221108190838_set-primary-keys.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE contract_verification_zksolc_versions DROP CONSTRAINT contract_verification_zksolc_versions_pkey; +ALTER TABLE contract_verification_solc_versions DROP CONSTRAINT contract_verification_solc_versions_pkey; +CREATE INDEX witness_inputs_block_number_idx ON witness_inputs USING btree (l1_batch_number); +ALTER TABLE witness_inputs ADD CONSTRAINT unique_witnesses UNIQUE (l1_batch_number); +ALTER TABLE witness_inputs DROP CONSTRAINT witness_inputs_pkey; diff --git a/core/lib/dal/migrations/20221108190838_set-primary-keys.up.sql 
b/core/lib/dal/migrations/20221108190838_set-primary-keys.up.sql new file mode 100644 index 000000000000..0253fd5468bc --- /dev/null +++ b/core/lib/dal/migrations/20221108190838_set-primary-keys.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE contract_verification_zksolc_versions ADD CONSTRAINT contract_verification_zksolc_versions_pkey PRIMARY KEY ( version ); +ALTER TABLE contract_verification_solc_versions ADD CONSTRAINT contract_verification_solc_versions_pkey PRIMARY KEY ( version ); +ALTER TABLE witness_inputs ADD CONSTRAINT witness_inputs_pkey PRIMARY KEY (l1_batch_number); +DROP INDEX witness_inputs_block_number_idx; +ALTER TABLE witness_inputs DROP CONSTRAINT unique_witnesses; diff --git a/core/lib/dal/migrations/20221109094807_block-skip-proof.down.sql b/core/lib/dal/migrations/20221109094807_block-skip-proof.down.sql new file mode 100644 index 000000000000..67a8b130d5d1 --- /dev/null +++ b/core/lib/dal/migrations/20221109094807_block-skip-proof.down.sql @@ -0,0 +1 @@ +ALTER TABLE l1_batches DROP COLUMN skip_proof; diff --git a/core/lib/dal/migrations/20221109094807_block-skip-proof.up.sql b/core/lib/dal/migrations/20221109094807_block-skip-proof.up.sql new file mode 100644 index 000000000000..2aed4ea59408 --- /dev/null +++ b/core/lib/dal/migrations/20221109094807_block-skip-proof.up.sql @@ -0,0 +1 @@ +ALTER TABLE l1_batches ADD COLUMN skip_proof BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.down.sql b/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.down.sql new file mode 100644 index 000000000000..5582af5dc89a --- /dev/null +++ b/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.down.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs DROP COLUMN IF EXISTS merkel_tree_paths_blob_url; diff --git a/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.up.sql 
b/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.up.sql new file mode 100644 index 000000000000..960497c63a04 --- /dev/null +++ b/core/lib/dal/migrations/20221110094012_add_merkel_tree_paths_blob_url_in_witness_inputs.up.sql @@ -0,0 +1 @@ +ALTER TABLE witness_inputs ADD COLUMN IF NOT EXISTS merkel_tree_paths_blob_url TEXT; diff --git a/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.down.sql b/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.down.sql new file mode 100644 index 000000000000..745d497f2de9 --- /dev/null +++ b/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE leaf_aggregation_witness_jobs DROP COLUMN IF EXISTS basic_circuits_blob_url; +ALTER TABLE leaf_aggregation_witness_jobs DROP COLUMN IF EXISTS basic_circuits_inputs_blob_url; diff --git a/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.up.sql b/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.up.sql new file mode 100644 index 000000000000..55a17170dc01 --- /dev/null +++ b/core/lib/dal/migrations/20221110094252_add_basic_circuits_and_inputs_blob_url_in_leaf_aggregation_witness_jobs.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE leaf_aggregation_witness_jobs ADD COLUMN IF NOT EXISTS basic_circuits_blob_url TEXT; +ALTER TABLE leaf_aggregation_witness_jobs ADD COLUMN IF NOT EXISTS basic_circuits_inputs_blob_url TEXT; diff --git a/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.down.sql b/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.down.sql new file mode 100644 index 
000000000000..fa696a380f1b --- /dev/null +++ b/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE node_aggregation_witness_jobs DROP COLUMN IF EXISTS leaf_layer_subqueues_blob_url; +ALTER TABLE node_aggregation_witness_jobs DROP COLUMN IF EXISTS aggregation_outputs_blob_url; diff --git a/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.up.sql b/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.up.sql new file mode 100644 index 000000000000..06537916909a --- /dev/null +++ b/core/lib/dal/migrations/20221110094339_add_leaf_layer_subques_and_aggregation_output_blob_url_in_node_aggregation_witness_jobs.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE node_aggregation_witness_jobs ADD COLUMN IF NOT EXISTS leaf_layer_subqueues_blob_url TEXT; +ALTER TABLE node_aggregation_witness_jobs ADD COLUMN IF NOT EXISTS aggregation_outputs_blob_url TEXT; diff --git a/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.down.sql b/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.down.sql new file mode 100644 index 000000000000..fe261ab03e1d --- /dev/null +++ b/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE scheduler_witness_jobs DROP COLUMN IF EXISTS scheduler_witness_blob_url; +ALTER TABLE scheduler_witness_jobs DROP COLUMN IF EXISTS final_node_aggregations_blob_url; diff --git a/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.up.sql 
b/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.up.sql new file mode 100644 index 000000000000..6f0ee870e657 --- /dev/null +++ b/core/lib/dal/migrations/20221110094730_add_scheduler_witness_and_final_node_aggregations_blob_url_in_scheduler_witness_jobs.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE scheduler_witness_jobs ADD COLUMN IF NOT EXISTS scheduler_witness_blob_url TEXT; +ALTER TABLE scheduler_witness_jobs ADD COLUMN IF NOT EXISTS final_node_aggregations_blob_url TEXT; diff --git a/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.down.sql b/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.down.sql new file mode 100644 index 000000000000..cab73801edb2 --- /dev/null +++ b/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.down.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs DROP COLUMN IF EXISTS circuit_input_blob_url; diff --git a/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.up.sql b/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.up.sql new file mode 100644 index 000000000000..08b54f502e0d --- /dev/null +++ b/core/lib/dal/migrations/20221110095052_add_circuit_input_blob_url_column_in_prover_jobs.up.sql @@ -0,0 +1 @@ +ALTER TABLE prover_jobs ADD COLUMN IF NOT EXISTS circuit_input_blob_url TEXT; diff --git a/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.down.sql b/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.down.sql new file mode 100644 index 000000000000..a9eb93dcba6b --- /dev/null +++ b/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.down.sql @@ -0,0 +1,2 @@ +DROP INDEX events_tx_initiator_address_idx; +ALTER TABLE events DROP COLUMN tx_initiator_address; diff --git 
a/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.up.sql b/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.up.sql new file mode 100644 index 000000000000..9d14f774358e --- /dev/null +++ b/core/lib/dal/migrations/20221202102801_events-tx-initiator-address.up.sql @@ -0,0 +1,4 @@ +-- `tx_initiator_address` column is needed only +-- for the Explorer API (in particular for the query from `get_account_transactions_hashes_page` method). +ALTER TABLE events ADD COLUMN tx_initiator_address BYTEA NOT NULL DEFAULT '\x0000000000000000000000000000000000000000'::bytea; +CREATE INDEX events_tx_initiator_address_idx ON events (tx_initiator_address); diff --git a/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.down.sql b/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.down.sql new file mode 100644 index 000000000000..5c7d3eee0aed --- /dev/null +++ b/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.down.sql @@ -0,0 +1 @@ +DROP INDEX prover_jobs_composite_index; diff --git a/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.up.sql b/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.up.sql new file mode 100644 index 000000000000..54f23a744b86 --- /dev/null +++ b/core/lib/dal/migrations/20221215085757_add_composite_index_to_prover_jobs.up.sql @@ -0,0 +1 @@ +CREATE UNIQUE INDEX prover_jobs_composite_index ON prover_jobs(l1_batch_number, aggregation_round, sequence_number); diff --git a/core/lib/dal/migrations/20221215094205_prover-job-identity.down.sql b/core/lib/dal/migrations/20221215094205_prover-job-identity.down.sql new file mode 100644 index 000000000000..5e4c66313762 --- /dev/null +++ b/core/lib/dal/migrations/20221215094205_prover-job-identity.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE prover_jobs + DROP COLUMN proccesed_by; \ No newline at end of file diff --git 
a/core/lib/dal/migrations/20221215094205_prover-job-identity.up.sql b/core/lib/dal/migrations/20221215094205_prover-job-identity.up.sql new file mode 100644 index 000000000000..9da908fff651 --- /dev/null +++ b/core/lib/dal/migrations/20221215094205_prover-job-identity.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE prover_jobs + ADD proccesed_by TEXT; \ No newline at end of file diff --git a/core/lib/dal/migrations/20221227165603_fee_model.down.sql b/core/lib/dal/migrations/20221227165603_fee_model.down.sql new file mode 100644 index 000000000000..311fc7884859 --- /dev/null +++ b/core/lib/dal/migrations/20221227165603_fee_model.down.sql @@ -0,0 +1,10 @@ +ALTER TABLE miniblocks DROP COLUMN l1_gas_price; +ALTER TABLE miniblocks DROP COLUMN l2_fair_ergs_price; + +ALTER TABLE l1_batches DROP COLUMN l1_gas_price; +ALTER TABLE l1_batches DROP COLUMN l2_fair_ergs_price; + +ALTER TABLE transactions DROP COLUMN refunded_ergs; + +-- ALTER TABLE miniblocks ADD COLUMN ergs_per_pubdata_limit BIGINT NOT NULL DEFAULT 0; +-- ALTER TABLE l1_batches ADD COLUMN ergs_per_pubdata_limit BIGINT NOT NULL DEFAULT 0; diff --git a/core/lib/dal/migrations/20221227165603_fee_model.up.sql b/core/lib/dal/migrations/20221227165603_fee_model.up.sql new file mode 100644 index 000000000000..bc9da745840f --- /dev/null +++ b/core/lib/dal/migrations/20221227165603_fee_model.up.sql @@ -0,0 +1,11 @@ +ALTER TABLE miniblocks ADD COLUMN l1_gas_price BIGINT NOT NULL DEFAULT 0; +ALTER TABLE miniblocks ADD COLUMN l2_fair_ergs_price BIGINT NOT NULL DEFAULT 0; + +ALTER TABLE l1_batches ADD COLUMN l1_gas_price BIGINT NOT NULL DEFAULT 0; +ALTER TABLE l1_batches ADD COLUMN l2_fair_ergs_price BIGINT NOT NULL DEFAULT 0; + +ALTER TABLE transactions ADD COLUMN refunded_ergs BIGINT NOT NULL DEFAULT 0; + +-- ALTER TABLE miniblocks DROP COLUMN ergs_per_pubdata_limit; +-- ALTER TABLE l1_batches DROP COLUMN ergs_per_pubdata_limit; +-- ALTER TABLE l1_batches DROP COLUMN ergs_per_pubdata_byte_in_block; diff --git 
a/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.down.sql b/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.down.sql new file mode 100644 index 000000000000..b8182517ca53 --- /dev/null +++ b/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS gpu_prover_queue; diff --git a/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.up.sql b/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.up.sql new file mode 100644 index 000000000000..d074883e05de --- /dev/null +++ b/core/lib/dal/migrations/20230104104142_add_gpu_prover_queue_table.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS gpu_prover_queue +( + instance_host INET NOT NULL, + instance_port INT NOT NULL + CONSTRAINT valid_port CHECK (instance_port >= 0 AND instance_port <= 65535), + instance_status TEXT NOT NULL, + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL, + processing_started_at TIMESTAMP, + PRIMARY KEY (instance_host, instance_port) +); diff --git a/core/lib/dal/migrations/20230105122559_protective_reads.down.sql b/core/lib/dal/migrations/20230105122559_protective_reads.down.sql new file mode 100644 index 000000000000..5a7a64eed9f0 --- /dev/null +++ b/core/lib/dal/migrations/20230105122559_protective_reads.down.sql @@ -0,0 +1,2 @@ +DROP INDEX protective_reads_l1_batch_number_index; +DROP TABLE protective_reads; diff --git a/core/lib/dal/migrations/20230105122559_protective_reads.up.sql b/core/lib/dal/migrations/20230105122559_protective_reads.up.sql new file mode 100644 index 000000000000..e42b6581cbb8 --- /dev/null +++ b/core/lib/dal/migrations/20230105122559_protective_reads.up.sql @@ -0,0 +1,11 @@ +CREATE TABLE protective_reads ( + l1_batch_number BIGINT REFERENCES l1_batches (number) ON DELETE CASCADE, + address BYTEA NOT NULL, + key BYTEA NOT NULL, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL, + PRIMARY KEY (address, key, l1_batch_number) 
+); + +CREATE INDEX protective_reads_l1_batch_number_index ON protective_reads (l1_batch_number); diff --git a/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.down.sql b/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.down.sql new file mode 100644 index 000000000000..b8e5ee3bf510 --- /dev/null +++ b/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE contracts ALTER COLUMN bytecode SET NOT NULL; +ALTER TABLE contracts ALTER COLUMN tx_hash SET NOT NULL; +ALTER TABLE contracts ALTER COLUMN miniblock_number SET NOT NULL; +ALTER TABLE contracts ALTER COLUMN created_at SET NOT NULL; +ALTER TABLE contracts ALTER COLUMN updated_at SET NOT NULL; diff --git a/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.up.sql b/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.up.sql new file mode 100644 index 000000000000..c568109d40b7 --- /dev/null +++ b/core/lib/dal/migrations/20230105160906_remove_contracts_null_restriction.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE contracts ALTER COLUMN bytecode DROP NOT NULL; +ALTER TABLE contracts ALTER COLUMN tx_hash DROP NOT NULL; +ALTER TABLE contracts ALTER COLUMN miniblock_number DROP NOT NULL; +ALTER TABLE contracts ALTER COLUMN created_at DROP NOT NULL; +ALTER TABLE contracts ALTER COLUMN updated_at DROP NOT NULL; diff --git a/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.down.sql b/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.down.sql new file mode 100644 index 000000000000..57e38498c54d --- /dev/null +++ b/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.down.sql @@ -0,0 +1,14 @@ +ALTER TABLE prover_jobs + DROP COLUMN IF EXISTS is_blob_cleaned; + +ALTER TABLE witness_inputs + DROP COLUMN IF EXISTS is_blob_cleaned; + +ALTER TABLE leaf_aggregation_witness_jobs 
+ DROP COLUMN IF EXISTS is_blob_cleaned; + +ALTER TABLE node_aggregation_witness_jobs + DROP COLUMN IF EXISTS is_blob_cleaned; + +ALTER TABLE scheduler_witness_jobs + DROP COLUMN IF EXISTS is_blob_cleaned; diff --git a/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.up.sql b/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.up.sql new file mode 100644 index 000000000000..2e27755beaf9 --- /dev/null +++ b/core/lib/dal/migrations/20230109123145_add_blob_cleanup_status_column_to_tables_with_blobs.up.sql @@ -0,0 +1,14 @@ +ALTER TABLE prover_jobs + ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE witness_inputs + ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE leaf_aggregation_witness_jobs + ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE node_aggregation_witness_jobs + ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN NOT NULL DEFAULT FALSE; + +ALTER TABLE scheduler_witness_jobs + ADD COLUMN IF NOT EXISTS is_blob_cleaned BOOLEAN NOT NULL DEFAULT FALSE; diff --git a/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.down.sql b/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.down.sql new file mode 100644 index 000000000000..bb167ea47da5 --- /dev/null +++ b/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.down.sql @@ -0,0 +1,9 @@ +DROP INDEX IF EXISTS prover_jobs_blob_cleanup_status_index; + +DROP INDEX IF EXISTS witness_inputs_blob_cleanup_status_index; + +DROP INDEX IF EXISTS leaf_aggregation_witness_jobs_blob_cleanup_status_index; + +DROP INDEX IF EXISTS node_aggregation_witness_jobs_blob_cleanup_status_index; + +DROP INDEX IF EXISTS scheduler_witness_jobs_blob_cleanup_status_index; diff --git 
a/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.up.sql b/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.up.sql new file mode 100644 index 000000000000..c589a9aed46e --- /dev/null +++ b/core/lib/dal/migrations/20230109123703_add_blob_cleanup_status_index_for_tables_with_blobs.up.sql @@ -0,0 +1,9 @@ +CREATE INDEX IF NOT EXISTS prover_jobs_blob_cleanup_status_index ON prover_jobs (status, is_blob_cleaned); + +CREATE INDEX IF NOT EXISTS witness_inputs_blob_cleanup_status_index ON witness_inputs (status, is_blob_cleaned); + +CREATE INDEX IF NOT EXISTS leaf_aggregation_witness_jobs_blob_cleanup_status_index ON leaf_aggregation_witness_jobs (status, is_blob_cleaned); + +CREATE INDEX IF NOT EXISTS node_aggregation_witness_jobs_blob_cleanup_status_index ON node_aggregation_witness_jobs (status, is_blob_cleaned); + +CREATE INDEX IF NOT EXISTS scheduler_witness_jobs_blob_cleanup_status_index ON scheduler_witness_jobs (status, is_blob_cleaned); diff --git a/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.down.sql b/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.down.sql new file mode 100644 index 000000000000..1699b97746df --- /dev/null +++ b/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.down.sql @@ -0,0 +1,5 @@ +ALTER TABLE contracts ADD COLUMN bytecode BYTEA; +ALTER TABLE contracts ADD COLUMN tx_hash BYTEA; +ALTER TABLE contracts ADD COLUMN miniblock_number BIGINT; + +ALTER TABLE contracts_verification_info RENAME TO contracts; diff --git a/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.up.sql b/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.up.sql new file mode 100644 index 000000000000..ac40492b58bc --- /dev/null +++ b/core/lib/dal/migrations/20230111122457_refactor_contracts_verification_info.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE contracts DROP 
COLUMN bytecode; +ALTER TABLE contracts DROP COLUMN tx_hash; +ALTER TABLE contracts DROP COLUMN miniblock_number; + +DELETE FROM contracts WHERE verification_info IS NULL; + +ALTER TABLE contracts RENAME TO contracts_verification_info; diff --git a/core/lib/dal/migrations/20230112111801_initial_writes.down.sql b/core/lib/dal/migrations/20230112111801_initial_writes.down.sql new file mode 100644 index 000000000000..d29cf67bccab --- /dev/null +++ b/core/lib/dal/migrations/20230112111801_initial_writes.down.sql @@ -0,0 +1,2 @@ +DROP INDEX initial_writes_l1_batch_number_index; +DROP TABLE initial_writes; diff --git a/core/lib/dal/migrations/20230112111801_initial_writes.up.sql b/core/lib/dal/migrations/20230112111801_initial_writes.up.sql new file mode 100644 index 000000000000..3a8bec0b0554 --- /dev/null +++ b/core/lib/dal/migrations/20230112111801_initial_writes.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE initial_writes ( + hashed_key BYTEA NOT NULL PRIMARY KEY, + l1_batch_number BIGINT NOT NULL REFERENCES l1_batches (number) ON DELETE CASCADE, + + created_at TIMESTAMP NOT NULL, + updated_at TIMESTAMP NOT NULL +); + +CREATE INDEX initial_writes_l1_batch_number_index ON initial_writes (l1_batch_number); diff --git a/core/lib/dal/migrations/20230113113154_add_storage_logs_index.down.sql b/core/lib/dal/migrations/20230113113154_add_storage_logs_index.down.sql new file mode 100644 index 000000000000..05f59755223d --- /dev/null +++ b/core/lib/dal/migrations/20230113113154_add_storage_logs_index.down.sql @@ -0,0 +1 @@ +DROP INDEX storage_logs_contract_address_tx_hash_idx; diff --git a/core/lib/dal/migrations/20230113113154_add_storage_logs_index.up.sql b/core/lib/dal/migrations/20230113113154_add_storage_logs_index.up.sql new file mode 100644 index 000000000000..a300bb592c7e --- /dev/null +++ b/core/lib/dal/migrations/20230113113154_add_storage_logs_index.up.sql @@ -0,0 +1,2 @@ +-- This is the ACCOUNT_CODE_STORAGE address. 
+CREATE INDEX storage_logs_contract_address_tx_hash_idx ON storage_logs (address, tx_hash) WHERE (address = '\x0000000000000000000000000000000000008002'); diff --git a/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.down.sql b/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.down.sql new file mode 100644 index 000000000000..17711f472168 --- /dev/null +++ b/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE gpu_prover_queue + DROP COLUMN IF EXISTS queue_free_slots, + DROP COLUMN IF EXISTS queue_capacity; diff --git a/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.up.sql b/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.up.sql new file mode 100644 index 000000000000..ae01e7658f28 --- /dev/null +++ b/core/lib/dal/migrations/20230117123627_add_queue_capacity_and_free_slots_in_gpu_prover_queue.up.sql @@ -0,0 +1,3 @@ +ALTER TABLE gpu_prover_queue + ADD COLUMN IF NOT EXISTS queue_free_slots INTEGER NOT NULL, + ADD COLUMN IF NOT EXISTS queue_capacity INTEGER NOT NULL; diff --git a/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.down.sql b/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.down.sql new file mode 100644 index 000000000000..83bdf3812e7f --- /dev/null +++ b/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE gpu_prover_queue ALTER COLUMN queue_capacity SET NOT NULL; +ALTER TABLE gpu_prover_queue ALTER COLUMN queue_free_slots SET NOT NULL; diff --git a/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.up.sql b/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.up.sql new file mode 100644 index 000000000000..51ce1184baa1 --- /dev/null +++ 
b/core/lib/dal/migrations/20230119123216_drop_null_constraint_gpu_prover_queue.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE gpu_prover_queue ALTER COLUMN queue_capacity DROP NOT NULL; +ALTER TABLE gpu_prover_queue ALTER COLUMN queue_free_slots DROP NOT NULL; diff --git a/core/lib/dal/migrations/20230119182427_l1_refunds.down.sql b/core/lib/dal/migrations/20230119182427_l1_refunds.down.sql new file mode 100644 index 000000000000..34cd95cf8f8c --- /dev/null +++ b/core/lib/dal/migrations/20230119182427_l1_refunds.down.sql @@ -0,0 +1,4 @@ +-- Add down migration script here + +ALTER TABLE transactions DROP COLUMN l1_tx_mint; +ALTER TABLE transactions DROP COLUMN l1_tx_refund_recipient; diff --git a/core/lib/dal/migrations/20230119182427_l1_refunds.up.sql b/core/lib/dal/migrations/20230119182427_l1_refunds.up.sql new file mode 100644 index 000000000000..d83d116130d9 --- /dev/null +++ b/core/lib/dal/migrations/20230119182427_l1_refunds.up.sql @@ -0,0 +1,4 @@ +-- Add up migration script here + +ALTER TABLE transactions ADD COLUMN l1_tx_mint NUMERIC; +ALTER TABLE transactions ADD COLUMN l1_tx_refund_recipient BYTEA; diff --git a/core/lib/dal/migrations/20230202142858_ergs_to_gas.down.sql b/core/lib/dal/migrations/20230202142858_ergs_to_gas.down.sql new file mode 100644 index 000000000000..0c6949df0d0d --- /dev/null +++ b/core/lib/dal/migrations/20230202142858_ergs_to_gas.down.sql @@ -0,0 +1,20 @@ +-- Add down migration script here +ALTER TABLE transactions RENAME COLUMN gas_limit TO ergs_limit; +ALTER TABLE transactions RENAME COLUMN gas_per_storage_limit TO ergs_per_storage_limit; +ALTER TABLE transactions RENAME COLUMN gas_per_pubdata_limit TO ergs_per_pubdata_limit; +ALTER TABLE transactions RENAME COLUMN refunded_gas TO refunded_ergs; +ALTER TABLE transactions RENAME COLUMN max_fee_per_gas TO max_fee_per_erg; +ALTER TABLE transactions RENAME COLUMN max_priority_fee_per_gas TO max_priority_fee_per_erg; + +ALTER TABLE l1_batches RENAME COLUMN gas_per_pubdata_byte_in_block TO 
ergs_per_pubdata_byte_in_block; +ALTER TABLE l1_batches RENAME COLUMN base_fee_per_gas TO base_fee_per_erg; +ALTER TABLE l1_batches RENAME COLUMN gas_per_pubdata_limit TO ergs_per_pubdata_limit; + +ALTER TABLE l1_batches RENAME COLUMN l2_fair_gas_price TO l2_fair_ergs_price; + +ALTER TABLE miniblocks RENAME COLUMN l2_fair_gas_price TO l2_fair_ergs_price; +ALTER TABLE miniblocks RENAME COLUMN base_fee_per_gas TO base_fee_per_erg; +ALTER TABLE miniblocks RENAME COLUMN gas_per_pubdata_limit TO ergs_per_pubdata_limit; + + + diff --git a/core/lib/dal/migrations/20230202142858_ergs_to_gas.up.sql b/core/lib/dal/migrations/20230202142858_ergs_to_gas.up.sql new file mode 100644 index 000000000000..404a827eb6e3 --- /dev/null +++ b/core/lib/dal/migrations/20230202142858_ergs_to_gas.up.sql @@ -0,0 +1,20 @@ +-- -- Add up migration script here +ALTER TABLE transactions RENAME COLUMN ergs_limit TO gas_limit; +ALTER TABLE transactions RENAME COLUMN ergs_per_storage_limit TO gas_per_storage_limit; +ALTER TABLE transactions RENAME COLUMN ergs_per_pubdata_limit TO gas_per_pubdata_limit; +ALTER TABLE transactions RENAME COLUMN refunded_ergs TO refunded_gas; +ALTER TABLE transactions RENAME COLUMN max_fee_per_erg TO max_fee_per_gas; +ALTER TABLE transactions RENAME COLUMN max_priority_fee_per_erg TO max_priority_fee_per_gas; + +ALTER TABLE l1_batches RENAME COLUMN ergs_per_pubdata_byte_in_block TO gas_per_pubdata_byte_in_block; +ALTER TABLE l1_batches RENAME COLUMN base_fee_per_erg TO base_fee_per_gas; +ALTER TABLE l1_batches RENAME COLUMN ergs_per_pubdata_limit TO gas_per_pubdata_limit; + +ALTER TABLE l1_batches RENAME COLUMN l2_fair_ergs_price TO l2_fair_gas_price; + +ALTER TABLE miniblocks RENAME COLUMN l2_fair_ergs_price TO l2_fair_gas_price; +ALTER TABLE miniblocks RENAME COLUMN base_fee_per_erg TO base_fee_per_gas; +ALTER TABLE miniblocks RENAME COLUMN ergs_per_pubdata_limit TO gas_per_pubdata_limit; + + + diff --git 
a/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.down.sql b/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.down.sql new file mode 100644 index 000000000000..b4f49e277203 --- /dev/null +++ b/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE gpu_prover_queue + DROP COLUMN IF EXISTS specialized_prover_group_id; diff --git a/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.up.sql b/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.up.sql new file mode 100644 index 000000000000..3f30274e28a1 --- /dev/null +++ b/core/lib/dal/migrations/20230203102247_add_specialized_prover_group_id_in_gpu_prover_queue.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE gpu_prover_queue + ADD COLUMN IF NOT EXISTS specialized_prover_group_id SMALLINT; diff --git a/core/lib/dal/sqlx-data.json b/core/lib/dal/sqlx-data.json new file mode 100644 index 000000000000..ac5ce1d5900f --- /dev/null +++ b/core/lib/dal/sqlx-data.json @@ -0,0 +1,9428 @@ +{ + "db": "PostgreSQL", + "0016b523dc81ee51f566cf5f226a2a0b53c51e7d02318d6c23a55eb92cfa7f94": { + "describe": { + "columns": [ + { + "name": "initial_write_l1_batch_number?", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "current_l1_batch_number?", + "ordinal": 1, + "type_info": "Int8" + } + ], + "nullable": [ + null, + null + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "\n SELECT (SELECT l1_batch_number FROM initial_writes WHERE hashed_key = $1) as \"initial_write_l1_batch_number?\",\n (SELECT miniblocks.l1_batch_number FROM miniblocks WHERE number = $2) as \"current_l1_batch_number?\"\n " + }, + "00bf0b01e0ee03cd3fb8f1c88ac42c535532c55a15d23c57c32561cdffd91455": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + 
"nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM node_aggregation_witness_jobs\n WHERE length(leaf_layer_subqueues) <> 0\n OR length(aggregation_outputs) <> 0\n LIMIT $1;\n " + }, + "01189407fab9be050ae75249f75b9503343500af700f00721e295871fa969172": { + "describe": { + "columns": [ + { + "name": "l2_address", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT l2_address FROM tokens WHERE well_known = true" + }, + "01ebdc5b524e85033fb06d9166475f365643f744492e59ff12f10b419dd6d485": { + "describe": { + "columns": [ + { + "name": "bytecode_hash", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT bytecode_hash FROM factory_deps WHERE miniblock_number > $1" + }, + "021e878567c19a5ec20c79949da5286985f7b17d7b272e24f9a5c194050ec783": { + "describe": { + "columns": [ + { + "name": "l1_address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l2_address", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "symbol", + "ordinal": 2, + "type_info": "Varchar" + }, + { + "name": "name", + "ordinal": 3, + "type_info": "Varchar" + }, + { + "name": "decimals", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "usd_price", + "ordinal": 5, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT l1_address, l2_address, symbol, name, decimals, usd_price\n FROM tokens\n WHERE l2_address = $1\n " + }, + "026012668b1efe74d34e84d24b3c23a462e54846ddf854cae5eeac923d2468be": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "merkle_tree_paths", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "created_at", + 
"ordinal": 2, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 3, + "type_info": "Timestamp" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "time_taken", + "ordinal": 5, + "type_info": "Time" + }, + { + "name": "processing_started_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "error", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "attempts", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "merkel_tree_paths_blob_url", + "ordinal": 9, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 10, + "type_info": "Bool" + } + ], + "nullable": [ + false, + true, + false, + false, + false, + false, + true, + true, + false, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4" + ] + } + }, + "query": "\n UPDATE witness_inputs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE l1_batch_number = (\n SELECT l1_batch_number\n FROM witness_inputs\n WHERE status = 'queued' \n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n ORDER BY l1_batch_number ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING witness_inputs.*\n " + }, + "03a34f0fd82bed22f14c5b36554bb958d407e9724fa5ea5123edc3c6607e545c": { + "describe": { + "columns": [ + { + "name": "block_hash?", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "address!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic1!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "topic3!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "topic4!", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "miniblock_number!", + "ordinal": 7, + "type_info": "Int8" + }, + { + "name": 
"l1_batch_number?", + "ordinal": 8, + "type_info": "Int8" + }, + { + "name": "tx_hash!", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "tx_index_in_block!", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "event_index_in_block!", + "ordinal": 11, + "type_info": "Int4" + }, + { + "name": "event_index_in_tx!", + "ordinal": 12, + "type_info": "Int4" + } + ], + "nullable": [ + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n WITH events_select AS (\n SELECT\n address, topic1, topic2, topic3, topic4, value,\n miniblock_number, tx_hash, tx_index_in_block,\n event_index_in_block, event_index_in_tx\n FROM events\n WHERE miniblock_number > $1\n ORDER BY miniblock_number ASC, event_index_in_block ASC\n )\n SELECT miniblocks.hash as \"block_hash?\",\n address as \"address!\", topic1 as \"topic1!\", topic2 as \"topic2!\", topic3 as \"topic3!\", topic4 as \"topic4!\", value as \"value!\",\n miniblock_number as \"miniblock_number!\", miniblocks.l1_batch_number as \"l1_batch_number?\", tx_hash as \"tx_hash!\",\n tx_index_in_block as \"tx_index_in_block!\", event_index_in_block as \"event_index_in_block!\", event_index_in_tx as \"event_index_in_tx!\"\n FROM events_select\n INNER JOIN miniblocks ON events_select.miniblock_number = miniblocks.number\n ORDER BY miniblock_number ASC, event_index_in_block ASC\n " + }, + "07f14f401347d74b8bb3595f5ec75e6379a8af0e2e4cbd5ee78d70583925d60b": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + 
}, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "l1_gas_price", + "ordinal": 
30, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 33, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 35, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 36, + "type_info": "Numeric" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 41, + "type_info": "Int4" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + false, + false, + true, + true, + true, + true, + false, + true, + true, + true, + false, + true, + false + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n SELECT number, timestamp, is_finished, l1_tx_count, l2_tx_count, fee_account_address, bloom, priority_ops_onchain_data, hash, parent_hash, commitment, compressed_write_logs, compressed_contracts, eth_prove_tx_id, eth_commit_tx_id, eth_execute_tx_id, created_at, updated_at, merkle_root_hash, l2_to_l1_logs, l2_to_l1_messages, predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost, initial_bootloader_heap_content, used_contract_hashes, compressed_initial_writes, compressed_repeated_writes, 
l2_l1_compressed_messages, l2_l1_merkle_root, l1_gas_price, l2_fair_gas_price, rollup_last_leaf_index, zkporter_is_available, bootloader_code_hash, default_aa_code_hash, base_fee_per_gas, aux_data_hash, pass_through_data_hash, meta_parameters_hash, skip_proof, gas_per_pubdata_byte_in_block, gas_per_pubdata_limit\n FROM\n (SELECT l1_batches.*, row_number() over (order by number ASC) as row_number\n FROM l1_batches\n LEFT JOIN prover_jobs ON prover_jobs.l1_batch_number = l1_batches.number\n WHERE eth_commit_tx_id IS NOT NULL\n AND prover_jobs.aggregation_round = 3\n AND prover_jobs.status = 'successful'\n AND l1_batches.number > $1\n ORDER BY number LIMIT $2) inn\n WHERE number - row_number = $1\n " + }, + "0b934f7671826b45d5a6f95f30ae13f073a16bc54b1b933b52681901c676d623": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "is_priority", + "ordinal": 1, + "type_info": "Bool" + }, + { + "name": "full_fee", + "ordinal": 2, + "type_info": "Numeric" + }, + { + "name": "layer_2_tip_fee", + "ordinal": 3, + "type_info": "Numeric" + }, + { + "name": "initiator_address", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "nonce", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "signature", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "input", + "ordinal": 7, + "type_info": "Bytea" + }, + { + "name": "data", + "ordinal": 8, + "type_info": "Jsonb" + }, + { + "name": "received_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "priority_op_id", + "ordinal": 10, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 11, + "type_info": "Int8" + }, + { + "name": "index_in_block", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "error", + "ordinal": 13, + "type_info": "Varchar" + }, + { + "name": "gas_limit", + "ordinal": 14, + "type_info": "Numeric" + }, + { + "name": "gas_per_storage_limit", + "ordinal": 15, + "type_info": "Numeric" + }, + { + 
"name": "gas_per_pubdata_limit", + "ordinal": 16, + "type_info": "Numeric" + }, + { + "name": "tx_format", + "ordinal": 17, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 18, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 19, + "type_info": "Timestamp" + }, + { + "name": "execution_info", + "ordinal": 20, + "type_info": "Jsonb" + }, + { + "name": "contract_address", + "ordinal": 21, + "type_info": "Bytea" + }, + { + "name": "in_mempool", + "ordinal": 22, + "type_info": "Bool" + }, + { + "name": "l1_block_number", + "ordinal": 23, + "type_info": "Int4" + }, + { + "name": "value", + "ordinal": 24, + "type_info": "Numeric" + }, + { + "name": "paymaster", + "ordinal": 25, + "type_info": "Bytea" + }, + { + "name": "paymaster_input", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "max_fee_per_gas", + "ordinal": 27, + "type_info": "Numeric" + }, + { + "name": "max_priority_fee_per_gas", + "ordinal": 28, + "type_info": "Numeric" + }, + { + "name": "effective_gas_price", + "ordinal": 29, + "type_info": "Numeric" + }, + { + "name": "miniblock_number", + "ordinal": 30, + "type_info": "Int8" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 31, + "type_info": "Int4" + }, + { + "name": "refunded_gas", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "l1_tx_mint", + "ordinal": 33, + "type_info": "Numeric" + }, + { + "name": "l1_tx_refund_recipient", + "ordinal": 34, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + true, + true, + false, + true, + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + true, + false, + true, + false, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT * FROM transactions\n WHERE miniblock_number IS NOT NULL AND l1_batch_number IS NULL\n ORDER BY miniblock_number, index_in_block\n " + }, + 
"0cd13b94dc52a1a5228ed7a7c673add0aaf39a8bb378b97f49f256cb233b8a63": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [] + } + }, + "query": "DELETE FROM contract_verification_zksolc_versions" + }, + "0d1bed183c38304ff1a6c8c78dca03964e2e188a6d01f98eaf0c6b24f19b8b6f": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray" + ] + } + }, + "query": "UPDATE transactions SET in_mempool = FALSE FROM UNNEST ($1::bytea[]) AS s(address) WHERE transactions.in_mempool = TRUE AND transactions.initiator_address = s.address" + }, + "0d4dff244f0ea6685f9c2d9a7f639c9935ce30c7dfd2bf61784b6902aa1a7790": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "basic_circuits_blob_url", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "basic_circuits_inputs_blob_url", + "ordinal": 2, + "type_info": "Text" + } + ], + "nullable": [ + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number, basic_circuits_blob_url, basic_circuits_inputs_blob_url FROM leaf_aggregation_witness_jobs\n WHERE status='successful' AND is_blob_cleaned=FALSE\n AND basic_circuits_blob_url is NOT NULL\n AND basic_circuits_inputs_blob_url is NOT NULL\n AND updated_at < NOW() - INTERVAL '2 days'\n LIMIT $1;\n " + }, + "0d99b4015b29905862991e4f1a44a1021d48f50e99cb1701e7496ce6c3e15dc6": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(number) as \"number\" FROM l1_batches WHERE is_finished = TRUE" + }, + "0dec12063dac83663f109ff19174ccb53b7f1e710679e65f96d86d90887b848a": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE node_aggregation_witness_jobs\n SET leaf_layer_subqueues='',\n 
aggregation_outputs=''\n WHERE l1_batch_number = ANY($1);\n " + }, + "0f8a603899280c015b033c4160bc064865103e9d6d63a369f07a8e5d859a7b14": { + "describe": { + "columns": [ + { + "name": "timestamp", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT timestamp FROM miniblocks WHERE number = $1" + }, + "0fd885074c624bea478ec0a24a499cf1278773cdba92550439da5d3b70cbf38c": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "status!", + "ordinal": 1, + "type_info": "Text" + } + ], + "nullable": [ + null, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\", status as \"status!\"\n FROM prover_jobs\n GROUP BY status\n " + }, + "151aa7cab859c275f74f981ed146415e1e5242ebe259552d5b9fac333c0d9ce8": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE scheduler_witness_jobs\n SET is_blob_cleaned=TRUE\n WHERE l1_batch_number = ANY($1);\n " + }, + "157fc4ef4f5fd831399219850bc59ec0bd32d938ec8685dacaf913efdccfe7fe": { + "describe": { + "columns": [ + { + "name": "l1_address", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Numeric" + ] + } + }, + "query": "SELECT l1_address FROM tokens WHERE market_volume > $1" + }, + "17a42a97e87a675bd465103ebedc63d6d091e5bb093c7905de70aed3dc71d823": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM storage_logs WHERE miniblock_number > $1" + }, + "19b89495be8aa735db039ccc8a262786c58e54f132588c48f07d9537cf21d3ed": { + "describe": { + "columns": [ + { + "name": "sent_at_block", + "ordinal": 0, + "type_info": "Int4" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "SELECT sent_at_block 
FROM eth_txs_history WHERE eth_tx_id = $1 AND sent_at_block IS NOT NULL ORDER BY created_at ASC LIMIT 1" + }, + "1a91acea72e56513a2a9e667bd5a2c171baa5fec01c51dcb7c7cf33f736c854d": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "index_in_block", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 2, + "type_info": "Int4" + }, + { + "name": "block_number", + "ordinal": 3, + "type_info": "Int8" + }, + { + "name": "error", + "ordinal": 4, + "type_info": "Varchar" + }, + { + "name": "effective_gas_price", + "ordinal": 5, + "type_info": "Numeric" + }, + { + "name": "initiator_address", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "transfer_to?", + "ordinal": 7, + "type_info": "Jsonb" + }, + { + "name": "execute_contract_address?", + "ordinal": 8, + "type_info": "Jsonb" + }, + { + "name": "tx_format?", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "refunded_gas", + "ordinal": 10, + "type_info": "Int8" + }, + { + "name": "gas_limit", + "ordinal": 11, + "type_info": "Numeric" + }, + { + "name": "block_hash?", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "l1_batch_number?", + "ordinal": 13, + "type_info": "Int8" + }, + { + "name": "contract_address?", + "ordinal": 14, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + true, + true, + true, + true, + true, + false, + null, + null, + true, + false, + true, + false, + true, + false + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n WITH sl AS (\n SELECT * FROM storage_logs\n WHERE storage_logs.address = $1 AND storage_logs.tx_hash = $2\n ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC\n LIMIT 1\n )\n SELECT\n transactions.hash as tx_hash,\n transactions.index_in_block as index_in_block,\n transactions.l1_batch_tx_index as l1_batch_tx_index,\n transactions.miniblock_number as block_number,\n 
transactions.error as error,\n transactions.effective_gas_price as effective_gas_price,\n transactions.initiator_address as initiator_address,\n transactions.data->'to' as \"transfer_to?\",\n transactions.data->'contractAddress' as \"execute_contract_address?\",\n transactions.tx_format as \"tx_format?\",\n transactions.refunded_gas as refunded_gas,\n transactions.gas_limit as gas_limit,\n miniblocks.hash as \"block_hash?\",\n miniblocks.l1_batch_number as \"l1_batch_number?\",\n sl.key as \"contract_address?\"\n FROM transactions\n LEFT JOIN miniblocks\n ON miniblocks.number = transactions.miniblock_number\n LEFT JOIN sl\n ON sl.value != $3\n WHERE transactions.hash = $2\n " + }, + "1d26bb777f103d83523d223071eaa8391049c0efec9406e37003ac08065d389f": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bool", + "Bytea", + "Int8", + "Bytea", + "Bytea", + "Bytea", + "Int8" + ] + } + }, + "query": "\n UPDATE l1_batches\n SET hash = $1, merkle_root_hash = $2, commitment = $3, default_aa_code_hash = $4,\n compressed_repeated_writes = $5, compressed_initial_writes = $6, l2_l1_compressed_messages = $7,\n l2_l1_merkle_root = $8,\n zkporter_is_available = $9, bootloader_code_hash = $10, rollup_last_leaf_index = $11,\n aux_data_hash = $12, pass_through_data_hash = $13, meta_parameters_hash = $14,\n updated_at = now()\n WHERE number = $15\n " + }, + "1d3e9cd259fb70a2bc81e8344576c3fb27b47ad6cdb6751d2a9b8c8d342b7a75": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET status = $1, updated_at = now()\n WHERE id = $2\n " + }, + "1eede5c2169aee5a767b3b6b829f53721c0c353956ccec31a75226a65325ae46": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [] + } + }, + "query": "UPDATE transactions SET in_mempool = FALSE WHERE in_mempool = 
TRUE" + }, + "1faf6552c221c75b7232b55210c0c37be76a57ec9dc94584b6ccb562e8b182f2": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "circuit_type", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "prover_input", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "error", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "processing_started_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "created_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "time_taken", + "ordinal": 9, + "type_info": "Time" + }, + { + "name": "aggregation_round", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "result", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "sequence_number", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "attempts", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "circuit_input_blob_url", + "ordinal": 14, + "type_info": "Text" + }, + { + "name": "proccesed_by", + "ordinal": 15, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 16, + "type_info": "Bool" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + true, + false, + false, + false, + false, + true, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * from prover_jobs where id=$1" + }, + "227daa1e8d647c207869d7c306d9d13a38c6baf07281cf72cd93d20da2e3cf3c": { + "describe": { + "columns": [ + { + "name": "exists", + "ordinal": 0, + "type_info": "Bool" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT true as \"exists\"\n FROM (\n SELECT * FROM storage_logs\n 
WHERE hashed_key = $1\n ORDER BY miniblock_number DESC, operation_number DESC\n LIMIT 1\n ) sl\n WHERE sl.value != $2\n " + }, + "22b57675a726d9cfeb82a60ba50c36cab1548d197ea56a7658d3f005df07c60b": { + "describe": { + "columns": [ + { + "name": "op_id", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(priority_op_id) as \"op_id\" from transactions where is_priority = true AND miniblock_number IS NOT NULL" + }, + "230cbdfecc31d22f490d98e52dacd69739b654491042dc32a0f5e672281822f7": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + } + }, + "query": "update storage set value = $1 where hashed_key = $2" + }, + "2397c1a050d358b596c9881c379bf823e267c03172f72c42da84cc0c04cc9d93": { + "describe": { + "columns": [ + { + "name": "miniblock_number!", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "hash", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "index_in_block!", + "ordinal": 2, + "type_info": "Int4" + }, + { + "name": "l1_batch_tx_index!", + "ordinal": 3, + "type_info": "Int4" + } + ], + "nullable": [ + true, + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT miniblock_number as \"miniblock_number!\",\n hash, index_in_block as \"index_in_block!\", l1_batch_tx_index as \"l1_batch_tx_index!\"\n FROM transactions\n WHERE l1_batch_number = $1\n ORDER BY miniblock_number, index_in_block\n " + }, + "24abd3109457403cbb8dc59f8805e0426d6da3b766ddae1516d45ad0b1277bc7": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": 
"fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + 
"type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "SELECT * FROM l1_batches\n WHERE eth_commit_tx_id = $1 OR eth_prove_tx_id = $1 OR eth_execute_tx_id = $1" + }, + "251d3e3615046ec5f061cfba65dc5ad891ee7fa315abe39aedbd291e36140610": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic3!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 3, + 
"type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 6, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 9, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT tx_hash, topic2 as \"topic2!\", topic3 as \"topic3!\", value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM events\n INNER JOIN tokens ON\n events.topic4 = ('\\x000000000000000000000000'::bytea || tokens.l2_address)\n WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3\n " + }, + "28b5117758d0e82672351c0cc2dbbfbe7b27e785d7a3d7e8d3ddde76b6aa2974": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Int4", + "Int4", + "Int2" + ] + } + }, + "query": "\n INSERT INTO gpu_prover_queue (instance_host, instance_port, queue_capacity, queue_free_slots, instance_status, specialized_prover_group_id, created_at, updated_at)\n VALUES (cast($1::text as inet), $2, $3, $3, 'available', $4, now(), now())\n ON CONFLICT(instance_host, instance_port)\n DO UPDATE SET instance_status='available', queue_capacity=$3, queue_free_slots=$3, specialized_prover_group_id=$4, updated_at=now()" + }, + "2911797974d340cc75bb628866c24f77665e3dca3954f0c83860da488265f5c6": { + "describe": { + "columns": [ + { + "name": "address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "key", + "ordinal": 
1, + "type_info": "Bytea" + }, + { + "name": "value", + "ordinal": 2, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT address, key, value\n FROM storage_logs\n WHERE miniblock_number BETWEEN (SELECT MIN(number) FROM miniblocks WHERE l1_batch_number = $1)\n AND (SELECT MAX(number) FROM miniblocks WHERE l1_batch_number = $1)\n ORDER BY miniblock_number, operation_number\n " + }, + "292e7d004a45cf3c65b1be4c1dfe5f3aeeb8097af85329c6c181077aac4752c6": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM storage_logs_dedup WHERE l1_batch_number > $1" + }, + "2b07fff3b8f793c010c0bd6f706d7c43786305e3335fd6ae344664ec60f815a8": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [] + } + }, + "query": "DELETE FROM contract_verification_solc_versions" + }, + "2b22e7d15adf069c8e68954059b83f71a71350f3325b4280840c4be7e54a319f": { + "describe": { + "columns": [ + { + "name": "l1_address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l2_address", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "name", + "ordinal": 2, + "type_info": "Varchar" + }, + { + "name": "symbol", + "ordinal": 3, + "type_info": "Varchar" + }, + { + "name": "decimals", + "ordinal": 4, + "type_info": "Int4" + } + ], + "nullable": [ + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT l1_address, l2_address, name, symbol, decimals FROM tokens\n WHERE well_known = true\n ORDER BY symbol" + }, + "2c136284610f728ddba3e255d7dc573b10e4baf9151de194b7d8e0dc40c40602": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Jsonb" + ] + } + }, + "query": "INSERT INTO transaction_traces (tx_hash, trace, created_at, updated_at) VALUES ($1, $2, now(), now())" + }, + 
"2eea5d279edc2b23cab00d2be00d046f741552e5d86dfdf61d7e3847a4bb65d8": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM contracts_verification_info\n WHERE address = $1\n " + }, + "2ff4a13a75537cc30b2c3d52d3ef6237850150e4a4569adeaa4da4a9ac5bc689": { + "describe": { + "columns": [ + { + "name": "bytecode", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "SELECT bytecode FROM factory_deps WHERE bytecode_hash = $1 AND miniblock_number <= $2" + }, + "30a51f6a7d0146fc74d411e5ee1cee44550251c0d8e814891984ecc462bc0bcb": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE scheduler_witness_jobs\n SET final_node_aggregations=''\n WHERE l1_batch_number = ANY($1);\n " + }, + "3221b722354995f0705ceaf913a48aa092129bb4ff561a1104196f5b25192576": { + "describe": { + "columns": [ + { + "name": "version", + "ordinal": 0, + "type_info": "Text" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT version FROM contract_verification_zksolc_versions ORDER by version" + }, + "335826f54feadf6aa30a4e7668ad3f17a2afc6bd67d4f863e3ad61fefd1bd8d2": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(number) as \"number\" FROM miniblocks" + }, + "3594189e579d00c5477476e999aef22fe0dff97c753db118270285e26a9e4366": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM 
leaf_aggregation_witness_jobs\n WHERE length(basic_circuits) <> 0\n OR length(basic_circuits_inputs) <> 0\n LIMIT $1;\n " + }, + "35ef2dc2ac64f27e24679288e5a4f56ad03369cd9771cb4b2bc3dc17906d21e8": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "leaf_layer_subqueues_blob_url", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "aggregation_outputs_blob_url", + "ordinal": 2, + "type_info": "Text" + } + ], + "nullable": [ + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number, leaf_layer_subqueues_blob_url, aggregation_outputs_blob_url FROM node_aggregation_witness_jobs\n WHERE status='successful' AND is_blob_cleaned=FALSE\n AND leaf_layer_subqueues_blob_url is NOT NULL\n AND aggregation_outputs_blob_url is NOT NULL\n AND updated_at < NOW() - INTERVAL '2 days'\n LIMIT $1;\n " + }, + "393345441797999e9f11b8b5ddce0b64356e1e167056d7f76ef6dfffd3534607": { + "describe": { + "columns": [ + { + "name": "name!", + "ordinal": 0, + "type_info": "Varchar" + }, + { + "name": "symbol!", + "ordinal": 1, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 2, + "type_info": "Int4" + } + ], + "nullable": [ + null, + null, + null + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT\n COALESCE(token_list_name, name) as \"name!\",\n COALESCE(token_list_symbol, symbol) as \"symbol!\",\n COALESCE(token_list_decimals, decimals) as \"decimals!\"\n FROM tokens WHERE l2_address = $1\n " + }, + "3ab6a849873a78c741f5266aceedbc0bce3486b0d28066b2edd53ddeff6ca43a": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "is_priority", + "ordinal": 1, + "type_info": "Bool" + }, + { + "name": "full_fee", + "ordinal": 2, + "type_info": "Numeric" + }, + { + "name": "layer_2_tip_fee", + "ordinal": 3, + "type_info": "Numeric" + }, + { + "name": 
"initiator_address", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "nonce", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "signature", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "input", + "ordinal": 7, + "type_info": "Bytea" + }, + { + "name": "data", + "ordinal": 8, + "type_info": "Jsonb" + }, + { + "name": "received_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "priority_op_id", + "ordinal": 10, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 11, + "type_info": "Int8" + }, + { + "name": "index_in_block", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "error", + "ordinal": 13, + "type_info": "Varchar" + }, + { + "name": "gas_limit", + "ordinal": 14, + "type_info": "Numeric" + }, + { + "name": "gas_per_storage_limit", + "ordinal": 15, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 16, + "type_info": "Numeric" + }, + { + "name": "tx_format", + "ordinal": 17, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 18, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 19, + "type_info": "Timestamp" + }, + { + "name": "execution_info", + "ordinal": 20, + "type_info": "Jsonb" + }, + { + "name": "contract_address", + "ordinal": 21, + "type_info": "Bytea" + }, + { + "name": "in_mempool", + "ordinal": 22, + "type_info": "Bool" + }, + { + "name": "l1_block_number", + "ordinal": 23, + "type_info": "Int4" + }, + { + "name": "value", + "ordinal": 24, + "type_info": "Numeric" + }, + { + "name": "paymaster", + "ordinal": 25, + "type_info": "Bytea" + }, + { + "name": "paymaster_input", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "max_fee_per_gas", + "ordinal": 27, + "type_info": "Numeric" + }, + { + "name": "max_priority_fee_per_gas", + "ordinal": 28, + "type_info": "Numeric" + }, + { + "name": "effective_gas_price", + "ordinal": 29, + "type_info": "Numeric" + }, + { + "name": "miniblock_number", + 
"ordinal": 30, + "type_info": "Int8" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 31, + "type_info": "Int4" + }, + { + "name": "refunded_gas", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "l1_tx_mint", + "ordinal": 33, + "type_info": "Numeric" + }, + { + "name": "l1_tx_refund_recipient", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "block_hash?", + "ordinal": 35, + "type_info": "Bytea" + }, + { + "name": "eth_commit_tx_hash?", + "ordinal": 36, + "type_info": "Text" + }, + { + "name": "eth_prove_tx_hash?", + "ordinal": 37, + "type_info": "Text" + }, + { + "name": "eth_execute_tx_hash?", + "ordinal": 38, + "type_info": "Text" + } + ], + "nullable": [ + false, + false, + true, + true, + false, + true, + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + true, + false, + true, + false, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT transactions.*,\n miniblocks.hash as \"block_hash?\",\n commit_tx.tx_hash as \"eth_commit_tx_hash?\",\n prove_tx.tx_hash as \"eth_prove_tx_hash?\",\n execute_tx.tx_hash as \"eth_execute_tx_hash?\"\n FROM transactions\n LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number\n LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number\n LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL)\n WHERE transactions.hash = $1\n " + }, + "3c582aeed32235ef175707de412a9f9129fad6ea5e87ebb85f68e20664b0da46": { + "describe": { 
+ "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4Array", + "ByteaArray", + "Int8" + ] + } + }, + "query": "\n UPDATE transactions\n SET \n l1_batch_number = $3,\n l1_batch_tx_index = data_table.l1_batch_tx_index,\n updated_at = now()\n FROM\n (SELECT\n UNNEST($1::int[]) AS l1_batch_tx_index,\n UNNEST($2::bytea[]) AS hash\n ) AS data_table\n WHERE transactions.hash=data_table.hash \n " + }, + "3d41f05e1d5c5a74e0605e66fe08e09f14b8bf0269e5dcde518aa08db92a3ea0": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM events WHERE miniblock_number > $1" + }, + "3d7350a4252bfff0cb99d40330d09af2dcbda1a3f42a0d1f03ae88c4f5c3e5ef": { + "describe": { + "columns": [ + { + "name": "l1_batch_number?", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "\n SELECT MIN(l1_batch_number) as \"l1_batch_number?\"\n FROM prover_jobs\n WHERE status = 'queued' OR status = 'in_progress'\n OR status = 'in_gpu_proof'\n OR (status = 'failed' AND attempts < $1)\n " + }, + "3de5668eca2211f9701304e374100d45b359b1f7832d4a30b325fa679012c3e7": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Numeric", + "Timestamp" + ] + } + }, + "query": "UPDATE tokens SET market_volume = $2, market_volume_updated_at = $3, updated_at = now() WHERE l1_address = $1" + }, + "3e10488214ce80491123183171bef6b7fd2fbd89a9d2a39230efda23b5cbe65c": { + "describe": { + "columns": [ + { + "name": "instance_host", + "ordinal": 0, + "type_info": "Inet" + }, + { + "name": "instance_port", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "instance_status", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 3, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 4, + "type_info": "Timestamp" + }, + { + "name": "processing_started_at", + 
"ordinal": 5, + "type_info": "Timestamp" + }, + { + "name": "queue_free_slots", + "ordinal": 6, + "type_info": "Int4" + }, + { + "name": "queue_capacity", + "ordinal": 7, + "type_info": "Int4" + }, + { + "name": "specialized_prover_group_id", + "ordinal": 8, + "type_info": "Int2" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "Interval", + "Int2" + ] + } + }, + "query": "\n UPDATE gpu_prover_queue\n SET instance_status = 'reserved',\n updated_at = now(),\n processing_started_at = now()\n WHERE (instance_host, instance_port) in (\n SELECT instance_host, instance_port\n FROM gpu_prover_queue\n WHERE specialized_prover_group_id=$2\n AND (\n instance_status = 'available'\n OR (instance_status = 'reserved' AND processing_started_at < now() - $1::interval)\n )\n ORDER BY updated_at ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING gpu_prover_queue.*\n " + }, + "40a1960bf6dffd5711892edfc2a73c8f2db44aefe1436882dc3bee8447bb67bd": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "INSERT INTO initial_writes (hashed_key, l1_batch_number, created_at, updated_at)\n SELECT storage_logs_dedup.hashed_key, storage_logs_dedup.l1_batch_number, now(), now()\n FROM storage_logs_dedup\n WHERE l1_batch_number BETWEEN $1 AND $2\n AND is_write = TRUE\n ON CONFLICT DO NOTHING\n " + }, + "40e0d88efb9c9ea0a8630df05d0a9981a13020ad69fafa42358e857fb4f1a93a": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bytea", + "Bool", + "Bytea", + "Bytea", + "Int8", + "Bytea", + "Bytea", + "Bytea", + "Int8" + ] + } + }, + "query": "\n UPDATE l1_batches SET\n hash = $1, merkle_root_hash = $2, commitment = $3, default_aa_code_hash = $4,\n compressed_repeated_writes = $5, compressed_initial_writes = $6, 
l2_l1_compressed_messages = $7,\n l2_l1_merkle_root = $8, zkporter_is_available = $9, \n bootloader_code_hash = $10, parent_hash = $11, rollup_last_leaf_index = $12, \n aux_data_hash = $13, pass_through_data_hash = $14, meta_parameters_hash = $15,\n updated_at = NOW()\n WHERE number = $16 AND hash IS NULL\n " + }, + "41913b02b13a0dad87268c5e0d673d9f04d5207ab6a48b63004e6c3ed07b93bc": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + 
"type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + 
true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT * FROM l1_batches WHERE number = 0 OR eth_commit_tx_id IS NOT NULL AND commitment IS NOT NULL ORDER BY number DESC LIMIT 1" + }, + "42d2c16694dbf70205748008a18424bcbb689aff8317079dc6d60c411541167d": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n UPDATE l1_batches\n SET predicted_commit_gas_cost = $2, updated_at = now()\n WHERE number = $1\n " + }, + "438ea2edcf2e5ec1ec8b05da4d634e914e4d892441b6f2926f0926c7c90e33d1": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Text", + "Jsonb" + ] + } + }, + "query": "INSERT INTO contract_sources (address, assembly_code, pc_line_mapping, created_at, updated_at)\n VALUES ($1, $2, $3, now(), now())\n ON CONFLICT (address)\n DO UPDATE SET assembly_code = $2, pc_line_mapping = $3, updated_at = now()\n " + }, + "43f48f445f7e1627123e04b56c1899d1eee17c44411f3cbc59a809e3b16c158c": { + "describe": { + "columns": [ + { + "name": "hashed_key", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT DISTINCT ON (hashed_key) hashed_key FROM\n (SELECT * FROM storage_logs WHERE miniblock_number > $1) inn" + }, + "492488cc22bdc88e6fcd9017fdf034b8325ca517b4302ab234e93b38cc3225b9": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int4", + "Int8", + "Bool", + "Bytea", + "ByteaArray", + "ByteaArray", + "Bytea", + "ByteaArray", + "Int8", + "Int8", + "Int8", + "Jsonb", + "Jsonb", + "Numeric", + "Int8", + "Int8" + ] + } + }, + "query": "INSERT INTO l1_batches 
(number, l1_tx_count, l2_tx_count,\n timestamp, is_finished, fee_account_address, l2_to_l1_logs, l2_to_l1_messages, bloom, priority_ops_onchain_data,\n predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost,\n initial_bootloader_heap_content, used_contract_hashes, base_fee_per_gas, l1_gas_price, l2_fair_gas_price,\n created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, now(), now())\n " + }, + "4aafb16cb2b1cc8fa62f3065eefb4a4fa075f1d9c5fd9e61010b9d25d3532bcc": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE leaf_aggregation_witness_jobs\n SET basic_circuits='',\n basic_circuits_inputs=''\n WHERE l1_batch_number = ANY($1);\n " + }, + "4ab8a25620b5400d836e1b847320d4e176629a27e1a6cb0666ab02bb55371769": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Interval" + ] + } + }, + "query": "DELETE FROM transactions WHERE miniblock_number IS NULL AND received_at < now() - $1::interval AND is_priority=false AND error IS NULL RETURNING hash" + }, + "4ac92a8436108097a32e94e53f7fe99261c7c3a40dbc433c20ccea3a7d06650c": { + "describe": { + "columns": [ + { + "name": "hashed_key", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "ByteaArray" + ] + } + }, + "query": "SELECT hashed_key, value as \"value!\" FROM storage WHERE hashed_key = ANY($1)" + }, + "4acb725974d006c388be8965c3dff2e4c538ab8d2366addb3fb8cff3b789f114": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT COUNT(*) as \"count!\" FROM storage_logs WHERE 
miniblock_number = $1" + }, + "4c0d2aa6e08f3b4748b88cad5cf7b3a9eb9c051e8e8e747a3c38c1b37ce3a6b7": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM l2_to_l1_logs WHERE miniblock_number > $1" + }, + "4ca0356959e4cc50e09b6fe08e9d45cbd929601935506acbbade4a42c2eaea89": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Text" + ] + } + }, + "query": "\n INSERT INTO scheduler_witness_jobs\n (l1_batch_number, scheduler_witness, scheduler_witness_blob_url, status, created_at, updated_at)\n VALUES ($1, $2, $3, 'waiting_for_artifacts', now(), now())\n " + }, + "4d7b5a423b29ce07bd12f168d1ee707e6e413d9a4f0daafb4beed102d22d1745": { + "describe": { + "columns": [ + { + "name": "address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "key", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT address, key FROM protective_reads\n WHERE l1_batch_number = $1\n " + }, + "4dc63a4431062cb1ae428db625251a6121c3aa2fc06e045ae07b3db6d2f66406": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE witness_inputs\n SET is_blob_cleaned=TRUE\n WHERE l1_batch_number = ANY($1);\n " + }, + "5089dfb745ff04a9b071b5785e68194a6f6a7a72754d23a65adc7d6838f7f640": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "UPDATE eth_txs SET has_failed = TRUE WHERE id = $1" + }, + "50f406ffe7802e753411baa0e348294bdb05c96b96b2041ee876e2b34a1a6ea6": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Bytea", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM 
l1_batches\n WHERE number = $1\n AND hash = $2\n AND merkle_root_hash = $3\n AND parent_hash = $4\n AND l2_l1_merkle_root = $5\n " + }, + "516e309a97010cd1eb8398b2b7ff809786703c075e4c3dff1133c41cdcfdd3f3": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + 
"type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + 
true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * FROM l1_batches WHERE number = $1" + }, + "51faf352f402bf8137db9500d0438849a81334b35dc83b060ebfd956d1d3e791": { + "describe": { + "columns": [ + { + "name": "key", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Bytea" + ] + } + }, + "query": "\n SELECT key\n FROM storage_logs\n WHERE address = $1 AND miniblock_number > $2 AND NOT EXISTS (\n SELECT 1 FROM storage_logs as s\n WHERE\n s.hashed_key = storage_logs.hashed_key AND\n (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND\n s.value = $3\n )\n " + }, + "523efb18ba96382c55ee9566b5402f8dd3082ae4a66205a2122eea5961f8b86b": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT COUNT(*) as \"count!\" FROM transactions\n WHERE miniblock_number > $1 AND miniblock_number IS NOT NULL" + }, + "529fd4515b6c71592204e747d5dca9cb98d8863b354e35b8ac3486746fb8b49a": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "circuit_type", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "prover_input", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "error", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "processing_started_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "created_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "time_taken", + "ordinal": 
9, + "type_info": "Time" + }, + { + "name": "aggregation_round", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "result", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "sequence_number", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "attempts", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "circuit_input_blob_url", + "ordinal": 14, + "type_info": "Text" + }, + { + "name": "proccesed_by", + "ordinal": 15, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 16, + "type_info": "Bool" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + true, + false, + false, + false, + false, + true, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4", + "TextArray" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE id = (\n SELECT id\n FROM prover_jobs\n WHERE circuit_type = ANY($3)\n AND\n ( status = 'queued'\n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n )\n ORDER BY aggregation_round DESC, l1_batch_number ASC, id ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING prover_jobs.*\n " + }, + "5317ed0be137e9ed32abcd41486f53937b8508f5c6478523aa18826518e5f0ab": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "ByteaArray", + "Int4Array", + "VarcharArray", + "JsonbArray" + ] + } + }, + "query": "\n UPDATE transactions\n SET\n miniblock_number = $1,\n index_in_block = data_table.index_in_block,\n error = NULLIF(data_table.error, ''),\n in_mempool=FALSE,\n execution_info = execution_info || data_table.new_execution_info,\n updated_at = now()\n FROM\n (\n SELECT\n UNNEST($2::bytea[]) AS hash,\n UNNEST($3::integer[]) AS index_in_block,\n UNNEST($4::varchar[]) AS error,\n UNNEST($5::jsonb[]) AS new_execution_info\n 
) AS data_table\n WHERE transactions.hash = data_table.hash\n " + }, + "541d22a9ffe9c7b31833f203af0820cca4513d7a9e6feed7313757674c30e667": { + "describe": { + "columns": [ + { + "name": "address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "key", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "value", + "ordinal": 2, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n SELECT address, key, value FROM storage_logs\n WHERE miniblock_number >= $1 AND miniblock_number <= $2\n ORDER BY miniblock_number, operation_number ASC\n " + }, + "5543380548ce40063d43c1d54e368c7d385800d7ade9e720306808cc4c376978": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", 
+ "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": 
"skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * FROM l1_batches WHERE eth_prove_tx_id IS NOT NULL AND eth_execute_tx_id IS NULL ORDER BY number LIMIT $1" + }, + "55ae3cf154fe027f9036c60d21b5fd32972fbb2b17a74562d7721ec69dd19971": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "delete from storage where hashed_key = $1" + }, + "57742ed088179b89b50920a2ab1a103b745598ee0ba05d1793fc54e63b477319": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4", + "Int8", + "Int8" + ] + } + }, + "query": "UPDATE l1_batches SET eth_commit_tx_id = $1, updated_at = now() WHERE number BETWEEN $2 AND $3" + }, + "580d973b404123108e8e8b27cd754f108a289e1556da10a466e4c795fbd23ddf": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4", + "Int4" + ] + } + }, + "query": "UPDATE eth_txs_history SET sent_at_block = $2, sent_at = now()\n WHERE id = $1 AND sent_at_block IS NULL" + }, + "59b10abd699d19cbdf285334162ee40f294c5fad8f99fc00a4cdb3b233a494d6": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic3!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 3, + 
"type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 6, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 9, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT tx_hash, topic2 as \"topic2!\", topic3 as \"topic3!\", value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM events\n INNER JOIN tokens ON\n events.topic4 = ('\\x000000000000000000000000'::bytea || tokens.l2_address)\n WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3\n ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC\n " + }, + "5a5844af61cc685a414fcd3cad70900bdce8f48e905c105f8dd50dc52e0c6f14": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "attempts", + "ordinal": 1, + "type_info": "Int4" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Text", + "Int8" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET status = 'failed', error = $1, updated_at = now()\n WHERE id = $2\n RETURNING l1_batch_number, attempts\n " + }, + "5aaec6df0337db49524e7dafd145334bfe66ca886f922d559d6f5484137003fd": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int4", + "Int8", + "Int8", + "Text", + "Bytea" 
+ ] + } + }, + "query": "INSERT INTO eth_txs_history\n (eth_tx_id, base_fee_per_gas, priority_fee_per_gas, tx_hash, signed_raw_tx, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, now(), now())\n RETURNING id" + }, + "5ac872e2c5a00b376cc053324b3776ef6a0bb7f6850e5a24a133dfee052c49e1": { + "describe": { + "columns": [ + { + "name": "value", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "SELECT value FROM storage WHERE hashed_key = $1" + }, + "5b45825b92d6971d8b2fbad6eb68d24e1c666a54cbf1ceb1332e2039f9614d18": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "TextArray" + ] + } + }, + "query": "\n INSERT INTO contract_verification_zksolc_versions (version, created_at, updated_at)\n SELECT u.version, now(), now()\n FROM UNNEST($1::text[])\n AS u(version)\n " + }, + "5b85d8bdf297f55e65978edda4a0966ded1dc0d24f4701e7b6048124f38b4cea": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray", + "ByteaArray", + "Int8" + ] + } + }, + "query": "INSERT INTO factory_deps\n (bytecode_hash, bytecode, miniblock_number, created_at, updated_at)\n SELECT u.bytecode_hash, u.bytecode, $3, now(), now()\n FROM UNNEST($1::bytea[], $2::bytea[])\n AS u(bytecode_hash, bytecode)\n ON CONFLICT (bytecode_hash) DO NOTHING\n " + }, + "5bc8cdc7ed710bb2f9b0035654fd7e9dcc01731ca581c6aa75d55184817bc100": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(number) as \"number\" FROM l1_batches WHERE hash IS NOT NULL" + }, + "5d1c3357b97f5e40a7e9d6fdcb7c3ebd8309e93f26e1c42d6371190f4aeaf8c6": { + "describe": { + "columns": [ + { + "name": "min?", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "max?", + "ordinal": 1, + "type_info": "Int8" + } + ], + "nullable": [ + null, + null + 
], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT MIN(miniblocks.number) as \"min?\", MAX(miniblocks.number) as \"max?\"\n FROM miniblocks\n WHERE l1_batch_number = $1\n " + }, + "5e09f2359dd69380c1f183f613d82696029a56896e2b985738a2fa25d6cb8a71": { + "describe": { + "columns": [ + { + "name": "op_id", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(priority_op_id) as \"op_id\" from transactions where is_priority = true" + }, + "5f5974e7033eea82896a435c7776a6740f4a2df77175744a9670d3fee2f24b32": { + "describe": { + "columns": [ + { + "name": "address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic1", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic2", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "topic3", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "topic4", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "value", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "block_hash", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "l1_batch_number?", + "ordinal": 7, + "type_info": "Int8" + }, + { + "name": "miniblock_number", + "ordinal": 8, + "type_info": "Int8" + }, + { + "name": "tx_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "tx_index_in_block", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "event_index_in_block", + "ordinal": 11, + "type_info": "Int4" + }, + { + "name": "event_index_in_tx", + "ordinal": 12, + "type_info": "Int4" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + null, + null, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT\n address, topic1, topic2, topic3, topic4, value,\n Null::bytea as \"block_hash\", Null::bigint as \"l1_batch_number?\",\n miniblock_number, tx_hash, tx_index_in_block,\n event_index_in_block, 
event_index_in_tx\n FROM events\n WHERE tx_hash = $1\n ORDER BY miniblock_number ASC, event_index_in_block ASC\n " + }, + "60d8df86205f043af69ff5daee1db8d4d20805bf8dfeddc256ff616e36502cc8": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT id FROM prover_jobs\n WHERE length(prover_input) <> 0\n LIMIT $1;\n " + }, + "61f4f5ef369b2435732af17091493876301e3e59b68d6817fe0053c7da89291e": { + "describe": { + "columns": [ + { + "name": "max_nonce?", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(nonce) as \"max_nonce?\" FROM eth_txs" + }, + "622735d9d8a0ab3f607b239740a0a2e323cca7026556c4fff95d06ef5ae9d9ba": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Bytea", + "Int4", + "Int4", + "Numeric", + "Int8", + "Int8", + "Int8" + ] + } + }, + "query": "\n INSERT INTO miniblocks (\n number, timestamp, hash, l1_tx_count, l2_tx_count,\n base_fee_per_gas, l1_gas_price, l2_fair_gas_price, gas_per_pubdata_limit, created_at, updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, now(), now())\n " + }, + "623ce93bba053fe78a1e254db5e421c5b51fbafcda1fc5c17eaab3f5fe233122": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Text", + "Text", + "Text", + "Text", + "Bool", + "Bytea" + ] + } + }, + "query": "\n INSERT INTO contract_verification_requests (\n contract_address,\n source_code,\n contract_name,\n compiler_zksolc_version,\n compiler_solc_version,\n optimization_used,\n constructor_arguments,\n status,\n created_at,\n updated_at\n )\n VALUES ($1, $2, $3, $4, $5, $6, $7, 'queued', now(), now())\n RETURNING id\n " + }, + 
"62e8b4afd4df9e30bfa08cb30c74ba4566fa2e9f4934b7a2777f9e90b49e8fce": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "DELETE FROM eth_txs_history\n WHERE id = $1" + }, + "63616acc2c415f4c8d650a96fd5481a609436a94666d65363eb06808da8da4b8": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "UPDATE transactions\n SET l1_batch_number = NULL, miniblock_number = NULL, error = NULL, index_in_block = NULL, execution_info = '{}'\n WHERE miniblock_number > $1" + }, + "64b1bce209f43ee9f8294a270047cd58c20b973d8fef29c662742cad89363ffe": { + "describe": { + "columns": [ + { + "name": "status", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "error", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "compilation_errors", + "ordinal": 2, + "type_info": "Jsonb" + } + ], + "nullable": [ + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT status, error, compilation_errors FROM contract_verification_requests\n WHERE id = $1\n " + }, + "65bf55ff4ac5c4ac60bedd7c5b39d82f6e8793859749a7b6ab56121f623ed840": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "commit_gas?", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "commit_base_gas_price?", + "ordinal": 2, + "type_info": "Int8" + }, + { + "name": "commit_priority_gas_price?", + "ordinal": 3, + "type_info": "Int8" + }, + { + "name": "prove_gas?", + "ordinal": 4, + "type_info": "Int8" + }, + { + "name": "prove_base_gas_price?", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "prove_priority_gas_price?", + "ordinal": 6, + "type_info": "Int8" + }, + { + "name": "execute_gas?", + "ordinal": 7, + "type_info": "Int8" + }, + { + "name": "execute_base_gas_price?", + "ordinal": 8, + "type_info": "Int8" + }, + { + "name": "execute_priority_gas_price?", + "ordinal": 9, + 
"type_info": "Int8" + } + ], + "nullable": [ + false, + true, + false, + false, + true, + false, + false, + true, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT\n l1_batches.number,\n commit_tx_data.gas_used as \"commit_gas?\",\n commit_tx.base_fee_per_gas as \"commit_base_gas_price?\",\n commit_tx.priority_fee_per_gas as \"commit_priority_gas_price?\",\n prove_tx_data.gas_used as \"prove_gas?\",\n prove_tx.base_fee_per_gas as \"prove_base_gas_price?\",\n prove_tx.priority_fee_per_gas as \"prove_priority_gas_price?\",\n execute_tx_data.gas_used as \"execute_gas?\",\n execute_tx.base_fee_per_gas as \"execute_base_gas_price?\",\n execute_tx.priority_fee_per_gas as \"execute_priority_gas_price?\"\n FROM l1_batches\n LEFT JOIN eth_txs_history as commit_tx\n ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs as commit_tx_data\n ON (l1_batches.eth_commit_tx_id = commit_tx_data.id)\n LEFT JOIN eth_txs_history as prove_tx\n ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs as prove_tx_data\n ON (l1_batches.eth_prove_tx_id = prove_tx_data.id)\n LEFT JOIN eth_txs_history as execute_tx\n ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs as execute_tx_data\n ON (l1_batches.eth_execute_tx_id = execute_tx_data.id)\n WHERE l1_batches.number = $1\n " + }, + "66072439a0436906c6273ffdbadca8837f23677f4d47c42cd9053e952789f26b": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "basic_circuits", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "basic_circuits_inputs", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "number_of_basic_circuits", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + 
"name": "processing_started_at", + "ordinal": 5, + "type_info": "Timestamp" + }, + { + "name": "time_taken", + "ordinal": 6, + "type_info": "Time" + }, + { + "name": "error", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "attempts", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "basic_circuits_blob_url", + "ordinal": 11, + "type_info": "Text" + }, + { + "name": "basic_circuits_inputs_blob_url", + "ordinal": 12, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 13, + "type_info": "Bool" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + true, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4" + ] + } + }, + "query": "\n UPDATE leaf_aggregation_witness_jobs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE l1_batch_number = (\n SELECT l1_batch_number\n FROM leaf_aggregation_witness_jobs\n WHERE status = 'queued' \n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n ORDER BY l1_batch_number ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING leaf_aggregation_witness_jobs.*\n " + }, + "66a3761aec92aa8794e55ddd8299879e915e8ef84f8be9ebca9881c77438d2c8": { + "describe": { + "columns": [ + { + "name": "value", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "\n SELECT value FROM storage_logs\n WHERE hashed_key = $1 AND miniblock_number <= $2\n ORDER BY miniblock_number DESC, operation_number DESC\n LIMIT 1\n " + }, + "67a47f1e7d5f8dafcef94bea3f268b4baec1888c6ef11c92ab66480ecdcb9aef": { + "describe": { + "columns": [], + "nullable": [], + 
"parameters": { + "Left": [ + "Time", + "Bytea", + "Text", + "Int8" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET status = 'successful', updated_at = now(), time_taken = $1, result = $2, proccesed_by = $3\n WHERE id = $4\n " + }, + "67b861c97d16bf99a2d305c100116cbcb0334473c4462e4164436885481197fb": { + "describe": { + "columns": [ + { + "name": "total_transactions!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"total_transactions!\"\n FROM transactions\n WHERE contract_address = $1\n " + }, + "67ecdc69e39e689f1f23f867d31e6b8c47e9c041e18cbd84a2ad6482a9be4e74": { + "describe": { + "columns": [ + { + "name": "l2_to_l1_logs", + "ordinal": 0, + "type_info": "ByteaArray" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT l2_to_l1_logs FROM l1_batches WHERE number = $1" + }, + "6ae4738857a3dc19860b8dc61b75790dee0030d84438bcc311e917cb1a076289": { + "describe": { + "columns": [ + { + "name": "proof", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "aggregation_result_coords", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + true, + true + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "SELECT prover_jobs.result as proof, scheduler_witness_jobs.aggregation_result_coords\n FROM prover_jobs\n INNER JOIN scheduler_witness_jobs\n ON prover_jobs.l1_batch_number = scheduler_witness_jobs.l1_batch_number\n WHERE prover_jobs.l1_batch_number >= $1 AND prover_jobs.l1_batch_number <= $2\n AND prover_jobs.aggregation_round = 3\n AND prover_jobs.status = 'successful'\n AND scheduler_witness_jobs.status = 'successful'\n " + }, + "6c81c5a55d595d0790ac20ca202ff3083b0677c47872f2eb1c65e568dd7c156a": { + "describe": { + "columns": [ + { + "name": "miniblock_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "log_index_in_miniblock", + 
"ordinal": 1, + "type_info": "Int4" + }, + { + "name": "log_index_in_tx", + "ordinal": 2, + "type_info": "Int4" + }, + { + "name": "tx_hash", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "block_hash", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l1_batch_number?", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "shard_id", + "ordinal": 6, + "type_info": "Int4" + }, + { + "name": "is_service", + "ordinal": 7, + "type_info": "Bool" + }, + { + "name": "tx_index_in_miniblock", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "tx_index_in_l1_batch", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "sender", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "key", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "value", + "ordinal": 12, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + false, + false, + null, + null, + false, + false, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT\n miniblock_number, log_index_in_miniblock, log_index_in_tx, tx_hash,\n Null::bytea as \"block_hash\", Null::bigint as \"l1_batch_number?\",\n shard_id, is_service, tx_index_in_miniblock, tx_index_in_l1_batch, sender, key, value\n FROM l2_to_l1_logs\n WHERE tx_hash = $1\n ORDER BY log_index_in_tx ASC\n " + }, + "6de96eb86301418de9a4342cd66447afd6eb42759d36e164e36adddbd42e98e2": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT number FROM l1_batches\n LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id)\n WHERE execute_tx.confirmed_at IS NOT NULL\n ORDER BY number DESC LIMIT 1" + }, + "6ebe0d6a315050d72ffead2dd695f0ba1926a3f4a1ed56b3f291d0f41b72c4d4": { + "describe": { + "columns": [ + { + "name": "hashed_key!", + "ordinal": 0, + "type_info": 
"Bytea" + }, + { + "name": "value?", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + null, + null + ], + "parameters": { + "Left": [ + "ByteaArray", + "Int8" + ] + } + }, + "query": "\n SELECT u.hashed_key as \"hashed_key!\",\n (SELECT value FROM storage_logs\n WHERE hashed_key = u.hashed_key AND miniblock_number < $2\n ORDER BY miniblock_number DESC, operation_number DESC LIMIT 1) as \"value?\"\n FROM UNNEST($1::bytea[]) AS u(hashed_key)\n " + }, + "6f9edffc50202b888d12f80e57a2a346d865e522aa5a02fe3fcfa155406227a4": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "is_priority", + "ordinal": 1, + "type_info": "Bool" + }, + { + "name": "full_fee", + "ordinal": 2, + "type_info": "Numeric" + }, + { + "name": "layer_2_tip_fee", + "ordinal": 3, + "type_info": "Numeric" + }, + { + "name": "initiator_address", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "nonce", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "signature", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "input", + "ordinal": 7, + "type_info": "Bytea" + }, + { + "name": "data", + "ordinal": 8, + "type_info": "Jsonb" + }, + { + "name": "received_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "priority_op_id", + "ordinal": 10, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 11, + "type_info": "Int8" + }, + { + "name": "index_in_block", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "error", + "ordinal": 13, + "type_info": "Varchar" + }, + { + "name": "gas_limit", + "ordinal": 14, + "type_info": "Numeric" + }, + { + "name": "gas_per_storage_limit", + "ordinal": 15, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 16, + "type_info": "Numeric" + }, + { + "name": "tx_format", + "ordinal": 17, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 18, + "type_info": "Timestamp" + }, + { + "name": 
"updated_at", + "ordinal": 19, + "type_info": "Timestamp" + }, + { + "name": "execution_info", + "ordinal": 20, + "type_info": "Jsonb" + }, + { + "name": "contract_address", + "ordinal": 21, + "type_info": "Bytea" + }, + { + "name": "in_mempool", + "ordinal": 22, + "type_info": "Bool" + }, + { + "name": "l1_block_number", + "ordinal": 23, + "type_info": "Int4" + }, + { + "name": "value", + "ordinal": 24, + "type_info": "Numeric" + }, + { + "name": "paymaster", + "ordinal": 25, + "type_info": "Bytea" + }, + { + "name": "paymaster_input", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "max_fee_per_gas", + "ordinal": 27, + "type_info": "Numeric" + }, + { + "name": "max_priority_fee_per_gas", + "ordinal": 28, + "type_info": "Numeric" + }, + { + "name": "effective_gas_price", + "ordinal": 29, + "type_info": "Numeric" + }, + { + "name": "miniblock_number", + "ordinal": 30, + "type_info": "Int8" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 31, + "type_info": "Int4" + }, + { + "name": "refunded_gas", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "l1_tx_mint", + "ordinal": 33, + "type_info": "Numeric" + }, + { + "name": "l1_tx_refund_recipient", + "ordinal": 34, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + true, + true, + false, + true, + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + true, + false, + true, + false, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8", + "Numeric", + "Numeric" + ] + } + }, + "query": "UPDATE transactions\n SET in_mempool = TRUE\n FROM (\n SELECT hash\n FROM transactions\n WHERE miniblock_number IS NULL AND in_mempool = FALSE AND error IS NULL\n AND (is_priority = TRUE OR (max_fee_per_gas >= $2 and gas_per_pubdata_limit >= $3))\n ORDER BY is_priority DESC, priority_op_id, received_at\n LIMIT $1\n ) as subquery\n WHERE transactions.hash = 
subquery.hash\n RETURNING transactions.*" + }, + "71df95e25f719ed9bc32622b33c1da0aad14c6ad1a96f25454ce8618470c2ea3": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray", + "Int8" + ] + } + }, + "query": "INSERT INTO initial_writes (hashed_key, l1_batch_number, created_at, updated_at)\n SELECT u.hashed_key, $2, now(), now()\n FROM UNNEST($1::bytea[]) AS u(hashed_key)\n ON CONFLICT (hashed_key) DO NOTHING\n " + }, + "734fc9cc1ffe10a6c6b56150c0681b6b2757d14b2ea04a289abb1de64dffb172": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "is_priority", + "ordinal": 1, + "type_info": "Bool" + }, + { + "name": "full_fee", + "ordinal": 2, + "type_info": "Numeric" + }, + { + "name": "layer_2_tip_fee", + "ordinal": 3, + "type_info": "Numeric" + }, + { + "name": "initiator_address", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "nonce", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "signature", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "input", + "ordinal": 7, + "type_info": "Bytea" + }, + { + "name": "data", + "ordinal": 8, + "type_info": "Jsonb" + }, + { + "name": "received_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "priority_op_id", + "ordinal": 10, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 11, + "type_info": "Int8" + }, + { + "name": "index_in_block", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "error", + "ordinal": 13, + "type_info": "Varchar" + }, + { + "name": "gas_limit", + "ordinal": 14, + "type_info": "Numeric" + }, + { + "name": "gas_per_storage_limit", + "ordinal": 15, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 16, + "type_info": "Numeric" + }, + { + "name": "tx_format", + "ordinal": 17, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 18, + "type_info": "Timestamp" + }, + { + "name": 
"updated_at", + "ordinal": 19, + "type_info": "Timestamp" + }, + { + "name": "execution_info", + "ordinal": 20, + "type_info": "Jsonb" + }, + { + "name": "contract_address", + "ordinal": 21, + "type_info": "Bytea" + }, + { + "name": "in_mempool", + "ordinal": 22, + "type_info": "Bool" + }, + { + "name": "l1_block_number", + "ordinal": 23, + "type_info": "Int4" + }, + { + "name": "value", + "ordinal": 24, + "type_info": "Numeric" + }, + { + "name": "paymaster", + "ordinal": 25, + "type_info": "Bytea" + }, + { + "name": "paymaster_input", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "max_fee_per_gas", + "ordinal": 27, + "type_info": "Numeric" + }, + { + "name": "max_priority_fee_per_gas", + "ordinal": 28, + "type_info": "Numeric" + }, + { + "name": "effective_gas_price", + "ordinal": 29, + "type_info": "Numeric" + }, + { + "name": "miniblock_number", + "ordinal": 30, + "type_info": "Int8" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 31, + "type_info": "Int4" + }, + { + "name": "refunded_gas", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "l1_tx_mint", + "ordinal": 33, + "type_info": "Numeric" + }, + { + "name": "l1_tx_refund_recipient", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "block_hash?", + "ordinal": 35, + "type_info": "Bytea" + }, + { + "name": "eth_commit_tx_hash?", + "ordinal": 36, + "type_info": "Text" + }, + { + "name": "eth_prove_tx_hash?", + "ordinal": 37, + "type_info": "Text" + }, + { + "name": "eth_execute_tx_hash?", + "ordinal": 38, + "type_info": "Text" + } + ], + "nullable": [ + false, + false, + true, + true, + false, + true, + true, + true, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + true, + false, + true, + false, + false, + false, + true, + true, + true, + true, + true, + false, + true, + true, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT transactions.*, 
miniblocks.hash as \"block_hash?\",\n commit_tx.tx_hash as \"eth_commit_tx_hash?\",\n prove_tx.tx_hash as \"eth_prove_tx_hash?\",\n execute_tx.tx_hash as \"eth_execute_tx_hash?\"\n FROM transactions\n LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number\n LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number\n LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL)\n WHERE transactions.hash = $1\n " + }, + "75273db544f363b2c75bb7b579ba72fbf9447dd76182159edc40a48b32a9f738": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "l1_batch_number", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "circuit_type", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "prover_input", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "error", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "processing_started_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "created_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "time_taken", + "ordinal": 9, + "type_info": "Time" + }, + { + "name": "aggregation_round", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "result", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "sequence_number", + "ordinal": 12, + "type_info": "Int4" + }, + { + "name": "attempts", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "circuit_input_blob_url", + "ordinal": 14, + "type_info": "Text" 
+ }, + { + "name": "proccesed_by", + "ordinal": 15, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 16, + "type_info": "Bool" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + true, + false, + false, + false, + false, + true, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE id = (\n SELECT id\n FROM prover_jobs\n WHERE status = 'queued' \n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'in_gpu_proof' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n ORDER BY aggregation_round DESC, l1_batch_number ASC, id ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING prover_jobs.*\n " + }, + "766119f845a7a11b6a5bb2a29bab32e2890df772b13e1a378222e089736fd3bf": { + "describe": { + "columns": [ + { + "name": "number!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT COALESCE(max(number), 0) as \"number!\" FROM l1_batches\n WHERE eth_prove_tx_id IS NOT NULL" + }, + "7889294ffe999d3c8b3b093d3add7f9b826e8259451068aeaeca0da0772648e8": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM contract_verification_requests\n WHERE status = 'queued'\n " + }, + "79eddef996b6770822a92f06f0f1a61f9fdcb4f7b57a69cbeae23925bcd10b15": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM witness_inputs\n WHERE 
length(merkle_tree_paths) <> 0\n LIMIT $1;\n " + }, + "7b90e1c16196f0ee29d7278689fe0ac0169093a11b95edf97c729370fadcb73e": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM initial_writes\n WHERE hashed_key = $1\n " + }, + "7bd5f83afce3c30c9c10d1d94cf6b943c5ba5caaef9fc9130b5c444af4238e14": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "merkel_tree_paths_blob_url", + "ordinal": 1, + "type_info": "Text" + } + ], + "nullable": [ + false, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number, merkel_tree_paths_blob_url FROM witness_inputs\n WHERE status='successful' AND is_blob_cleaned=FALSE\n AND merkel_tree_paths_blob_url is NOT NULL\n AND updated_at < NOW() - INTERVAL '2 days'\n LIMIT $1;\n " + }, + "7cf855c4869db43b765b92762402596f6b97b3717735b6d87a16a5776f2eca71": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Numeric", + "Timestamp" + ] + } + }, + "query": "UPDATE tokens SET usd_price = $2, usd_price_updated_at = $3, updated_at = now() WHERE l1_address = $1" + }, + "7d3a57126f111ebe51d678b91f64c34b8394df3e7b1d59ca80b6eca01c606da4": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Jsonb" + ] + } + }, + "query": "\n INSERT INTO contracts_verification_info\n (address, verification_info)\n VALUES ($1, $2)\n ON CONFLICT (address)\n DO UPDATE SET verification_info = $2\n " + }, + "7e3623674226e5bb934f7769cdf595138015ad346e12074398fd57dbc03962d3": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + 
"type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + 
"ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * FROM l1_batches WHERE eth_commit_tx_id IS NOT NULL AND eth_prove_tx_id IS NULL ORDER BY number LIMIT $1" + }, + "7f1a7b5cc5786e1554cb082c2f4cd1368c511e67aeb12465e16661ba940e9538": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [] + } 
+ }, + "query": "LOCK TABLE prover_jobs IN EXCLUSIVE MODE" + }, + "80ce94067a7727aca34a30372e9770d48b740798121b1abf7b84a6fd3545fe91": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray", + "Int4Array", + "ByteaArray", + "ByteaArray", + "NumericArray", + "NumericArray", + "NumericArray", + "NumericArray", + "Int4Array", + "Int4Array", + "VarcharArray", + "NumericArray", + "JsonbArray", + "ByteaArray", + "JsonbArray", + "Int8Array", + "Int8" + ] + } + }, + "query": "\n UPDATE transactions\n SET \n hash = data_table.hash,\n signature = data_table.signature,\n gas_limit = data_table.gas_limit,\n max_fee_per_gas = data_table.max_fee_per_gas,\n max_priority_fee_per_gas = data_table.max_priority_fee_per_gas,\n gas_per_pubdata_limit = data_table.gas_per_pubdata_limit,\n input = data_table.input,\n data = data_table.data,\n tx_format = data_table.tx_format,\n miniblock_number = $17,\n index_in_block = data_table.index_in_block,\n error = NULLIF(data_table.error, ''),\n effective_gas_price = data_table.effective_gas_price,\n execution_info = data_table.new_execution_info,\n refunded_gas = data_table.refunded_gas,\n in_mempool = FALSE,\n updated_at = now()\n FROM\n (\n SELECT\n UNNEST($1::bytea[]) AS initiator_address,\n UNNEST($2::int[]) AS nonce,\n UNNEST($3::bytea[]) AS hash,\n UNNEST($4::bytea[]) AS signature,\n UNNEST($5::numeric[]) AS gas_limit,\n UNNEST($6::numeric[]) AS max_fee_per_gas,\n UNNEST($7::numeric[]) AS max_priority_fee_per_gas,\n UNNEST($8::numeric[]) AS gas_per_pubdata_limit,\n UNNEST($9::int[]) AS tx_format,\n UNNEST($10::integer[]) AS index_in_block,\n UNNEST($11::varchar[]) AS error,\n UNNEST($12::numeric[]) AS effective_gas_price,\n UNNEST($13::jsonb[]) AS new_execution_info,\n UNNEST($14::bytea[]) AS input,\n UNNEST($15::jsonb[]) AS data,\n UNNEST($16::bigint[]) as refunded_gas\n ) AS data_table\n WHERE transactions.initiator_address=data_table.initiator_address \n AND transactions.nonce=data_table.nonce\n 
" + }, + "831e1beb42dab1dc4e9b585bb35ce568196e7f46cb655357fdf5437ece519270": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n UPDATE miniblocks\n SET l1_batch_number = $1\n WHERE l1_batch_number IS NULL\n " + }, + "87d0666797929df7eda848701b857af200eaada464c3f02b48f106c61d351239": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "INSERT INTO protective_reads (l1_batch_number, address, key, created_at, updated_at)\n SELECT storage_logs_dedup.l1_batch_number, storage_logs_dedup.address, storage_logs_dedup.key, now(), now()\n FROM storage_logs_dedup\n WHERE l1_batch_number BETWEEN $1 AND $2\n AND is_write = FALSE\n ON CONFLICT DO NOTHING\n " + }, + "87e1ae393bf250f834704c940482884c9ed729a24f41d1ec07319fa0cbcc21a7": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM l1_batches WHERE number > $1" + }, + "89b124c78f4f6e86790af8ec391a2c486ce01b33cfb4492a443187b1731cae1e": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4", + "Int8", + "Int8" + ] + } + }, + "query": "UPDATE l1_batches SET eth_prove_tx_id = $1, updated_at = now() WHERE number BETWEEN $2 AND $3" + }, + "8b96fbf5b8adabd76ea2648688c38c4d9917b3736ca53ed3896c35c0da427369": { + "describe": { + "columns": [ + { + "name": "bytecode_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "bytecode", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT bytecode_hash, bytecode FROM factory_deps\n INNER JOIN miniblocks ON miniblocks.number = factory_deps.miniblock_number\n WHERE miniblocks.l1_batch_number = $1" + }, + "8fe01036cac5181aabfdc06095da291c4de6b1e0f82f846c37509bb550ef544e": { + "describe": { + "columns": [ + { + "name": "l1_address", + 
"ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT l1_address FROM tokens WHERE well_known = false" + }, + "8fefa3194f469b0f46dc5efcb9e6ccc08159ef6a5681090cb7596877b597bc73": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": 
"predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, 
+ true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT * FROM l1_batches\n ORDER BY number DESC\n LIMIT 1" + }, + "91db60cc4f98ebcaef1435342607da0a86fe16e20a696cb81a569772d5d5ae88": { + "describe": { + "columns": [ + { + "name": "value", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "\n SELECT value\n FROM storage_logs\n WHERE storage_logs.hashed_key = $1 AND storage_logs.miniblock_number <= $2\n ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC\n LIMIT 1\n " + }, + "95ce099fde99c57a930ed3d44f74a90d632b831360210ec7fe21b33bed1a4582": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "nonce", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "raw_tx", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "contract_address", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "tx_type", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "gas_used", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "created_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "has_failed", + "ordinal": 8, + "type_info": "Bool" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "confirmed_eth_tx_history_id", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "predicted_gas_cost", + "ordinal": 11, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Text", + "Text", + "Int8" + ] + } + }, + "query": "INSERT INTO eth_txs (raw_tx, nonce, tx_type, 
contract_address, predicted_gas_cost, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, now(), now())\n RETURNING *" + }, + "95e0e783794ac55ab20b30366f037c313fb0d17e93d3e6ec60667ef1b4da30d5": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET is_blob_cleaned=TRUE\n WHERE id = ANY($1);\n " + }, + "9be2d960a76e3026408c829cb2fda1eca3b4550edaaa75b5a0e552c3163c1867": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Numeric", + "Numeric", + "Jsonb", + "Int8", + "Numeric", + "Numeric", + "Bytea", + "Int4", + "Numeric", + "Bytea", + "Bytea", + "Int4", + "Numeric", + "Bytea", + "Timestamp" + ] + } + }, + "query": "\n INSERT INTO transactions\n (\n hash,\n is_priority,\n initiator_address,\n\n gas_limit,\n gas_per_pubdata_limit,\n\n data,\n priority_op_id,\n full_fee,\n layer_2_tip_fee,\n contract_address,\n l1_block_number,\n value,\n\n paymaster,\n paymaster_input,\n tx_format,\n\n l1_tx_mint,\n l1_tx_refund_recipient,\n\n received_at,\n created_at,\n updated_at\n )\n VALUES\n (\n $1, TRUE, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12,\n $13, $14, $15, $16, $17, now(), now()\n )\n " + }, + "9bf32ea710825c1f0560a7eaa89f8f097ad196755ba82d98a729a2b0d34e1aca": { + "describe": { + "columns": [ + { + "name": "successful_limit!", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "queued_limit!", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "max_block!", + "ordinal": 2, + "type_info": "Int8" + } + ], + "nullable": [ + null, + null, + null + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT\n (SELECT l1_batch_number\n FROM prover_jobs\n WHERE status NOT IN ('successful', 'skipped')\n ORDER BY l1_batch_number\n LIMIT 1) as \"successful_limit!\",\n \n (SELECT l1_batch_number\n FROM prover_jobs\n WHERE status <> 'queued'\n ORDER BY l1_batch_number DESC\n LIMIT 1) as \"queued_limit!\",\n\n 
(SELECT MAX(l1_batch_number) as \"max!\" FROM prover_jobs) as \"max_block!\"\n " + }, + "9d2faf0b6f8582f0a2607ddd6e216cccfbea7ff5e99646e3a35420c4d190c5f8": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE witness_inputs\n SET merkle_tree_paths=''\n WHERE l1_batch_number = ANY($1);\n " + }, + "a39f760d2cd879a78112e57d8611d7099802b03b7cc4933cafb4c47e133ad543": { + "describe": { + "columns": [ + { + "name": "address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic1", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic2", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "topic3", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "topic4", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "value", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "block_hash", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "l1_batch_number?", + "ordinal": 7, + "type_info": "Int8" + }, + { + "name": "miniblock_number", + "ordinal": 8, + "type_info": "Int8" + }, + { + "name": "tx_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "tx_index_in_block", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "event_index_in_block", + "ordinal": 11, + "type_info": "Int4" + }, + { + "name": "event_index_in_tx", + "ordinal": 12, + "type_info": "Int4" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + null, + null, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT\n address, topic1, topic2, topic3, topic4, value,\n Null::bytea as \"block_hash\", Null::bigint as \"l1_batch_number?\",\n miniblock_number, tx_hash, tx_index_in_block,\n event_index_in_block, event_index_in_tx\n FROM events\n WHERE tx_hash = $1\n ORDER BY miniblock_number ASC, event_index_in_block ASC\n " + }, + 
"a3d526a5a341618e9784fc81626143a3174709483a527879254ff8e28f210ac3": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4", + "Int8", + "Int8" + ] + } + }, + "query": "UPDATE l1_batches SET eth_execute_tx_id = $1, updated_at = now() WHERE number BETWEEN $2 AND $3" + }, + "a3d6cbf1f4386b65338db27467087eb77479f739dc9e9e2ac004c5c0350aa99e": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "hash", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n SELECT number, hash FROM miniblocks\n WHERE number > $1\n ORDER BY number ASC\n LIMIT $2\n " + }, + "a482c481a9ffaad4735775282cf6e8d68f284884e7c6f043e9737a0d236f2e97": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic3!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 6, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 9, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT tx_hash, topic2 as \"topic2!\", topic3 as \"topic3!\", value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", 
tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM events\n INNER JOIN tokens ON tokens.l2_address = '\\x0000000000000000000000000000000000000000'\n WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3\n ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC\n " + }, + "a515ac602a38f43bb9cc026a8bbfc5bea47b13326b0a32b13e9d43724bf4165e": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "circuit_input_blob_url", + "ordinal": 1, + "type_info": "Text" + } + ], + "nullable": [ + false, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT id, circuit_input_blob_url FROM prover_jobs\n WHERE status='successful' AND is_blob_cleaned=FALSE\n AND circuit_input_blob_url is NOT NULL\n AND updated_at < NOW() - INTERVAL '2 days'\n LIMIT $1;\n " + }, + "a7d575d90f9bf19427ddbe342d296effb7c38bc90f213aa1cc94523930dd8f15": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l1_sender!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 6, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 9, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT events.tx_hash, transactions.initiator_address as \"l1_sender!\", events.topic2 as 
\"topic2!\", events.value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM events\n INNER JOIN tokens ON tokens.l2_address = '\\x0000000000000000000000000000000000000000'\n INNER JOIN transactions ON transactions.hash = events.tx_hash\n WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3\n ORDER BY tx_hash, events.miniblock_number ASC, event_index_in_block ASC\n " + }, + "a7f4d8a9520de951c50fd12fafc0ce8895e03932cbb0337ce0ea4e884296ca36": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Text", + "Int4", + "Int4" + ] + } + }, + "query": "\n UPDATE gpu_prover_queue\n SET instance_status = $1, updated_at = now(), queue_free_slots = $4\n WHERE instance_host = $2::text::inet\n AND instance_port = $3\n " + }, + "a8d2b80d197d8168a6c1b4666e799a9d6c2e31d84986ae352715e687989f913c": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "contract_address", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "source_code", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "contract_name", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "compiler_zksolc_version", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "optimization_used", + "ordinal": 5, + "type_info": "Bool" + }, + { + "name": "constructor_arguments", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "status", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "error", + "ordinal": 8, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 10, + "type_info": "Timestamp" + }, + { + "name": "compilation_errors", + "ordinal": 11, + "type_info": "Jsonb" + }, + { + "name": "processing_started_at", + 
"ordinal": 12, + "type_info": "Timestamp" + }, + { + "name": "compiler_solc_version", + "ordinal": 13, + "type_info": "Text" + }, + { + "name": "attempts", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "panic_message", + "ordinal": 15, + "type_info": "Text" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + false, + false, + true, + true, + false, + false, + true + ], + "parameters": { + "Left": [ + "Interval" + ] + } + }, + "query": "UPDATE contract_verification_requests\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE id = (\n SELECT id FROM contract_verification_requests\n WHERE status = 'queued' OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n ORDER BY created_at\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING contract_verification_requests.*" + }, + "a9b7a880dbde4f7de5a6c2ff4009281527f2d01a547228981af3af2129ffb3f7": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea", + "Numeric", + "Interval", + "Interval" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\" FROM tokens\n WHERE l2_address = $1 AND\n market_volume > $2 AND now() - market_volume_updated_at < $3 AND\n usd_price > 0 AND now() - usd_price_updated_at < $4\n " + }, + "aa062e23ada48ce48c1f4005ca059abcad411601e038b19154eedd15a2f7a493": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "scheduler_witness_blob_url", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "final_node_aggregations_blob_url", + "ordinal": 2, + "type_info": "Text" + } + ], + "nullable": [ + false, + true, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number, scheduler_witness_blob_url, 
final_node_aggregations_blob_url FROM scheduler_witness_jobs\n WHERE status='successful' AND is_blob_cleaned=FALSE\n AND updated_at < NOW() - INTERVAL '2 days'\n AND scheduler_witness_blob_url is NOT NULL\n AND final_node_aggregations_blob_url is NOT NULL\n LIMIT $1;\n " + }, + "aa1534f03679fd2d1d9e7c1da1f94cc0e2ec5fc3a0e1ac7137147533eacf0aaf": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "nonce", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "raw_tx", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "contract_address", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "tx_type", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "gas_used", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "created_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "has_failed", + "ordinal": 8, + "type_info": "Bool" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "confirmed_eth_tx_history_id", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "predicted_gas_cost", + "ordinal": 11, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * FROM eth_txs \n WHERE id > (SELECT COALESCE(MAX(eth_tx_id), 0) FROM eth_txs_history)\n ORDER BY id\n LIMIT $1\n " + }, + "aa9256fd40c557a553b407506794bffcc99247ccb9badf6ab303552d7b1bf5d2": { + "describe": { + "columns": [ + { + "name": "count", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT MIN(count) as \"count\"\n FROM (SELECT COALESCE(SUM(queue_free_slots), 0) as \"count\"\n FROM gpu_prover_queue\n where instance_status = 'available'\n UNION\n 
SELECT count(*) as \"count\"\n from prover_jobs\n where status = 'queued'\n ) as t1;\n " + }, + "ad11ec3e628ae6c64ac160d8dd689b2f64033f620e17a31469788b3ce4968ad3": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "eth_tx_id", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "tx_hash", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 3, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 4, + "type_info": "Timestamp" + }, + { + "name": "base_fee_per_gas", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "priority_fee_per_gas", + "ordinal": 6, + "type_info": "Int8" + }, + { + "name": "confirmed_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "signed_raw_tx", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "sent_at", + "ordinal": 10, + "type_info": "Timestamp" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "SELECT * FROM eth_txs_history WHERE eth_tx_id = $1 ORDER BY created_at DESC LIMIT 1" + }, + "ad4f74aa6f131df0243f4fa500ade1b98aa335bd71ed417b02361e2c697e60f8": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "\n UPDATE scheduler_witness_jobs\n SET aggregation_result_coords = $1,\n updated_at = now()\n WHERE l1_batch_number = $2\n " + }, + "adc9ad2c944f9dacc28b5bd133aa37d9e8ea99eca1c5dfbeef37cda4b793f434": { + "describe": { + "columns": [ + { + "name": "market_volume", + "ordinal": 0, + "type_info": "Numeric" + }, + { + "name": "market_volume_updated_at", + "ordinal": 1, + "type_info": "Timestamp" + } + ], + "nullable": [ + true, + true + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "SELECT 
market_volume, market_volume_updated_at FROM tokens WHERE l2_address = $1" + }, + "ae072f51b65d0b5212264be9a34027922e5aedef7e4741517ad8104bf5aa79e9": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM factory_deps WHERE miniblock_number > $1" + }, + "aeece159730c2751bc57880c0c394a4ebb60d263ecb4b7f6e68dce681aa23b65": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE prover_jobs\n SET prover_input=''\n WHERE id = ANY($1);\n " + }, + "af75db6b7e42b73ce62b28a7281e1bfa181ee0c80a85d7d8078831db5dcdb699": { + "describe": { + "columns": [ + { + "name": "l1_block_number", + "ordinal": 0, + "type_info": "Int4" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT l1_block_number FROM transactions\n WHERE priority_op_id IS NOT NULL\n ORDER BY priority_op_id DESC\n LIMIT 1" + }, + "afc0448c58b0e2f7a7865cc1b5069d66f4cb9d4f609a0fab06cac3b7784910d1": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Text", + "Int4", + "Int4" + ] + } + }, + "query": "\n UPDATE gpu_prover_queue\n SET instance_status = 'available', updated_at = now(), queue_free_slots = $3\n WHERE instance_host = $1::text::inet\n AND instance_port = $2\n AND instance_status = 'full'\n " + }, + "b1478907214ad20dddd4f3846fba4b0ddf1fff63ddb3b95c8999635e77c8b863": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "eth_tx_id", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "tx_hash", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 3, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 4, + "type_info": "Timestamp" + }, + { + "name": "base_fee_per_gas", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "priority_fee_per_gas", + "ordinal": 6, + "type_info": 
"Int8" + }, + { + "name": "confirmed_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "signed_raw_tx", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "sent_at", + "ordinal": 10, + "type_info": "Timestamp" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "SELECT * FROM eth_txs_history WHERE eth_tx_id = $1 ORDER BY created_at DESC" + }, + "b21656ac9a476ea4c5ddaeae8d557ad284514e65321088d6c45cab2ffea42825": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int4", + "Int8", + "Text", + "Text" + ] + } + }, + "query": "\n UPDATE node_aggregation_witness_jobs\n SET number_of_leaf_circuits = $1,\n leaf_layer_subqueues_blob_url = $3,\n aggregation_outputs_blob_url = $4,\n status = 'waiting_for_proofs',\n updated_at = now()\n WHERE l1_batch_number = $2\n " + }, + "b4cd15d430b423cd5bad80199abf0f67c698ca469e55557f20d5c7460ed40b0d": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Text", + "Int4", + "Bytea", + "Int4", + "Text" + ] + } + }, + "query": "\n INSERT INTO prover_jobs (l1_batch_number, circuit_type, sequence_number, prover_input, aggregation_round, circuit_input_blob_url, status, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, 'queued', now(), now())\n ON CONFLICT(l1_batch_number, aggregation_round, sequence_number) DO NOTHING\n " + }, + "b4da918ee3b36b56d95c8834edebe65eb48ebb8270fa1e6ccf73ad354fd71134": { + "describe": { + "columns": [ + { + "name": "l1_address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l2_address", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT l1_address, l2_address FROM tokens WHERE well_known = true" + }, 
+ "b6c8e0827b2389a14433c031332962495311562ae9652ae7e9409a4bf48dc55b": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "nonce", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "raw_tx", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "contract_address", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "tx_type", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "gas_used", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "created_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "has_failed", + "ordinal": 8, + "type_info": "Bool" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "confirmed_eth_tx_history_id", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "predicted_gas_cost", + "ordinal": 11, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT * FROM eth_txs WHERE confirmed_eth_tx_history_id IS NULL \n AND id <= (SELECT COALESCE(MAX(eth_tx_id), 0) FROM eth_txs_history WHERE sent_at_block IS NOT NULL)\n ORDER BY id" + }, + "b776d4d774b300958ba027a7dd80bf19e11d7ef202e8c73dca185d553199fb5f": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM scheduler_witness_jobs\n WHERE length(final_node_aggregations) <> 0\n LIMIT $1;\n " + }, + "bd4898ee283a312cb995853686a1f5252e73b22efea3cf9f158c4476c9639b32": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray", + "ByteaArray", + "ByteaArray", + "ByteaArray", + "ByteaArray" + ] + } + }, + "query": 
"INSERT INTO storage (hashed_key, address, key, value, tx_hash, created_at, updated_at)\n SELECT u.hashed_key, u.address, u.key, u.value, u.tx_hash, now(), now()\n FROM UNNEST ($1::bytea[], $2::bytea[], $3::bytea[], $4::bytea[], $5::bytea[])\n AS u(hashed_key, address, key, value, tx_hash)\n ON CONFLICT (hashed_key)\n DO UPDATE SET tx_hash = excluded.tx_hash, value = excluded.value, updated_at = now()\n " + }, + "bef58e581dd0b658350dcdc15ebf7cf350cf088b60c916a15889e31ee7534907": { + "describe": { + "columns": [ + { + "name": "bytecode", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "bytecode_hash", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "ByteaArray" + ] + } + }, + "query": "SELECT bytecode, bytecode_hash FROM factory_deps WHERE bytecode_hash = ANY($1)" + }, + "bf9ad4da63cb23b0991f16381a13139bd32003a7f8d0736deb2b127162e492ec": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "l1_tx_count", + "ordinal": 2, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "root_hash?", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "commit_tx_hash?", + "ordinal": 5, + "type_info": "Text" + }, + { + "name": "committed_at?", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "prove_tx_hash?", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "proven_at?", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "execute_tx_hash?", + "ordinal": 9, + "type_info": "Text" + }, + { + "name": "executed_at?", + "ordinal": 10, + "type_info": "Timestamp" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + true, + false, + true, + false, + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT miniblocks.number,\n 
miniblocks.timestamp,\n miniblocks.l1_tx_count,\n miniblocks.l2_tx_count,\n miniblocks.hash as \"root_hash?\",\n commit_tx.tx_hash as \"commit_tx_hash?\",\n commit_tx.confirmed_at as \"committed_at?\",\n prove_tx.tx_hash as \"prove_tx_hash?\",\n prove_tx.confirmed_at as \"proven_at?\",\n execute_tx.tx_hash as \"execute_tx_hash?\",\n execute_tx.confirmed_at as \"executed_at?\"\n FROM miniblocks\n LEFT JOIN l1_batches ON miniblocks.l1_batch_number = l1_batches.number\n LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL)\n LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL)\n WHERE miniblocks.number = $1\n " + }, + "c0532f9e7a6130426acb032f391f6dae7ff22914f0045673c42c1ee84ca36490": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM (\n SELECT * FROM storage_logs\n WHERE storage_logs.hashed_key = $1\n ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC\n LIMIT 1\n ) sl\n WHERE sl.value != $2\n " + }, + "c1ed4c80984db514dd264a9bc19bdaee29b6f5c291a9d503d9896c41b316cca5": { + "describe": { + "columns": [ + { + "name": "nonce!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "\n SELECT nonce as \"nonce!\" FROM transactions\n WHERE initiator_address = $1 AND nonce >= $2\n AND is_priority = FALSE\n AND (miniblock_number IS NOT NULL OR error IS NULL)\n ORDER BY nonce\n " + }, + "c2f6f7fa37b303748f47ff2de01227e7afbc9ff041bc1428743d91300f5f5caf": { + "describe": { + 
"columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT l1_batch_number FROM miniblocks\n WHERE number = $1\n " + }, + "c6109267f85f38edcd53f361cf2654f43fa45928e39324cfab8389453b4e7031": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "eth_tx_id", + "ordinal": 1, + "type_info": "Int4" + }, + { + "name": "tx_hash", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "base_fee_per_gas", + "ordinal": 3, + "type_info": "Int8" + }, + { + "name": "priority_fee_per_gas", + "ordinal": 4, + "type_info": "Int8" + }, + { + "name": "signed_raw_tx", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "nonce", + "ordinal": 6, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + false + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT \n eth_txs_history.id,\n eth_txs_history.eth_tx_id,\n eth_txs_history.tx_hash,\n eth_txs_history.base_fee_per_gas,\n eth_txs_history.priority_fee_per_gas,\n eth_txs_history.signed_raw_tx,\n eth_txs.nonce\n FROM eth_txs_history \n JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id \n WHERE eth_txs_history.sent_at_block IS NULL AND eth_txs.confirmed_eth_tx_history_id IS NULL\n ORDER BY eth_txs_history.id DESC" + }, + "c6aadc4ec78e30f5775f7a9f866ad02984b78de3e3d1f34c144a4057ff44ea6a": { + "describe": { + "columns": [ + { + "name": "count", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT COUNT(*) FROM eth_txs WHERE has_failed = TRUE" + }, + "c766f2ee9e3054ba337873ba5ebb26d4f1a43691664372152e5eb782391f9f68": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE node_aggregation_witness_jobs\n SET 
is_blob_cleaned=TRUE\n WHERE l1_batch_number = ANY($1);\n " + }, + "c8125b30eb64eebfa4500dc623972bf8771a83b218bd18a51e633d4cf4bf8eb3": { + "describe": { + "columns": [ + { + "name": "bytecode", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Bytea" + ] + } + }, + "query": "\n SELECT bytecode FROM (\n SELECT * FROM storage_logs\n WHERE\n storage_logs.hashed_key = $1 AND\n storage_logs.miniblock_number <= $2\n ORDER BY\n storage_logs.miniblock_number DESC, storage_logs.operation_number DESC\n LIMIT 1\n ) t\n JOIN factory_deps ON value = factory_deps.bytecode_hash\n WHERE value != $3\n " + }, + "c849561f88c775f2cce4d59387916793ba1623a8a714b415375477e090d86bd3": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Int4" + ] + } + }, + "query": "UPDATE eth_txs\n SET gas_used = $1, confirmed_eth_tx_history_id = $2\n WHERE id = $3" + }, + "c891770305cb3aba4021738e60567d977eac54435c871b5178de7c3c96d2f721": { + "describe": { + "columns": [ + { + "name": "usd_price", + "ordinal": 0, + "type_info": "Numeric" + }, + { + "name": "usd_price_updated_at", + "ordinal": 1, + "type_info": "Timestamp" + } + ], + "nullable": [ + true, + true + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "SELECT usd_price, usd_price_updated_at FROM tokens WHERE l2_address = $1" + }, + "c8c154ac76bb14498fb0f0720fcdab4d863985a8d15c9d8b8b1f68f390bc1c03": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "leaf_layer_subqueues", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "aggregation_outputs", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "number_of_leaf_circuits", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "status", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "processing_started_at", + "ordinal": 5, + "type_info": "Timestamp" 
+ }, + { + "name": "time_taken", + "ordinal": 6, + "type_info": "Time" + }, + { + "name": "error", + "ordinal": 7, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 9, + "type_info": "Timestamp" + }, + { + "name": "attempts", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "leaf_layer_subqueues_blob_url", + "ordinal": 11, + "type_info": "Text" + }, + { + "name": "aggregation_outputs_blob_url", + "ordinal": 12, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 13, + "type_info": "Bool" + } + ], + "nullable": [ + false, + true, + true, + true, + false, + true, + true, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4" + ] + } + }, + "query": "\n UPDATE node_aggregation_witness_jobs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE l1_batch_number = (\n SELECT l1_batch_number\n FROM node_aggregation_witness_jobs\n WHERE status = 'queued' \n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n ORDER BY l1_batch_number ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING node_aggregation_witness_jobs.*\n " + }, + "cbd6ed03ec615ee3a1747bc39f068e792d2c51ef4e3717b3f0074a38a625a44b": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "hash", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "base_fee_per_gas", + "ordinal": 5, + "type_info": "Numeric" + }, + { + "name": "l1_gas_price", + "ordinal": 6, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + 
"ordinal": 7, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT number, timestamp, hash, l1_tx_count, l2_tx_count,\n base_fee_per_gas, l1_gas_price, l2_fair_gas_price\n FROM miniblocks\n WHERE number = $1\n " + }, + "cbe9445b28efc540d4a01b4c8f1e62017e9854b2d01973c55b27603a8a81bbdd": { + "describe": { + "columns": [ + { + "name": "value", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + } + }, + "query": "select value from storage_logs where hashed_key = $1 and miniblock_number <= $2 order by miniblock_number desc, operation_number desc limit 1" + }, + "ce12a389d218de2071752e8f67b9ad3132777c8a8737009be283e1bedef6dad5": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + 
"type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", + "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "l1_gas_price", + "ordinal": 30, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 32, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 33, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 35, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 36, + "type_info": "Numeric" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + 
"type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 41, + "type_info": "Int4" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + false, + false, + true, + true, + true, + true, + false, + true, + true, + true, + false, + true, + false + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n SELECT number, timestamp, is_finished, l1_tx_count, l2_tx_count, fee_account_address, bloom, priority_ops_onchain_data, hash, parent_hash, commitment, compressed_write_logs, compressed_contracts, eth_prove_tx_id, eth_commit_tx_id, eth_execute_tx_id, created_at, updated_at, merkle_root_hash, l2_to_l1_logs, l2_to_l1_messages, predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost, initial_bootloader_heap_content, used_contract_hashes, compressed_initial_writes, compressed_repeated_writes, l2_l1_compressed_messages, l2_l1_merkle_root, l1_gas_price, l2_fair_gas_price, rollup_last_leaf_index, zkporter_is_available, bootloader_code_hash, default_aa_code_hash, base_fee_per_gas, aux_data_hash, pass_through_data_hash, meta_parameters_hash, skip_proof, gas_per_pubdata_byte_in_block, gas_per_pubdata_limit\n FROM\n (SELECT l1_batches.*, row_number() over (order by number ASC) as row_number\n FROM l1_batches\n WHERE eth_commit_tx_id IS NOT NULL\n AND l1_batches.skip_proof = TRUE\n AND l1_batches.number > $1\n ORDER BY number LIMIT $2) inn\n WHERE number - row_number = $1\n " + }, + 
"ce3666b149f7fc62a68139a8efb83ed149c7deace17b8968817941763e45a147": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Bytea" + ] + } + }, + "query": "\n DELETE FROM tokens \n WHERE l2_address IN\n (\n SELECT substring(key, 12, 20) FROM storage_logs \n WHERE storage_logs.address = $1 AND miniblock_number > $2 AND NOT EXISTS (\n SELECT 1 FROM storage_logs as s\n WHERE\n s.hashed_key = storage_logs.hashed_key AND\n (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND\n s.value = $3\n )\n )\n " + }, + "cea77fbe02853a7a9b1f7b5ddf2957cb23212ae5ef0f889834d796c35b583542": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "DELETE FROM miniblocks WHERE number > $1" + }, + "cf9a49dd3ef67b3515e411fd0daadd667af9a4451390b3ef47fe9f902ee9f4e2": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Text", + "Jsonb", + "Text" + ] + } + }, + "query": "\n UPDATE contract_verification_requests\n SET status = 'failed', updated_at = now(), error = $2, compilation_errors = $3, panic_message = $4\n WHERE id = $1\n " + }, + "d0571a05a9f65e71b3ab478dc7217c3644024ed0d6ae6616c331a7737759c86c": { + "describe": { + "columns": [ + { + "name": "merkle_root_hash", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT merkle_root_hash FROM l1_batches WHERE number = $1" + }, + "d0770d2d0cc0cec5cf5c2e90912b697f19adbdf5cb6e734c3bddd06ad96e83e9": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "TextArray" + ] + } + }, + "query": "\n INSERT INTO contract_verification_solc_versions (version, created_at, updated_at)\n SELECT u.version, now(), now()\n FROM UNNEST($1::text[])\n AS u(version)\n " + }, + 
"d0ff67e7c59684a0e4409726544cf850dbdbb36d038ebbc6a1c5bf0e76b0358c": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT COUNT(*) as \"count!\" FROM l1_batches" + }, + "d2f16dcd8175a337f57724ce5b2fb59d2934f60bb2d24c6ec77195dc63c26002": { + "describe": { + "columns": [ + { + "name": "hash!", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 3, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 4, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 5, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 6, + "type_info": "Numeric" + } + ], + "nullable": [ + true, + true, + true, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT hash as \"hash!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM transactions\n INNER JOIN tokens\n ON tokens.l2_address = transactions.contract_address OR (transactions.contract_address = $2 AND tokens.l2_address = $3)\n WHERE hash = ANY($1)\n " + }, + "d6654b10ce779826e565bddf67c9a1aca2767f11e858eb9aaedff4b0ea277a34": { + "describe": { + "columns": [ + { + "name": "tx_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "topic2!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "topic3!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "value!", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 5, 
+ "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 6, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 7, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 8, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 9, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT tx_hash, topic2 as \"topic2!\", topic3 as \"topic3!\", value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM events\n INNER JOIN tokens ON\n tokens.l2_address = events.address OR (events.address = $3 AND tokens.l2_address = $4)\n WHERE tx_hash = ANY($1) AND topic1 = $2\n ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC\n " + }, + "d6709f3ce8f08f988e10a0e0fb5c06db9488834a85066babaf3d56cf212b4ea0": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Bytea", + "Varchar", + "Varchar", + "Int4" + ] + } + }, + "query": "UPDATE tokens SET token_list_name = $2, token_list_symbol = $3,\n token_list_decimals = $4, well_known = true, updated_at = now()\n WHERE l1_address = $1\n " + }, + "d8515595d34dca53e50bbd4ed396f6208e33f596195a5ed02fba9e8364ceb33c": { + "describe": { + "columns": [ + { + "name": "bytecode", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "SELECT bytecode FROM factory_deps WHERE bytecode_hash = $1" + }, + "dbf9a2be8cdd0a8ad95f049134d33ae0c4ed4204e4d8f6e5f3244bea4830f67e": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n UPDATE l1_batches\n SET 
skip_proof = TRUE WHERE number = $1\n " + }, + "dbfb1709a68fccf341320f7cf1b757378ec462d63d17672f82a8d9f95797136d": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "received_at", + "ordinal": 1, + "type_info": "Timestamp" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Timestamp", + "Int8" + ] + } + }, + "query": "\n SELECT transactions.hash, transactions.received_at\n FROM transactions\n LEFT JOIN miniblocks ON miniblocks.number = miniblock_number\n WHERE received_at > $1\n ORDER BY received_at ASC\n LIMIT $2\n " + }, + "dd10ebfbf5db4d2ac44b03be3acf494ea180f59685d8fc156af481e8265079c2": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "number", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 2, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n SELECT\n hash,\n number,\n timestamp\n FROM miniblocks\n WHERE number > $1\n ORDER BY number ASC\n " + }, + "dd8aa1c9d4dcea22c9a13cca5ae45e951cf963b0608046b88be40309d7379ec2": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Varchar", + "Bytea" + ] + } + }, + "query": "UPDATE transactions\n SET error = $1, updated_at = now()\n WHERE hash = $2" + }, + "ddb3b38be2b6038b63288961f46ba7d3bb7250caff1146e13c5ee77b6a994ffc": { + "describe": { + "columns": [ + { + "name": "circuit_type", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "result", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + true + ], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + } + }, + "query": "\n SELECT circuit_type, result from prover_jobs\n WHERE l1_batch_number = $1 AND status = 'successful' AND aggregation_round = $2\n ORDER BY sequence_number ASC;\n " + }, + 
"dec8533793968c9db379e3da18f262ea9d9dce2f8959c29b0a638296bf10ccc2": { + "describe": { + "columns": [ + { + "name": "key", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "bytecode", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Int8", + "Int8" + ] + } + }, + "query": "\n SELECT storage_logs.key, factory_deps.bytecode\n FROM storage_logs\n JOIN factory_deps ON storage_logs.value = factory_deps.bytecode_hash\n WHERE\n storage_logs.address = $1 AND\n storage_logs.miniblock_number >= $3 AND\n storage_logs.miniblock_number <= $4 AND\n NOT EXISTS (\n SELECT 1 FROM storage_logs as s\n WHERE\n s.hashed_key = storage_logs.hashed_key AND\n (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND\n s.value = $2\n )\n " + }, + "e14338281eb639856f1c7a8ba6b60fe3914d3f30d0b55cea8fb287209892df03": { + "describe": { + "columns": [ + { + "name": "key_address", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "bytecode", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "creator_address?", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "creator_tx_hash?", + "ordinal": 3, + "type_info": "Bytea" + }, + { + "name": "created_in_block_number", + "ordinal": 4, + "type_info": "Int8" + }, + { + "name": "verification_info", + "ordinal": 5, + "type_info": "Jsonb" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n WITH sl AS (\n SELECT * FROM storage_logs\n WHERE storage_logs.hashed_key = $1\n ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC\n LIMIT 1\n )\n SELECT\n sl.key as \"key_address\",\n fd.bytecode,\n txs.initiator_address as \"creator_address?\",\n txs.hash as \"creator_tx_hash?\",\n sl.miniblock_number as \"created_in_block_number\",\n 
c.verification_info\n FROM sl\n JOIN factory_deps fd ON fd.bytecode_hash = sl.value\n LEFT JOIN transactions txs ON txs.hash = sl.tx_hash\n LEFT JOIN contracts_verification_info c ON c.address = $2\n WHERE sl.value != $3\n " + }, + "e15e67a56e3caa0a30d4981308437a531f6d16e7a7bb5ebacd9a9466b10f5e7a": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Text" + ] + } + }, + "query": "\n UPDATE scheduler_witness_jobs\n SET final_node_aggregations_blob_url = $2, status = 'waiting_for_proofs',\n updated_at = now()\n WHERE l1_batch_number = $1\n " + }, + "e199251d38cb1f18993863f2e7920f21f7867ae1b48ffc905919de7bd98491de": { + "describe": { + "columns": [ + { + "name": "min?", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "\n SELECT MIN(miniblock_number) as \"min?\"\n FROM l2_to_l1_logs\n " + }, + "e2023b335b34b24cd0bd8d1d972aa1867a13c78504312fc718e801272c47b559": { + "describe": { + "columns": [ + { + "name": "bytecode", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "data", + "ordinal": 1, + "type_info": "Jsonb" + }, + { + "name": "contract_address", + "ordinal": 2, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false, + true + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT factory_deps.bytecode, transactions.data, transactions.contract_address\n FROM (\n SELECT * FROM storage_logs\n WHERE storage_logs.hashed_key = $1\n ORDER BY miniblock_number DESC, operation_number DESC\n LIMIT 1\n ) storage_logs\n JOIN factory_deps ON factory_deps.bytecode_hash = storage_logs.value\n JOIN transactions ON transactions.hash = storage_logs.tx_hash\n WHERE storage_logs.value != $2\n " + }, + "e29d263f33257a37f391907b7ff588f416a0350b606f16f4779fa1d3bf4be08b": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "eth_tx_id", + "ordinal": 1, + 
"type_info": "Int4" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Text" + ] + } + }, + "query": "UPDATE eth_txs_history\n SET updated_at = now(), confirmed_at = now()\n WHERE tx_hash = $1\n RETURNING id, eth_tx_id" + }, + "e42721cc22fbb2bda84f64057586f019cc5122c8e8723f2a9df778b2aa19fffc": { + "describe": { + "columns": [ + { + "name": "version", + "ordinal": 0, + "type_info": "Text" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT version FROM contract_verification_solc_versions ORDER by version" + }, + "e7f7e746aca1c17a8c88aba2db3f7cbd7c639c003580fc72e7b6af4c8ffba595": { + "describe": { + "columns": [ + { + "name": "bytecode_hash", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "bytecode", + "ordinal": 1, + "type_info": "Bytea" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "SELECT bytecode_hash, bytecode FROM factory_deps\n WHERE miniblock_number >= $1 AND miniblock_number <= $2" + }, + "e900682a160af90d532da47a1222fc1d7c9962ee8996dbd9b9bb63f13820cf2b": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "ByteaArray" + ] + } + }, + "query": "DELETE FROM transactions WHERE in_mempool = TRUE AND initiator_address = ANY($1)" + }, + "e90688187953eb3c8f5ff4b25c4a6b838e6717c720643b441dece5079b441fc2": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [] + } + }, + "query": "DELETE FROM eth_txs WHERE id >=\n (SELECT MIN(id) FROM eth_txs WHERE has_failed = TRUE)" + }, + "ec4a3bc6a7a9c13ad11a4b71bed019a961f918a1d1376440c484cc42432c6c9c": { + "describe": { + "columns": [ + { + "name": "count!", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Int8", + "Int4" + ] + } + }, + "query": "\n SELECT COUNT(*) as \"count!\"\n FROM prover_jobs\n WHERE status = 'successful' AND l1_batch_number = $1 
AND aggregation_round = $2\n " + }, + "edc9e374698c57ba9f65f83f0e1945e4785d8b4bc95f46ed4d16c095e5511709": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8Array" + ] + } + }, + "query": "\n UPDATE leaf_aggregation_witness_jobs\n SET is_blob_cleaned=TRUE\n WHERE l1_batch_number = ANY($1);\n " + }, + "ee7bd820bf35c5c714092494c386eccff25457cff6dc00eb81d9809eaeb95670": { + "describe": { + "columns": [ + { + "name": "is_replaced!", + "ordinal": 0, + "type_info": "Bool" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Int8", + "Bytea", + "Numeric", + "Numeric", + "Numeric", + "Numeric", + "Bytea", + "Jsonb", + "Int4", + "Bytea", + "Numeric", + "Bytea", + "Bytea", + "Int8", + "Int4", + "Int4", + "Timestamp" + ] + } + }, + "query": "\n INSERT INTO transactions\n (\n hash,\n is_priority,\n initiator_address,\n nonce,\n signature,\n gas_limit,\n max_fee_per_gas,\n max_priority_fee_per_gas,\n gas_per_pubdata_limit,\n input,\n data,\n tx_format,\n contract_address,\n value,\n paymaster,\n paymaster_input,\n execution_info,\n received_at,\n created_at,\n updated_at\n )\n VALUES\n (\n $1, FALSE, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15,\n jsonb_build_object('gas_used', $16::bigint, 'storage_writes', $17::int, 'contracts_used', $18::int),\n $19, now(), now()\n )\n ON CONFLICT\n (initiator_address, nonce)\n DO UPDATE\n SET hash=$1,\n signature=$4,\n gas_limit=$5,\n max_fee_per_gas=$6,\n max_priority_fee_per_gas=$7,\n gas_per_pubdata_limit=$8,\n input=$9,\n data=$10,\n tx_format=$11,\n contract_address=$12,\n value=$13,\n paymaster=$14,\n paymaster_input=$15,\n execution_info=jsonb_build_object('gas_used', $16::bigint, 'storage_writes', $17::int, 'contracts_used', $18::int),\n in_mempool=FALSE,\n received_at=$19,\n created_at=now(),\n updated_at=now(),\n error = NULL\n WHERE transactions.is_priority = FALSE AND transactions.miniblock_number IS NULL\n RETURNING (SELECT hash FROM 
transactions WHERE transactions.initiator_address = $2 AND transactions.nonce = $3) IS NOT NULL as \"is_replaced!\"\n " + }, + "eeb83808774404b1af3e09c1f89399c92f743be21e45b7b19a0ece6084e61c6c": { + "describe": { + "columns": [ + { + "name": "tx_format", + "ordinal": 0, + "type_info": "Int4" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n UPDATE transactions \n SET tx_format=255 \n WHERE hash IN (\n SELECT hash \n FROM transactions\n WHERE is_priority = true\n AND tx_format is null\n LIMIT $1\n )\n RETURNING tx_format\n " + }, + "efc83e42f5d0238b8996a5b311746527289a5a002ff659531a076680127e8eb4": { + "describe": { + "columns": [ + { + "name": "hash", + "ordinal": 0, + "type_info": "Bytea" + } + ], + "nullable": [ + true + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT hash FROM l1_batches WHERE number = $1" + }, + "f0308ffa4cc34a305150959ad1a30792c0b2bf493c6fa6183725b731a89c11e8": { + "describe": { + "columns": [ + { + "name": "count", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + } + }, + "query": "SELECT count(*)\n FROM storage\n WHERE\n address = $1 AND\n value != $2\n " + }, + "f0c50c53c3883c1ae59263b40e55011760d64350eff411eef856ff301bb70579": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "l1_batch_tx_index", + "ordinal": 1, + "type_info": "Int4" + } + ], + "nullable": [ + true, + true + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "\n SELECT l1_batch_number, l1_batch_tx_index\n FROM transactions\n WHERE hash = $1\n " + }, + "f3f7ceb708cc072d66e8609d64ba99e6faa80bf58ff0ce0ef49e882af63522d4": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n INSERT INTO node_aggregation_witness_jobs\n (l1_batch_number, status, created_at, 
updated_at)\n VALUES ($1, 'waiting_for_artifacts', now(), now())\n " + }, + "f59f291b06c6a0cd0ba4de04f07c05a38a93665cc81f78b14223d6ceef5d6ba6": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "timestamp", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "is_finished", + "ordinal": 2, + "type_info": "Bool" + }, + { + "name": "l1_tx_count", + "ordinal": 3, + "type_info": "Int4" + }, + { + "name": "l2_tx_count", + "ordinal": 4, + "type_info": "Int4" + }, + { + "name": "fee_account_address", + "ordinal": 5, + "type_info": "Bytea" + }, + { + "name": "bloom", + "ordinal": 6, + "type_info": "Bytea" + }, + { + "name": "priority_ops_onchain_data", + "ordinal": 7, + "type_info": "ByteaArray" + }, + { + "name": "hash", + "ordinal": 8, + "type_info": "Bytea" + }, + { + "name": "parent_hash", + "ordinal": 9, + "type_info": "Bytea" + }, + { + "name": "commitment", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "compressed_write_logs", + "ordinal": 11, + "type_info": "Bytea" + }, + { + "name": "compressed_contracts", + "ordinal": 12, + "type_info": "Bytea" + }, + { + "name": "eth_prove_tx_id", + "ordinal": 13, + "type_info": "Int4" + }, + { + "name": "eth_commit_tx_id", + "ordinal": 14, + "type_info": "Int4" + }, + { + "name": "eth_execute_tx_id", + "ordinal": 15, + "type_info": "Int4" + }, + { + "name": "created_at", + "ordinal": 16, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 17, + "type_info": "Timestamp" + }, + { + "name": "merkle_root_hash", + "ordinal": 18, + "type_info": "Bytea" + }, + { + "name": "l2_to_l1_logs", + "ordinal": 19, + "type_info": "ByteaArray" + }, + { + "name": "l2_to_l1_messages", + "ordinal": 20, + "type_info": "ByteaArray" + }, + { + "name": "predicted_commit_gas_cost", + "ordinal": 21, + "type_info": "Int8" + }, + { + "name": "predicted_prove_gas_cost", + "ordinal": 22, + "type_info": "Int8" + }, + { + "name": "predicted_execute_gas_cost", 
+ "ordinal": 23, + "type_info": "Int8" + }, + { + "name": "initial_bootloader_heap_content", + "ordinal": 24, + "type_info": "Jsonb" + }, + { + "name": "used_contract_hashes", + "ordinal": 25, + "type_info": "Jsonb" + }, + { + "name": "compressed_initial_writes", + "ordinal": 26, + "type_info": "Bytea" + }, + { + "name": "compressed_repeated_writes", + "ordinal": 27, + "type_info": "Bytea" + }, + { + "name": "l2_l1_compressed_messages", + "ordinal": 28, + "type_info": "Bytea" + }, + { + "name": "l2_l1_merkle_root", + "ordinal": 29, + "type_info": "Bytea" + }, + { + "name": "gas_per_pubdata_byte_in_block", + "ordinal": 30, + "type_info": "Int4" + }, + { + "name": "rollup_last_leaf_index", + "ordinal": 31, + "type_info": "Int8" + }, + { + "name": "zkporter_is_available", + "ordinal": 32, + "type_info": "Bool" + }, + { + "name": "bootloader_code_hash", + "ordinal": 33, + "type_info": "Bytea" + }, + { + "name": "default_aa_code_hash", + "ordinal": 34, + "type_info": "Bytea" + }, + { + "name": "base_fee_per_gas", + "ordinal": 35, + "type_info": "Numeric" + }, + { + "name": "gas_per_pubdata_limit", + "ordinal": 36, + "type_info": "Int8" + }, + { + "name": "aux_data_hash", + "ordinal": 37, + "type_info": "Bytea" + }, + { + "name": "pass_through_data_hash", + "ordinal": 38, + "type_info": "Bytea" + }, + { + "name": "meta_parameters_hash", + "ordinal": 39, + "type_info": "Bytea" + }, + { + "name": "skip_proof", + "ordinal": 40, + "type_info": "Bool" + }, + { + "name": "l1_gas_price", + "ordinal": 41, + "type_info": "Int8" + }, + { + "name": "l2_fair_gas_price", + "ordinal": 42, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + true, + true, + true, + false, + false, 
+ false + ], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "SELECT * FROM l1_batches WHERE eth_commit_tx_id IS NULL AND number != 0 AND commitment IS NOT NULL ORDER BY number LIMIT $1" + }, + "f5abda9631a44b209b759c6800970d9669a8b5f0280e20ee9901f7c831ab4762": { + "describe": { + "columns": [ + { + "name": "value!", + "ordinal": 0, + "type_info": "Bytea" + }, + { + "name": "l1_address!", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "l2_address!", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "symbol!", + "ordinal": 3, + "type_info": "Varchar" + }, + { + "name": "name!", + "ordinal": 4, + "type_info": "Varchar" + }, + { + "name": "decimals!", + "ordinal": 5, + "type_info": "Int4" + }, + { + "name": "usd_price?", + "ordinal": 6, + "type_info": "Numeric" + } + ], + "nullable": [ + true, + true, + true, + true, + true, + true, + true + ], + "parameters": { + "Left": [ + "ByteaArray", + "Bytea", + "Bytea" + ] + } + }, + "query": "\n SELECT storage.value as \"value!\",\n tokens.l1_address as \"l1_address!\", tokens.l2_address as \"l2_address!\",\n tokens.symbol as \"symbol!\", tokens.name as \"name!\", tokens.decimals as \"decimals!\", tokens.usd_price as \"usd_price?\"\n FROM storage\n INNER JOIN tokens ON\n storage.address = tokens.l2_address OR (storage.address = $2 AND tokens.l2_address = $3)\n WHERE storage.hashed_key = ANY($1)\n " + }, + "f7ac0e1a473a65b9318c55551c3a83b1316a2270fbafcc73e180bce65496bbe5": { + "describe": { + "columns": [ + { + "name": "l1_batch_number", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "scheduler_witness", + "ordinal": 1, + "type_info": "Bytea" + }, + { + "name": "final_node_aggregations", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "status", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "processing_started_at", + "ordinal": 4, + "type_info": "Timestamp" + }, + { + "name": "time_taken", + "ordinal": 5, + "type_info": "Time" + }, + { + "name": "error", + 
"ordinal": 6, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 8, + "type_info": "Timestamp" + }, + { + "name": "attempts", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "aggregation_result_coords", + "ordinal": 10, + "type_info": "Bytea" + }, + { + "name": "scheduler_witness_blob_url", + "ordinal": 11, + "type_info": "Text" + }, + { + "name": "final_node_aggregations_blob_url", + "ordinal": 12, + "type_info": "Text" + }, + { + "name": "is_blob_cleaned", + "ordinal": 13, + "type_info": "Bool" + } + ], + "nullable": [ + false, + false, + true, + false, + true, + true, + true, + false, + false, + false, + true, + true, + true, + false + ], + "parameters": { + "Left": [ + "Interval", + "Int4" + ] + } + }, + "query": "\n UPDATE scheduler_witness_jobs\n SET status = 'in_progress', attempts = attempts + 1,\n updated_at = now(), processing_started_at = now()\n WHERE l1_batch_number = (\n SELECT l1_batch_number\n FROM scheduler_witness_jobs\n WHERE status = 'queued' \n OR (status = 'in_progress' AND processing_started_at < now() - $1::interval)\n OR (status = 'failed' AND attempts < $2)\n ORDER BY l1_batch_number ASC\n LIMIT 1\n FOR UPDATE\n SKIP LOCKED\n )\n RETURNING scheduler_witness_jobs.*\n " + }, + "f93109d1cc02f5516b40a4a29082a46fd6fa66972bae710d08cfe6a1484b1616": { + "describe": { + "columns": [ + { + "name": "assembly_code", + "ordinal": 0, + "type_info": "Text" + }, + { + "name": "pc_line_mapping", + "ordinal": 1, + "type_info": "Jsonb" + } + ], + "nullable": [ + false, + false + ], + "parameters": { + "Left": [ + "Bytea" + ] + } + }, + "query": "SELECT assembly_code, pc_line_mapping FROM contract_sources WHERE address = $1" + }, + "fa006dda8f56abb70afc5ba8b6da631747d17ebd03a37ddb72914c4ed2aeb2f5": { + "describe": { + "columns": [ + { + "name": "trace", + "ordinal": 0, + "type_info": "Jsonb" + } + ], + "nullable": [ + false + ], + "parameters": { + "Left": [ 
+ "Bytea" + ] + } + }, + "query": "SELECT trace FROM transaction_traces WHERE tx_hash = $1" + }, + "fa2e61fb1ad09bb10260598f8daec2a22f5cb74a2a6fa3d6f0cda2d7f62d410e": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + } + }, + "query": "\n UPDATE events SET tx_initiator_address = transactions.initiator_address\n FROM transactions WHERE transactions.hash = events.tx_hash AND events.miniblock_number BETWEEN $1 AND $2\n " + }, + "fa33d51f8627376832b11bb174354e65e645ee2fb81564a97725518f47ae6f57": { + "describe": { + "columns": [ + { + "name": "number", + "ordinal": 0, + "type_info": "Int8" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [] + } + }, + "query": "SELECT MAX(number) as \"number\" FROM l1_batches" + }, + "fa6ef06edd04d20ddbdf22a63092222e89bb84d6093b07bda16407811d9c33c0": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int4" + }, + { + "name": "nonce", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "raw_tx", + "ordinal": 2, + "type_info": "Bytea" + }, + { + "name": "contract_address", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "tx_type", + "ordinal": 4, + "type_info": "Text" + }, + { + "name": "gas_used", + "ordinal": 5, + "type_info": "Int8" + }, + { + "name": "created_at", + "ordinal": 6, + "type_info": "Timestamp" + }, + { + "name": "updated_at", + "ordinal": 7, + "type_info": "Timestamp" + }, + { + "name": "has_failed", + "ordinal": 8, + "type_info": "Bool" + }, + { + "name": "sent_at_block", + "ordinal": 9, + "type_info": "Int4" + }, + { + "name": "confirmed_eth_tx_history_id", + "ordinal": 10, + "type_info": "Int4" + }, + { + "name": "predicted_gas_cost", + "ordinal": 11, + "type_info": "Int8" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + true, + false, + false, + false, + true, + true, + false + ], + "parameters": { + "Left": [ + "Int4" + ] + } + }, + "query": "SELECT * FROM eth_txs WHERE id = 
$1" + }, + "fadc14334d48d2b29acd8433245e337655aeb3472c65922b7949ba84b32e9938": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Text" + ] + } + }, + "query": "INSERT INTO witness_inputs(l1_batch_number, merkle_tree_paths, merkel_tree_paths_blob_url, status, created_at, updated_at) VALUES ($1, $2, $3, 'queued', now(), now())\n ON CONFLICT (l1_batch_number) DO NOTHING" + }, + "fb016c8fa4983478572c0e9dcffd058d226cec181f4336925fb2d19752fd7427": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8", + "Bytea", + "Bytea", + "Text", + "Text", + "Int4" + ] + } + }, + "query": "\n INSERT INTO leaf_aggregation_witness_jobs\n (l1_batch_number, basic_circuits, basic_circuits_inputs, basic_circuits_blob_url, basic_circuits_inputs_blob_url, number_of_basic_circuits, status, created_at, updated_at)\n VALUES ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', now(), now())\n " + }, + "fc52c356fd09d82da89a435d08398d9b773494491404b5c84fc14c1c1d374b59": { + "describe": { + "columns": [], + "nullable": [], + "parameters": { + "Left": [ + "Int8" + ] + } + }, + "query": "\n UPDATE contract_verification_requests\n SET status = 'successful', updated_at = now()\n WHERE id = $1\n " + } +} \ No newline at end of file diff --git a/core/lib/dal/src/blocks_dal.rs b/core/lib/dal/src/blocks_dal.rs new file mode 100644 index 000000000000..6ae5014e46d7 --- /dev/null +++ b/core/lib/dal/src/blocks_dal.rs @@ -0,0 +1,911 @@ +use std::collections::HashMap; +use std::convert::{Into, TryInto}; +use std::time::Instant; + +use bigdecimal::{BigDecimal, FromPrimitive, ToPrimitive}; +use sqlx::Row; + +use zksync_types::aggregated_operations::AggregatedActionType; +use zksync_types::commitment::{BlockWithMetadata, CommitmentSerializable}; + +use zksync_types::MAX_GAS_PER_PUBDATA_BYTE; + +use zksync_types::{ + block::{BlockGasCount, L1BatchHeader, MiniblockHeader}, + commitment::BlockMetadata, + L1BatchNumber, 
MiniblockNumber, H256, +}; + +use crate::{models::storage_block::StorageBlock, StorageProcessor}; + +#[derive(Debug)] +pub struct BlocksDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl BlocksDal<'_, '_> { + pub fn is_genesis_needed(&mut self) -> bool { + async_std::task::block_on(async { + let count: i64 = sqlx::query!(r#"SELECT COUNT(*) as "count!" FROM l1_batches"#) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count; + count == 0 + }) + } + + pub fn get_sealed_block_number(&mut self) -> L1BatchNumber { + async_std::task::block_on(async { + let started_at = Instant::now(); + let number: i64 = sqlx::query!( + r#"SELECT MAX(number) as "number" FROM l1_batches WHERE is_finished = TRUE"# + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .number + .expect("DAL invocation before genesis"); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_sealed_block_number"); + L1BatchNumber(number as u32) + }) + } + + pub fn get_sealed_miniblock_number(&mut self) -> MiniblockNumber { + async_std::task::block_on(async { + let started_at = Instant::now(); + let number: i64 = sqlx::query!(r#"SELECT MAX(number) as "number" FROM miniblocks"#) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .number + .unwrap_or(0); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_sealed_miniblock_number"); + MiniblockNumber(number as u32) + }) + } + + pub fn get_last_block_number_with_metadata(&mut self) -> L1BatchNumber { + async_std::task::block_on(async { + let started_at = Instant::now(); + let number: i64 = sqlx::query!( + r#"SELECT MAX(number) as "number" FROM l1_batches WHERE hash IS NOT NULL"# + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .number + .expect("DAL invocation before genesis"); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_last_block_number_with_metadata"); + L1BatchNumber(number as u32) + }) + } + + pub fn get_blocks_for_eth_tx_id(&mut 
self, eth_tx_id: u32) -> Vec { + async_std::task::block_on(async { + let blocks = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches + WHERE eth_commit_tx_id = $1 OR eth_prove_tx_id = $1 OR eth_execute_tx_id = $1", + eth_tx_id as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + blocks.into_iter().map(|bl| bl.into()).collect() + }) + } + + fn get_storage_block(&mut self, number: L1BatchNumber) -> Option { + async_std::task::block_on(async { + sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches WHERE number = $1", + number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + }) + } + + pub fn get_block_header(&mut self, number: L1BatchNumber) -> Option { + self.get_storage_block(number).map(Into::into) + } + + pub fn set_eth_tx_id( + &mut self, + first_block: L1BatchNumber, + last_block: L1BatchNumber, + eth_tx_id: u32, + aggregation_type: AggregatedActionType, + ) { + async_std::task::block_on(async { + match aggregation_type { + AggregatedActionType::CommitBlocks => { + sqlx::query!( + "UPDATE l1_batches \ + SET eth_commit_tx_id = $1, updated_at = now() \ + WHERE number BETWEEN $2 AND $3", + eth_tx_id as i32, + *first_block as i64, + *last_block as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + AggregatedActionType::PublishProofBlocksOnchain => { + sqlx::query!( + "UPDATE l1_batches \ + SET eth_prove_tx_id = $1, updated_at = now() \ + WHERE number BETWEEN $2 AND $3", + eth_tx_id as i32, + *first_block as i64, + *last_block as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + AggregatedActionType::ExecuteBlocks => { + sqlx::query!( + "UPDATE l1_batches \ + SET eth_execute_tx_id = $1, updated_at = now() \ + WHERE number BETWEEN $2 AND $3", + eth_tx_id as i32, + *first_block as i64, + *last_block as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + } + }) + } + + pub fn insert_l1_batch(&mut self, block: L1BatchHeader, predicted_block_gas: BlockGasCount) { 
+ async_std::task::block_on(async { + let priority_onchain_data: Vec> = block + .priority_ops_onchain_data + .iter() + .map(|data| data.clone().into()) + .collect(); + let l2_to_l1_logs: Vec> = block + .l2_to_l1_logs + .iter() + .map(|log| log.clone().to_bytes()) + .collect(); + + let initial_bootloader_contents = + serde_json::to_value(block.initial_bootloader_contents) + .expect("failed to serialize initial_bootloader_contents to JSON value"); + + let used_contract_hashes = serde_json::to_value(block.used_contract_hashes) + .expect("failed to serialize used_contract_hashes to JSON value"); + + let base_fee_per_gas = BigDecimal::from_u64(block.base_fee_per_gas) + .expect("block.base_fee_per_gas should fit in u64"); + + sqlx::query!( + "INSERT INTO l1_batches (number, l1_tx_count, l2_tx_count, + timestamp, is_finished, fee_account_address, l2_to_l1_logs, l2_to_l1_messages, bloom, priority_ops_onchain_data, + predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost, + initial_bootloader_heap_content, used_contract_hashes, base_fee_per_gas, l1_gas_price, l2_fair_gas_price, + created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, now(), now()) + ", + block.number.0 as i64, + block.l1_tx_count as i32, + block.l2_tx_count as i32, + block.timestamp as i64, + block.is_finished, + block.fee_account_address.as_bytes(), + &l2_to_l1_logs, + &block.l2_to_l1_messages, + block.bloom.as_bytes(), + &priority_onchain_data, + predicted_block_gas.commit as i64, + predicted_block_gas.prove as i64, + predicted_block_gas.execute as i64, + initial_bootloader_contents, + used_contract_hashes, + base_fee_per_gas, + block.l1_gas_price as i64, + block.l2_fair_gas_price as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn insert_miniblock(&mut self, miniblock_header: MiniblockHeader) { + let base_fee_per_gas = BigDecimal::from_u64(miniblock_header.base_fee_per_gas) + 
.expect("base_fee_per_gas should fit in u64"); + + async_std::task::block_on(async { + sqlx::query!( + " + INSERT INTO miniblocks ( + number, timestamp, hash, l1_tx_count, l2_tx_count, + base_fee_per_gas, l1_gas_price, l2_fair_gas_price, gas_per_pubdata_limit, created_at, updated_at + ) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, now(), now()) + ", + miniblock_header.number.0 as i64, + miniblock_header.timestamp as i64, + miniblock_header.hash.as_bytes(), + miniblock_header.l1_tx_count as i32, + miniblock_header.l2_tx_count as i32, + base_fee_per_gas, + miniblock_header.l1_gas_price as i64, + miniblock_header.l2_fair_gas_price as i64, + MAX_GAS_PER_PUBDATA_BYTE as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_miniblock_header( + &mut self, + miniblock_number: MiniblockNumber, + ) -> Option { + async_std::task::block_on(async { + sqlx::query!( + " + SELECT number, timestamp, hash, l1_tx_count, l2_tx_count, + base_fee_per_gas, l1_gas_price, l2_fair_gas_price + FROM miniblocks + WHERE number = $1 + ", + miniblock_number.0 as i64, + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| MiniblockHeader { + number: MiniblockNumber(row.number as u32), + timestamp: row.timestamp as u64, + hash: H256::from_slice(&row.hash), + l1_tx_count: row.l1_tx_count as u16, + l2_tx_count: row.l2_tx_count as u16, + base_fee_per_gas: row + .base_fee_per_gas + .to_u64() + .expect("base_fee_per_gas should fit in u64"), + l1_gas_price: row.l1_gas_price as u64, + l2_fair_gas_price: row.l2_fair_gas_price as u64, + }) + }) + } + + pub fn mark_miniblocks_as_executed_in_l1_batch(&mut self, l1_batch_number: L1BatchNumber) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE miniblocks + SET l1_batch_number = $1 + WHERE l1_batch_number IS NULL + ", + l1_batch_number.0 as i32, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn save_block_metadata( + &mut self, + block_number: L1BatchNumber, + 
block_metadata: BlockMetadata, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE l1_batches + SET hash = $1, merkle_root_hash = $2, commitment = $3, default_aa_code_hash = $4, + compressed_repeated_writes = $5, compressed_initial_writes = $6, l2_l1_compressed_messages = $7, + l2_l1_merkle_root = $8, + zkporter_is_available = $9, bootloader_code_hash = $10, rollup_last_leaf_index = $11, + aux_data_hash = $12, pass_through_data_hash = $13, meta_parameters_hash = $14, + updated_at = now() + WHERE number = $15 + ", + block_metadata.root_hash.as_bytes(), + block_metadata.merkle_root_hash.as_bytes(), + block_metadata.commitment.as_bytes(), + block_metadata.block_meta_params.default_aa_code_hash.as_bytes(), + block_metadata.repeated_writes_compressed, + block_metadata.initial_writes_compressed, + block_metadata.l2_l1_messages_compressed, + block_metadata.l2_l1_merkle_root.as_bytes(), + block_metadata.block_meta_params.zkporter_is_available, + block_metadata.block_meta_params.bootloader_code_hash.as_bytes(), + block_metadata.rollup_last_leaf_index as i64, + block_metadata.aux_data_hash.as_bytes(), + block_metadata.pass_through_data_hash.as_bytes(), + block_metadata.meta_parameters_hash.as_bytes(), + block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn save_blocks_metadata( + &mut self, + block_number: L1BatchNumber, + block_metadata: BlockMetadata, + previous_root_hash: H256, + ) { + async_std::task::block_on(async { + let started_at = Instant::now(); + let update_result = sqlx::query!( + " + UPDATE l1_batches SET + hash = $1, merkle_root_hash = $2, commitment = $3, default_aa_code_hash = $4, + compressed_repeated_writes = $5, compressed_initial_writes = $6, l2_l1_compressed_messages = $7, + l2_l1_merkle_root = $8, zkporter_is_available = $9, + bootloader_code_hash = $10, parent_hash = $11, rollup_last_leaf_index = $12, + aux_data_hash = $13, pass_through_data_hash = $14, meta_parameters_hash = $15, + 
updated_at = NOW() + WHERE number = $16 AND hash IS NULL + ", + block_metadata.root_hash.as_bytes(), + block_metadata.merkle_root_hash.as_bytes(), + block_metadata.commitment.as_bytes(), + block_metadata.block_meta_params.default_aa_code_hash.as_bytes(), + block_metadata.repeated_writes_compressed, + block_metadata.initial_writes_compressed, + block_metadata.l2_l1_messages_compressed, + block_metadata.l2_l1_merkle_root.as_bytes(), + block_metadata.block_meta_params.zkporter_is_available, + block_metadata.block_meta_params.bootloader_code_hash.as_bytes(), + previous_root_hash.0.to_vec(), + block_metadata.rollup_last_leaf_index as i64, + block_metadata.aux_data_hash.as_bytes(), + block_metadata.pass_through_data_hash.as_bytes(), + block_metadata.meta_parameters_hash.as_bytes(), + block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + if update_result.rows_affected() == 0 { + vlog::debug!( + "L1 batch {} info wasn't updated. Details: root_hash: {:?}, merkle_root_hash: {:?}, parent_hash: {:?}, commitment: {:?}, l2_l1_merkle_root: {:?}", + block_number.0 as i64, + block_metadata.root_hash.0.to_vec(), + block_metadata.merkle_root_hash.0.to_vec(), + previous_root_hash.0.to_vec(), + block_metadata.commitment.0.to_vec(), + block_metadata.l2_l1_merkle_root.as_bytes() + ); + + // block was already processed. Verify that existing hashes match + let matched: i64 = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" + FROM l1_batches + WHERE number = $1 + AND hash = $2 + AND merkle_root_hash = $3 + AND parent_hash = $4 + AND l2_l1_merkle_root = $5 + "#, + block_number.0 as i64, + block_metadata.root_hash.0.to_vec(), + block_metadata.merkle_root_hash.0.to_vec(), + previous_root_hash.0.to_vec(), + block_metadata.l2_l1_merkle_root.as_bytes(), + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count; + + assert_eq!(matched, 1, "Root hash verification failed. 
Hashes for some of previously processed blocks do not match"); + } + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "save_blocks_metadata"); + }) + } + + pub fn get_last_committed_to_eth_block(&mut self) -> Option { + async_std::task::block_on(async { + // We can get 0 block for the first transaction + let block = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches \ + WHERE number = 0 OR eth_commit_tx_id IS NOT NULL AND commitment IS NOT NULL \ + ORDER BY number DESC LIMIT 1", + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + // genesis block is first generated without commitment, we should wait for the tree to set it. + block.commitment.as_ref()?; + + self.get_block_with_metadata(block) + }) + } + + /// Returns the number of the last block for which an Ethereum execute tx was sent and confirmed. + pub fn get_number_of_last_block_executed_on_eth(&mut self) -> Option { + async_std::task::block_on(async { + sqlx::query!( + "SELECT number FROM l1_batches + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id) + WHERE execute_tx.confirmed_at IS NOT NULL + ORDER BY number DESC LIMIT 1" + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|record| L1BatchNumber(record.number as u32)) + }) + } + + /// This method returns blocks for which the proofs are computed + pub fn get_ready_for_proof_blocks_real_verifier( + &mut self, + limit: usize, + ) -> Vec { + async_std::task::block_on(async { + let last_proved_block_number_row = sqlx::query!( + r#"SELECT COALESCE(max(number), 0) as "number!" 
FROM l1_batches + WHERE eth_prove_tx_id IS NOT NULL"# + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + let last_proved_block_number = + L1BatchNumber(last_proved_block_number_row.number as u32); + // note that the proofs can be generated out of order, so + // `WHERE l1_batches.number - row_number = $1` is used to avoid having gaps in the list of blocks to proof + // note that we need to manually list all the columns in `l1_batches` table here - we cannot use `*` because there is one extra column (`row_number`) + let l1_batches = sqlx::query_as!( + StorageBlock, + " + SELECT number, timestamp, is_finished, l1_tx_count, l2_tx_count, fee_account_address, bloom, priority_ops_onchain_data, hash, parent_hash, commitment, compressed_write_logs, compressed_contracts, eth_prove_tx_id, eth_commit_tx_id, eth_execute_tx_id, created_at, updated_at, merkle_root_hash, l2_to_l1_logs, l2_to_l1_messages, predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost, initial_bootloader_heap_content, used_contract_hashes, compressed_initial_writes, compressed_repeated_writes, l2_l1_compressed_messages, l2_l1_merkle_root, l1_gas_price, l2_fair_gas_price, rollup_last_leaf_index, zkporter_is_available, bootloader_code_hash, default_aa_code_hash, base_fee_per_gas, aux_data_hash, pass_through_data_hash, meta_parameters_hash, skip_proof, gas_per_pubdata_byte_in_block, gas_per_pubdata_limit + FROM + (SELECT l1_batches.*, row_number() over (order by number ASC) as row_number + FROM l1_batches + LEFT JOIN prover_jobs ON prover_jobs.l1_batch_number = l1_batches.number + WHERE eth_commit_tx_id IS NOT NULL + AND prover_jobs.aggregation_round = 3 + AND prover_jobs.status = 'successful' + AND l1_batches.number > $1 + ORDER BY number LIMIT $2) inn + WHERE number - row_number = $1 + ", + last_proved_block_number.0 as i32, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|block| { + 
self.get_block_with_metadata(block) + .expect("Block should be complete") + }) + .collect() + }) + } + + /// This method returns blocks that are confirmed on L1. That is, it doesn't wait for the proofs to be generated. + pub fn get_ready_for_dummy_proof_blocks(&mut self, limit: usize) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches \ + WHERE eth_commit_tx_id IS NOT NULL AND eth_prove_tx_id IS NULL \ + ORDER BY number LIMIT $1", + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|block| { + self.get_block_with_metadata(block) + .expect("Block should be complete") + }) + .collect() + }) + } + + pub fn set_skip_proof_for_l1_batch(&mut self, l1_batch_number: L1BatchNumber) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE l1_batches + SET skip_proof = TRUE WHERE number = $1 + ", + l1_batch_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + /// This method returns blocks that are committed on L1 and witness jobs for them are skipped. + pub fn get_skipped_for_proof_blocks(&mut self, limit: usize) -> Vec { + async_std::task::block_on(async { + let last_proved_block_number_row = sqlx::query!( + r#"SELECT COALESCE(max(number), 0) as "number!" 
FROM l1_batches + WHERE eth_prove_tx_id IS NOT NULL"# + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + let last_proved_block_number = + L1BatchNumber(last_proved_block_number_row.number as u32); + // note that the witness jobs can be processed out of order, so + // `WHERE l1_batches.number - row_number = $1` is used to avoid having gaps in the list of blocks to send dummy proofs for + // note that we need to manually list all the columns in `l1_batches` table here - we cannot use `*` because there is one extra column (`row_number`) + let l1_batches = sqlx::query_as!( + StorageBlock, + " + SELECT number, timestamp, is_finished, l1_tx_count, l2_tx_count, fee_account_address, bloom, priority_ops_onchain_data, hash, parent_hash, commitment, compressed_write_logs, compressed_contracts, eth_prove_tx_id, eth_commit_tx_id, eth_execute_tx_id, created_at, updated_at, merkle_root_hash, l2_to_l1_logs, l2_to_l1_messages, predicted_commit_gas_cost, predicted_prove_gas_cost, predicted_execute_gas_cost, initial_bootloader_heap_content, used_contract_hashes, compressed_initial_writes, compressed_repeated_writes, l2_l1_compressed_messages, l2_l1_merkle_root, l1_gas_price, l2_fair_gas_price, rollup_last_leaf_index, zkporter_is_available, bootloader_code_hash, default_aa_code_hash, base_fee_per_gas, aux_data_hash, pass_through_data_hash, meta_parameters_hash, skip_proof, gas_per_pubdata_byte_in_block, gas_per_pubdata_limit + FROM + (SELECT l1_batches.*, row_number() over (order by number ASC) as row_number + FROM l1_batches + WHERE eth_commit_tx_id IS NOT NULL + AND l1_batches.skip_proof = TRUE + AND l1_batches.number > $1 + ORDER BY number LIMIT $2) inn + WHERE number - row_number = $1 + ", + last_proved_block_number.0 as i32, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|block| { + self.get_block_with_metadata(block) + .expect("Block should be complete") + }) + .collect() + }) + } + + pub fn 
get_ready_for_execute_blocks(&mut self, limit: usize) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches \ + WHERE eth_prove_tx_id IS NOT NULL AND eth_execute_tx_id IS NULL \ + ORDER BY number LIMIT $1", + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|block| { + self.get_block_with_metadata(block) + .expect("Block should be complete") + }) + .collect() + }) + } + + pub fn get_ready_for_commit_blocks(&mut self, limit: usize) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches \ + WHERE eth_commit_tx_id IS NULL \ + AND number != 0 \ + AND commitment IS NOT NULL \ + ORDER BY number LIMIT $1", + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|block| { + self.get_block_with_metadata(block) + .expect("Block should be complete") + }) + .collect() + }) + } + + pub fn get_block_state_root(&mut self, number: L1BatchNumber) -> Option { + async_std::task::block_on(async { + let hash: Option<_> = sqlx::query!( + "SELECT hash FROM l1_batches WHERE number = $1", + number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .and_then(|row| row.hash) + .map(|hash| H256::from_slice(&hash)); + hash + }) + } + + pub fn get_merkle_state_root(&mut self, number: L1BatchNumber) -> Option { + async_std::task::block_on(async { + let hash: Option> = sqlx::query!( + "SELECT merkle_root_hash FROM l1_batches WHERE number = $1", + number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .merkle_root_hash; + hash.map(|hash| H256::from_slice(&hash)) + }) + } + + pub fn get_newest_block_header(&mut self) -> L1BatchHeader { + async_std::task::block_on(async { + let last_block = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches + ORDER BY number DESC + LIMIT 1" + ) + 
.fetch_one(self.storage.conn()) + .await + .unwrap(); + last_block.into() + }) + } + + pub fn get_block_metadata(&mut self, number: L1BatchNumber) -> Option { + async_std::task::block_on(async { + let l1_batch: Option = sqlx::query_as!( + StorageBlock, + "SELECT * FROM l1_batches WHERE number = $1", + number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + + l1_batch.and_then(|bl| self.get_block_with_metadata(bl)) + }) + } + + fn get_block_with_metadata( + &mut self, + storage_block: StorageBlock, + ) -> Option { + async_std::task::block_on(async { + let unsorted_factory_deps = + self.get_l1_batch_factory_deps(L1BatchNumber(storage_block.number as u32)); + let block_header = storage_block.clone().try_into().ok()?; + let block_metadata = storage_block.try_into().ok()?; + + Some(BlockWithMetadata::new( + block_header, + block_metadata, + unsorted_factory_deps, + )) + }) + } + + pub fn get_l1_batch_factory_deps( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> HashMap> { + async_std::task::block_on(async { + sqlx::query!( + "SELECT bytecode_hash, bytecode FROM factory_deps + INNER JOIN miniblocks ON miniblocks.number = factory_deps.miniblock_number + WHERE miniblocks.l1_batch_number = $1", + l1_batch_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| (H256::from_slice(&row.bytecode_hash), row.bytecode)) + .collect() + }) + } + + pub fn delete_l1_batches(&mut self, block_number: L1BatchNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM l1_batches WHERE number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn delete_miniblocks(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM miniblocks WHERE number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + /// Returns sum of predicted gas costs or 
given block range. + /// Panics if the sum doesn't fit into usize. + pub fn get_blocks_predicted_gas( + &mut self, + from_block: L1BatchNumber, + to_block: L1BatchNumber, + op_type: AggregatedActionType, + ) -> u32 { + async_std::task::block_on(async { + let column_name = match op_type { + AggregatedActionType::CommitBlocks => "predicted_commit_gas_cost", + AggregatedActionType::PublishProofBlocksOnchain => "predicted_prove_gas_cost", + AggregatedActionType::ExecuteBlocks => "predicted_execute_gas_cost", + }; + let sql_query_str = format!( + " + SELECT COALESCE(SUM({}),0) as sum FROM l1_batches + WHERE number BETWEEN {} AND {} + ", + column_name, from_block, to_block + ); + sqlx::query(&sql_query_str) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .get::("sum") + .to_u32() + .expect("Sum of predicted gas costs should fit into u32") + }) + } + + pub fn update_predicted_block_commit_gas( + &mut self, + l1_batch_number: L1BatchNumber, + predicted_gas_cost: u32, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE l1_batches + SET predicted_commit_gas_cost = $2, updated_at = now() + WHERE number = $1 + ", + l1_batch_number.0 as i64, + predicted_gas_cost as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_miniblock_range_of_l1_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Option<(MiniblockNumber, MiniblockNumber)> { + async_std::task::block_on(async { + let row = sqlx::query!( + r#" + SELECT MIN(miniblocks.number) as "min?", MAX(miniblocks.number) as "max?" 
+ FROM miniblocks + WHERE l1_batch_number = $1 + "#, + l1_batch_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + match (row.min, row.max) { + (Some(min), Some(max)) => { + Some((MiniblockNumber(min as u32), MiniblockNumber(max as u32))) + } + (None, None) => None, + _ => unreachable!(), + } + }) + } + + pub fn get_l1_batches_with_blobs_in_db(&mut self, limit: u8) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query!( + r#" + SELECT l1_batch_number FROM witness_inputs + WHERE length(merkle_tree_paths) <> 0 + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .collect() + }) + } + + pub fn purge_blobs_from_db(&mut self, l1_batches: Vec) { + let l1_batches: Vec = l1_batches + .iter() + .map(|l1_batch| l1_batch.0 as i64) + .collect(); + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE witness_inputs + SET merkle_tree_paths='' + WHERE l1_batch_number = ANY($1); + "#, + &l1_batches[..] 
+ ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_merkle_tree_paths_blob_urls_to_be_cleaned( + &mut self, + limit: u8, + ) -> Vec<(i64, String)> { + async_std::task::block_on(async { + let job_ids = sqlx::query!( + r#" + SELECT l1_batch_number, merkel_tree_paths_blob_url FROM witness_inputs + WHERE status='successful' AND is_blob_cleaned=FALSE + AND merkel_tree_paths_blob_url is NOT NULL + AND updated_at < NOW() - INTERVAL '2 days' + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids + .into_iter() + .map(|row| (row.l1_batch_number, row.merkel_tree_paths_blob_url.unwrap())) + .collect() + }) + } + + pub fn mark_gcs_blobs_as_cleaned(&mut self, l1_batch_numbers: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE witness_inputs + SET is_blob_cleaned=TRUE + WHERE l1_batch_number = ANY($1); + "#, + &l1_batch_numbers[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} diff --git a/core/lib/dal/src/blocks_web3_dal.rs b/core/lib/dal/src/blocks_web3_dal.rs new file mode 100644 index 000000000000..88f94e8be995 --- /dev/null +++ b/core/lib/dal/src/blocks_web3_dal.rs @@ -0,0 +1,397 @@ +use crate::models::storage_block::{ + bind_block_where_sql_params, web3_block_number_to_sql, web3_block_where_sql, +}; +use crate::models::storage_transaction::{extract_web3_transaction, web3_transaction_select_sql}; +use crate::SqlxError; +use crate::StorageProcessor; +use bigdecimal::BigDecimal; +use sqlx::postgres::PgArguments; +use sqlx::query::Query; +use sqlx::{Postgres, Row}; +use std::time::Instant; +use vm::utils::BLOCK_GAS_LIMIT; +use zksync_config::constants::EMPTY_UNCLES_HASH; + +use zksync_types::api::{self, Block, BlockId, TransactionVariant}; +use zksync_types::l2_to_l1_log::L2ToL1Log; +use zksync_types::web3::types::{BlockHeader, U64}; +use zksync_types::{L1BatchNumber, L2ChainId, MiniblockNumber, H160, H256, U256}; +use zksync_utils::{bigdecimal_to_u256, 
miniblock_hash}; +use zksync_web3_decl::error::Web3Error; + +#[derive(Debug)] +pub struct BlocksWeb3Dal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl BlocksWeb3Dal<'_, '_> { + pub fn get_sealed_miniblock_number(&mut self) -> Result { + async_std::task::block_on(async { + let started_at = Instant::now(); + let number: i64 = sqlx::query!(r#"SELECT MAX(number) as "number" FROM miniblocks"#) + .fetch_one(self.storage.conn()) + .await? + .number + .expect("DAL invocation before genesis"); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_sealed_block_number"); + Ok(MiniblockNumber(number as u32)) + }) + } + + pub fn get_sealed_l1_batch_number(&mut self) -> Result { + async_std::task::block_on(async { + let started_at = Instant::now(); + let number: i64 = sqlx::query!(r#"SELECT MAX(number) as "number" FROM l1_batches"#) + .fetch_one(self.storage.conn()) + .await? + .number + .expect("DAL invocation before genesis"); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_sealed_block_number"); + Ok(L1BatchNumber(number as u32)) + }) + } + + pub fn get_block_by_web3_block_id( + &mut self, + block_id: BlockId, + include_full_transactions: bool, + chain_id: L2ChainId, + ) -> Result>, SqlxError> { + async_std::task::block_on(async { + let transactions_sql = if include_full_transactions { + web3_transaction_select_sql() + } else { + "transactions.hash as tx_hash" + }; + + let query = format!( + " + SELECT + miniblocks.hash as block_hash, + miniblocks.number, + miniblocks.l1_batch_number, + miniblocks.timestamp, + miniblocks.base_fee_per_gas, + l1_batches.timestamp as l1_batch_timestamp, + transactions.gas_limit as gas_limit, + transactions.refunded_gas as refunded_gas, + {} + FROM miniblocks + LEFT JOIN l1_batches + ON l1_batches.number = miniblocks.l1_batch_number + LEFT JOIN transactions + ON transactions.miniblock_number = miniblocks.number + WHERE {} + ", + transactions_sql, + web3_block_where_sql(block_id, 
1) + ); + + let query: Query = + bind_block_where_sql_params(block_id, sqlx::query(&query)); + + let block = query + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .fold( + Option::>::None, + |prev_block, db_row| { + let mut block: Block = prev_block.unwrap_or({ + // This code will be only executed for the first row in the DB response. + // All other rows will only be used to extract relevant transactions. + let hash = db_row + .try_get("block_hash") + .map(H256::from_slice) + .unwrap_or_else(|_| H256::zero()); + let number = U64::from(db_row.get::("number")); + let l1_batch_number = db_row + .try_get::("l1_batch_number") + .map(U64::from) + .ok(); + let l1_batch_timestamp = db_row + .try_get::("l1_batch_timestamp") + .map(U256::from) + .ok(); + let parent_hash = match number.as_u32() { + 0 => H256::zero(), + number => miniblock_hash(MiniblockNumber(number - 1)), + }; + + Block { + hash, + parent_hash, + uncles_hash: EMPTY_UNCLES_HASH, + author: H160::zero(), + state_root: H256::zero(), + transactions_root: H256::zero(), + receipts_root: H256::zero(), + number, + l1_batch_number, + gas_used: Default::default(), + gas_limit: BLOCK_GAS_LIMIT.into(), + base_fee_per_gas: bigdecimal_to_u256( + db_row.get::("base_fee_per_gas"), + ), + extra_data: Default::default(), + // todo logs + logs_bloom: Default::default(), + timestamp: U256::from(db_row.get::("timestamp")), + l1_batch_timestamp, + difficulty: Default::default(), + total_difficulty: Default::default(), + seal_fields: vec![], + uncles: vec![], + transactions: Vec::default(), + size: Default::default(), + mix_hash: Default::default(), + nonce: Default::default(), + } + }); + if db_row.try_get::<&[u8], &str>("tx_hash").is_ok() { + let tx_gas_limit: U256 = + bigdecimal_to_u256(db_row.get::("gas_limit")); + let tx_refunded_gas: U256 = + ((db_row.get::("refunded_gas")) as u32).into(); + + block.gas_used += tx_gas_limit - tx_refunded_gas; + let tx = if include_full_transactions { + 
TransactionVariant::Full(extract_web3_transaction(db_row, chain_id)) + } else { + TransactionVariant::Hash(H256::from_slice(db_row.get("tx_hash"))) + }; + block.transactions.push(tx); + } + Some(block) + }, + ); + Ok(block) + }) + } + + pub fn get_block_tx_count(&mut self, block_id: BlockId) -> Result, SqlxError> { + async_std::task::block_on(async { + let query = format!( + "SELECT l1_tx_count + l2_tx_count as tx_count FROM miniblocks WHERE {}", + web3_block_where_sql(block_id, 1) + ); + let query: Query = + bind_block_where_sql_params(block_id, sqlx::query(&query)); + + let tx_count: Option = query + .fetch_optional(self.storage.conn()) + .await? + .map(|db_row| db_row.get("tx_count")); + + Ok(tx_count.map(|t| (t as u32).into())) + }) + } + + /// Returns hashes of blocks with numbers greater than `from_block` and the number of the last block. + pub fn get_block_hashes_after( + &mut self, + from_block: MiniblockNumber, + limit: usize, + ) -> Result<(Vec, Option), SqlxError> { + async_std::task::block_on(async { + let records = sqlx::query!( + " + SELECT number, hash FROM miniblocks + WHERE number > $1 + ORDER BY number ASC + LIMIT $2 + ", + from_block.0 as i64, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await?; + let last_block_number = records + .last() + .map(|record| MiniblockNumber(record.number as u32)); + let hashes = records + .into_iter() + .map(|record| H256::from_slice(&record.hash)) + .collect(); + Ok((hashes, last_block_number)) + }) + } + + /// Returns hashes of blocks with numbers greater than `from_block` and the number of the last block. 
+ pub fn get_block_headers_after( + &mut self, + from_block: MiniblockNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let records = sqlx::query!( + " + SELECT + hash, + number, + timestamp + FROM miniblocks + WHERE number > $1 + ORDER BY number ASC + ", + from_block.0 as i64, + ) + .fetch_all(self.storage.conn()) + .await?; + let blocks: Vec = records + .into_iter() + .map(|db_row| BlockHeader { + hash: Some(H256::from_slice(&db_row.hash)), + parent_hash: H256::zero(), + uncles_hash: EMPTY_UNCLES_HASH, + author: H160::zero(), + state_root: H256::zero(), + transactions_root: H256::zero(), + receipts_root: H256::zero(), + number: Some(U64::from(db_row.number)), + gas_used: Default::default(), + gas_limit: Default::default(), + base_fee_per_gas: Default::default(), + extra_data: Default::default(), + // todo logs + logs_bloom: Default::default(), + timestamp: U256::from(db_row.timestamp), + difficulty: Default::default(), + mix_hash: None, + nonce: None, + }) + .collect(); + Ok(blocks) + }) + } + + pub fn resolve_block_id( + &mut self, + block_id: api::BlockId, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let query_string = match block_id { + api::BlockId::Hash(_) => { + "SELECT number FROM miniblocks WHERE hash = $1".to_string() + } + api::BlockId::Number(api::BlockNumber::Number(_)) => { + "SELECT number FROM miniblocks WHERE number = $1".to_string() + } + api::BlockId::Number(block_number) => web3_block_number_to_sql(block_number, 1).0, + }; + let row = bind_block_where_sql_params(block_id, sqlx::query(&query_string)) + .fetch_optional(self.storage.conn()) + .await?; + + let block_number = row + .and_then(|row| row.get::, &str>("number")) + .map(|n| MiniblockNumber(n as u32)) + .ok_or(Web3Error::NoBlock); + Ok(block_number) + }) + } + + pub fn get_block_timestamp( + &mut self, + block_number: MiniblockNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let timestamp = sqlx::query!( + r#"SELECT 
timestamp FROM miniblocks WHERE number = $1"#, + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await? + .map(|row| row.timestamp as u64); + Ok(timestamp) + }) + } + + pub fn get_l2_to_l1_logs( + &mut self, + block_number: L1BatchNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let result: Vec> = sqlx::query!( + "SELECT l2_to_l1_logs FROM l1_batches WHERE number = $1", + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await? + .map(|row| row.l2_to_l1_logs) + .unwrap_or_else(Vec::new); + + Ok(result.into_iter().map(L2ToL1Log::from).collect()) + }) + } + + pub fn get_l1_batch_number_of_miniblock( + &mut self, + miniblock_number: MiniblockNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let number: Option = sqlx::query!( + " + SELECT l1_batch_number FROM miniblocks + WHERE number = $1 + ", + miniblock_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await? + .and_then(|row| row.l1_batch_number); + Ok(number.map(|number| L1BatchNumber(number as u32))) + }) + } + + pub fn get_miniblock_range_of_l1_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let row = sqlx::query!( + r#" + SELECT MIN(miniblocks.number) as "min?", MAX(miniblocks.number) as "max?" 
+ FROM miniblocks + WHERE l1_batch_number = $1 + "#, + l1_batch_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await?; + match (row.min, row.max) { + (Some(min), Some(max)) => Ok(Some(( + MiniblockNumber(min as u32), + MiniblockNumber(max as u32), + ))), + (None, None) => Ok(None), + _ => unreachable!(), + } + }) + } + + pub fn get_l1_batch_info_for_tx( + &mut self, + tx_hash: H256, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let row = sqlx::query!( + " + SELECT l1_batch_number, l1_batch_tx_index + FROM transactions + WHERE hash = $1 + ", + tx_hash.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + let result = row.and_then(|row| match (row.l1_batch_number, row.l1_batch_tx_index) { + (Some(l1_batch_number), Some(l1_batch_tx_index)) => Some(( + L1BatchNumber(l1_batch_number as u32), + l1_batch_tx_index as u16, + )), + _ => None, + }); + Ok(result) + }) + } +} diff --git a/core/lib/dal/src/connection/holder.rs b/core/lib/dal/src/connection/holder.rs new file mode 100644 index 000000000000..d35e19bd9980 --- /dev/null +++ b/core/lib/dal/src/connection/holder.rs @@ -0,0 +1,27 @@ +// Built-in deps +use std::fmt; +// External imports +use sqlx::pool::PoolConnection; +use sqlx::{postgres::Postgres, PgConnection, Transaction}; +// Workspace imports +// Local imports + +/// Connection holder unifies the type of underlying connection, which +/// can be either pooled or direct. 
+pub enum ConnectionHolder<'a> { + Pooled(PoolConnection), + Direct(PgConnection), + Transaction(Transaction<'a, Postgres>), + TestTransaction(&'a mut Transaction<'static, Postgres>), +} + +impl<'a> fmt::Debug for ConnectionHolder<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Pooled(_) => write!(f, "Pooled connection"), + Self::Direct(_) => write!(f, "Direct connection"), + Self::Transaction(_) => write!(f, "Database Transaction"), + Self::TestTransaction(_) => write!(f, "Test Database Transaction"), + } + } +} diff --git a/core/lib/dal/src/connection/mod.rs b/core/lib/dal/src/connection/mod.rs new file mode 100644 index 000000000000..91de36ca0fb9 --- /dev/null +++ b/core/lib/dal/src/connection/mod.rs @@ -0,0 +1,95 @@ +// Built-in deps +use std::time::{Duration, Instant}; +// External imports +use async_std::task::{block_on, sleep}; +use sqlx::pool::PoolConnection; +use sqlx::postgres::{PgPool, PgPoolOptions, Postgres}; +// Local imports +use crate::{get_master_database_url, get_replica_database_url, StorageProcessor}; +use zksync_utils::parse_env; + +pub use self::test_pool::TestPool; + +pub mod holder; +pub mod test_pool; + +#[derive(Clone, Debug)] +pub enum ConnectionPool { + Real(PgPool), + Test(TestPool), +} + +impl ConnectionPool { + /// Establishes a pool of the connections to the database and + /// creates a new `ConnectionPool` object. + /// pool_max_size - number of connections in pool, if not set env variable "DATABASE_POOL_SIZE" is going to be used. 
+ pub fn new(pool_max_size: Option, connect_to_master: bool) -> Self { + let database_url = if connect_to_master { + get_master_database_url() + } else { + get_replica_database_url() + }; + let max_connections = pool_max_size.unwrap_or_else(|| parse_env("DATABASE_POOL_SIZE")); + + let options = PgPoolOptions::new().max_connections(max_connections); + let pool = block_on(options.connect(&database_url)).unwrap(); + Self::Real(pool) + } + + /// Creates a `StorageProcessor` entity over a recoverable connection. + /// Upon a database outage connection will block the thread until + /// it will be able to recover the connection (or, if connection cannot + /// be restored after several retries, this will be considered as + /// irrecoverable database error and result in panic). + /// + /// This method is intended to be used in crucial contexts, where the + /// database access is must-have (e.g. block committer). + pub async fn access_storage(&self) -> StorageProcessor<'_> { + match self { + ConnectionPool::Real(real_pool) => { + let start = Instant::now(); + let conn = Self::acquire_connection_retried(real_pool).await; + metrics::histogram!("sql.connection_acquire", start.elapsed()); + StorageProcessor::from_pool(conn) + } + ConnectionPool::Test(test) => test.access_storage().await, + } + } + + pub async fn acquire_connection_retried(pool: &PgPool) -> PoolConnection { + const DB_CONNECTION_RETRIES: u32 = 3; + + let mut retry_count = 0; + + while retry_count < DB_CONNECTION_RETRIES { + metrics::histogram!("sql.connection_pool.size", pool.size() as f64); + metrics::histogram!("sql.connection_pool.idle", pool.num_idle() as f64); + + let connection = pool.acquire().await; + match connection { + Ok(connection) => return connection, + Err(_) => retry_count += 1, + } + + // Backing off for one second if facing an error + vlog::warn!("Failed to get connection to db. 
Backing off for 1 second"); + sleep(Duration::from_secs(1)).await; + } + + // Attempting to get the pooled connection for the last time + pool.acquire().await.unwrap() + } + + pub fn access_storage_blocking(&self) -> StorageProcessor<'_> { + block_on(self.access_storage()) + } + + pub async fn access_test_storage(&self) -> StorageProcessor<'static> { + match self { + ConnectionPool::Test(test) => test.access_storage().await, + ConnectionPool::Real(_real) => { + panic!("Attempt to access test storage with the real pool") + } + } + } +} diff --git a/core/lib/dal/src/connection/test_pool.rs b/core/lib/dal/src/connection/test_pool.rs new file mode 100644 index 000000000000..5d4ee35667f0 --- /dev/null +++ b/core/lib/dal/src/connection/test_pool.rs @@ -0,0 +1,108 @@ +// Built-in deps +use std::sync::Arc; +// External imports +use async_std::sync::Mutex; +use sqlx::{PgConnection, Postgres, Transaction}; + +// Public re-export for proc macro to use `begin` on the connection. +#[doc(hidden)] +pub use sqlx::Connection; + +use crate::StorageProcessor; +// Local imports + +/// Implementation of the test/fake connection pool to be used in tests. +/// This implementation works over an established transaction in order to reject +/// any changes made to the database, even if the tested component initiates and commits +/// its own transactions. +/// +/// ## How does it work +/// +/// Test pool uses an established transaction to be created. This transaction, in its turn, +/// is used to establish a *subtransaction*. Reference to this subtransaction will be used +/// as a connection to create `StorageProcessor` objects in test. +/// +/// Having a subtransaction is necessary: even if some component will (mistakenly) will not +/// initiate a transaction and will call `commit` on `StorageProcessor`, changes won't be +/// persisted, since top-level transaction will be dropped. 
+/// +/// At the same time, using *reference* to the subtransaction in created `StorageProcessor` +/// objects is also necessary: upon `drop`, transaction gets discarded. It means that if we +/// use transaction and somewhere in test `StorageProcessor` is created, used without +/// transaction and then dropped (which is a normal use case for e.g. test setup) -- such +/// changes would be discarded and test will not execute correctly. +/// +/// ## Safety +/// +/// Test pool relies on unsafe code to work, so it comes with several invariants to be +/// upheld by its user. They are *not* enforced by compiler and breaking *any* of them +/// will result in undefined behavior. +/// +/// Usage invariants: +/// - This object should never outlive the transaction used to create it. If, for example, +/// test pool is created and passed to another thread and the thread with the original +/// connection panics (or connection is simply dropped), the behavior is undefined. +/// - Concurrent access to the pool is forbidden. `TestPool` has to be `Sync` in order to +/// not break the interface of the `ConnectionPool`, but since it operates over a single +/// established transaction, it can't be safely accessed from multiple threads. +/// Moving the object to another thread is safe though. +/// - Since we use mutable reference to the subtransaction to create `StorageProcessor`, you +/// should not create and use multiple `StorageProcessor` objects in the same scope. +/// +/// This object is meant to be used in unit tests only, any attempt to use it with the real +/// database is on the conscience of the user. I have warned you. +#[derive(Debug, Clone)] +pub struct TestPool { + // Sub-transaction to be used to instantiate connections. + // + // `Arc` is required to keep the pool `Clone` and `Send` and also to pin the transaction + // location in the memory. 
+ // `Mutex` is required to keep the object `Sync` and provide mutable access to the transaction + // from the immutable `access_storage` method. + subtransaction: Arc>>, +} + +impl TestPool { + /// Establishes a Postgres connection to the test database. + pub async fn connect_to_test_db() -> PgConnection { + let database_url = crate::get_test_database_url(); + PgConnection::connect(&database_url).await.unwrap() + } + + /// Constructs a new object using an already established transaction to the database. + /// This method is unsafe, since internally it extends lifetime of the provided `Transaction`. + /// + /// ## Safety + /// + /// When calling this method, caller must guarantee that resulting object will not live longer + /// than the transaction to the database used to create this object. + pub async unsafe fn new(transaction: &mut Transaction<'_, Postgres>) -> Self { + // Using `std::mem::transmute` to extend the lifetime of an object is an unsafe but + // valid way to use this method. + let subtransaction: Transaction<'static, Postgres> = + std::mem::transmute(transaction.begin().await.unwrap()); + Self { + subtransaction: Arc::new(Mutex::new(subtransaction)), + } + } + + pub async fn access_storage(&self) -> StorageProcessor<'static> { + let mut lock = self.subtransaction.lock().await; + let subtransaction = &mut *lock; + + // ## Safety + // + // Guarantees held by this method: + // - memory location: original `transaction` object is behind the smart pointer, so its location don't change. + // + // Guarantees held by the caller: + // - cross-thread access: accessing `TestPool` concurrently is forbidden by the contract of the object. + // - having multiple `StorageProcessor` objects is forbidden by the contract of the object. + // - lifetime: we are transmuting lifetime to the static lifetime, so the transaction should never live longer + // than the test pool object. 
+ let subtransaction_ref: &'static mut Transaction = + unsafe { std::mem::transmute(subtransaction) }; + + StorageProcessor::from_test_transaction(subtransaction_ref) + } +} diff --git a/core/lib/dal/src/eth_sender_dal.rs b/core/lib/dal/src/eth_sender_dal.rs new file mode 100644 index 000000000000..8a005b8a71fd --- /dev/null +++ b/core/lib/dal/src/eth_sender_dal.rs @@ -0,0 +1,289 @@ +use crate::models::storage_eth_tx::{StorageEthTx, StorageTxHistory, StorageTxHistoryToSend}; +use crate::StorageProcessor; +use std::convert::TryFrom; +use zksync_types::aggregated_operations::AggregatedActionType; +use zksync_types::eth_sender::{EthTx, TxHistory, TxHistoryToSend}; +use zksync_types::{Address, H256, U256}; + +#[derive(Debug)] +pub struct EthSenderDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl EthSenderDal<'_, '_> { + pub fn get_inflight_txs(&mut self) -> Vec { + async_std::task::block_on(async { + let txs = sqlx::query_as!( + StorageEthTx, + "SELECT * FROM eth_txs WHERE confirmed_eth_tx_history_id IS NULL + AND id <= (SELECT COALESCE(MAX(eth_tx_id), 0) FROM eth_txs_history WHERE sent_at_block IS NOT NULL) + ORDER BY id" + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + txs.into_iter().map(|tx| tx.into()).collect() + }) + } + + pub fn get_eth_tx(&mut self, eth_tx_id: u32) -> Option { + async_std::task::block_on(async { + sqlx::query_as!( + StorageEthTx, + "SELECT * FROM eth_txs WHERE id = $1", + eth_tx_id as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(Into::into) + }) + } + + pub fn get_new_eth_txs(&mut self, limit: u64) -> Vec { + async_std::task::block_on(async { + let txs = sqlx::query_as!( + StorageEthTx, + r#"SELECT * FROM eth_txs + WHERE id > (SELECT COALESCE(MAX(eth_tx_id), 0) FROM eth_txs_history) + ORDER BY id + LIMIT $1 + "#, + limit as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + txs.into_iter().map(|tx| tx.into()).collect() + }) + } + + pub fn get_unsent_txs(&mut self) -> 
Vec { + async_std::task::block_on(async { + let txs = sqlx::query_as!( + StorageTxHistoryToSend, + r#" + SELECT + eth_txs_history.id, + eth_txs_history.eth_tx_id, + eth_txs_history.tx_hash, + eth_txs_history.base_fee_per_gas, + eth_txs_history.priority_fee_per_gas, + eth_txs_history.signed_raw_tx, + eth_txs.nonce + FROM eth_txs_history + JOIN eth_txs ON eth_txs.id = eth_txs_history.eth_tx_id + WHERE eth_txs_history.sent_at_block IS NULL AND eth_txs.confirmed_eth_tx_history_id IS NULL + ORDER BY eth_txs_history.id DESC"#, + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + txs.into_iter().map(|tx| tx.into()).collect() + }) + } + + pub fn save_eth_tx( + &mut self, + nonce: u64, + raw_tx: Vec, + tx_type: AggregatedActionType, + contract_address: Address, + predicted_gas_cost: u32, + ) -> EthTx { + async_std::task::block_on(async { + let address = format!("{:#x}", contract_address); + let eth_tx = sqlx::query_as!( + StorageEthTx, + "INSERT INTO eth_txs (raw_tx, nonce, tx_type, contract_address, predicted_gas_cost, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, now(), now()) + RETURNING *", + raw_tx, + nonce as i64, + tx_type.to_string(), + address, + predicted_gas_cost as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + eth_tx.into() + }) + } + + pub fn insert_tx_history( + &mut self, + eth_tx_id: u32, + base_fee_per_gas: u64, + priority_fee_per_gas: u64, + tx_hash: H256, + raw_signed_tx: Vec, + ) -> u32 { + async_std::task::block_on(async { + let priority_fee_per_gas = + i64::try_from(priority_fee_per_gas).expect("Can't convert U256 to i64"); + let base_fee_per_gas = + i64::try_from(base_fee_per_gas).expect("Can't convert U256 to i64"); + let tx_hash = format!("{:#x}", tx_hash); + + sqlx::query!( + "INSERT INTO eth_txs_history + (eth_tx_id, base_fee_per_gas, priority_fee_per_gas, tx_hash, signed_raw_tx, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, now(), now()) + RETURNING id", + eth_tx_id as u32, + base_fee_per_gas, + 
priority_fee_per_gas, + tx_hash, + raw_signed_tx + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .id as u32 + }) + } + + pub fn set_sent_at_block(&mut self, eth_txs_history_id: u32, sent_at_block: u32) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE eth_txs_history SET sent_at_block = $2, sent_at = now() + WHERE id = $1 AND sent_at_block IS NULL", + eth_txs_history_id as i32, + sent_at_block as i32 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn remove_tx_history(&mut self, eth_txs_history_id: u32) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM eth_txs_history + WHERE id = $1", + eth_txs_history_id as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn confirm_tx(&mut self, tx_hash: H256, gas_used: U256) { + async_std::task::block_on(async { + let gas_used = i64::try_from(gas_used).expect("Can't convert U256 to i64"); + let tx_hash = format!("{:#x}", tx_hash); + let ids = sqlx::query!( + "UPDATE eth_txs_history + SET updated_at = now(), confirmed_at = now() + WHERE tx_hash = $1 + RETURNING id, eth_tx_id", + tx_hash, + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + + sqlx::query!( + "UPDATE eth_txs + SET gas_used = $1, confirmed_eth_tx_history_id = $2 + WHERE id = $3", + gas_used, + ids.id, + ids.eth_tx_id + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_tx_history_to_check(&mut self, eth_tx_id: u32) -> Vec { + async_std::task::block_on(async { + let tx_history = sqlx::query_as!( + StorageTxHistory, + "SELECT * FROM eth_txs_history WHERE eth_tx_id = $1 ORDER BY created_at DESC", + eth_tx_id as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + tx_history.into_iter().map(|tx| tx.into()).collect() + }) + } + + pub fn get_block_number_on_first_sent_attempt(&mut self, eth_tx_id: u32) -> Option { + async_std::task::block_on(async { + let sent_at_block = sqlx::query_scalar!( + "SELECT sent_at_block FROM 
eth_txs_history WHERE eth_tx_id = $1 AND sent_at_block IS NOT NULL ORDER BY created_at ASC LIMIT 1", + eth_tx_id as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + sent_at_block.flatten().map(|block| block as u32) + }) + } + + pub fn get_last_sent_eth_tx(&mut self, eth_tx_id: u32) -> Option { + async_std::task::block_on(async { + let history_item = sqlx::query_as!( + StorageTxHistory, + "SELECT * FROM eth_txs_history WHERE eth_tx_id = $1 ORDER BY created_at DESC LIMIT 1", + eth_tx_id as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + history_item.map(|tx| tx.into()) + }) + } + + pub fn get_next_nonce(&mut self) -> Option { + async_std::task::block_on(async { + sqlx::query!(r#"SELECT MAX(nonce) as "max_nonce?" FROM eth_txs"#,) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .max_nonce + .map(|n| n as u64 + 1) + }) + } + + pub fn mark_failed_transaction(&mut self, eth_tx_id: u32) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE eth_txs SET has_failed = TRUE WHERE id = $1", + eth_tx_id as i32 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_number_of_failed_transactions(&mut self) -> i64 { + async_std::task::block_on(async { + sqlx::query!("SELECT COUNT(*) FROM eth_txs WHERE has_failed = TRUE") + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count + .unwrap() + }) + } + + pub fn clear_failed_transactions(&mut self) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM eth_txs WHERE id >= + (SELECT MIN(id) FROM eth_txs WHERE has_failed = TRUE)" + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} diff --git a/core/lib/dal/src/events_dal.rs b/core/lib/dal/src/events_dal.rs new file mode 100644 index 000000000000..59100c7f0ff3 --- /dev/null +++ b/core/lib/dal/src/events_dal.rs @@ -0,0 +1,205 @@ +use crate::StorageProcessor; +use sqlx::types::chrono::Utc; +use zksync_types::l2_to_l1_log::L2ToL1Log; +use 
zksync_types::{tx::IncludedTxLocation, MiniblockNumber, VmEvent}; + +#[derive(Debug)] +pub struct EventsDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl EventsDal<'_, '_> { + pub fn save_events( + &mut self, + block_number: MiniblockNumber, + all_block_events: Vec<(IncludedTxLocation, Vec)>, + ) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY events( + miniblock_number, tx_hash, tx_index_in_block, address, + event_index_in_block, event_index_in_tx, + topic1, topic2, topic3, topic4, value, + tx_initiator_address, + created_at, updated_at + ) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + let mut event_index_in_block = 0u32; + let mut event_index_in_tx: u32; + for ( + IncludedTxLocation { + tx_hash, + tx_index_in_miniblock: tx_index_in_block, + tx_initiator_address, + }, + events, + ) in all_block_events + { + event_index_in_tx = 0; + let tx_hash_str = format!("\\\\x{}", hex::encode(tx_hash.0)); + let tx_initiator_address_str = + format!("\\\\x{}", hex::encode(tx_initiator_address.0)); + for event in events { + let address_str = format!("\\\\x{}", hex::encode(event.address.0)); + let mut topics_str: Vec = event + .indexed_topics + .into_iter() + .map(|topic| format!("\\\\x{}", hex::encode(topic.0))) + .collect(); + topics_str.resize(4, "\\\\x".to_string()); + let value_str = format!("\\\\x{}", hex::encode(event.value)); + let row = format!( + "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n", + block_number, + tx_hash_str, + tx_index_in_block, + address_str, + event_index_in_block, + event_index_in_tx, + topics_str[0], + topics_str[1], + topics_str[2], + topics_str[3], + value_str, + tx_initiator_address_str, + now, + now + ); + bytes.extend_from_slice(row.as_bytes()); + + event_index_in_block += 1; + event_index_in_tx += 1; + } + } + copy.send(bytes).await.unwrap(); + 
copy.finish().await.unwrap(); + }) + } + + pub fn rollback_events(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM events WHERE miniblock_number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn set_tx_initiator_address( + &mut self, + from_block_number: MiniblockNumber, + to_block_number: MiniblockNumber, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE events SET tx_initiator_address = transactions.initiator_address + FROM transactions WHERE transactions.hash = events.tx_hash AND events.miniblock_number BETWEEN $1 AND $2 + ", + from_block_number.0 as i64, + to_block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn save_l2_to_l1_logs( + &mut self, + block_number: MiniblockNumber, + all_block_l2_to_l1_logs: Vec<(IncludedTxLocation, Vec)>, + ) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY l2_to_l1_logs( + miniblock_number, log_index_in_miniblock, log_index_in_tx, tx_hash, + tx_index_in_miniblock, tx_index_in_l1_batch, + shard_id, is_service, sender, key, value, + created_at, updated_at + ) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + let mut log_index_in_miniblock = 0u32; + let mut log_index_in_tx: u32; + for (tx_location, logs) in all_block_l2_to_l1_logs { + log_index_in_tx = 0; + let tx_hash_str = format!("\\\\x{}", hex::encode(tx_location.tx_hash.0)); + for log in logs { + let sender_str = format!("\\\\x{}", hex::encode(log.sender)); + let key_str = format!("\\\\x{}", hex::encode(log.key)); + let value_str = format!("\\\\x{}", hex::encode(log.value)); + let row = format!( + "{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}\n", + block_number, + log_index_in_miniblock, + log_index_in_tx, + tx_hash_str, + 
tx_location.tx_index_in_miniblock, + log.tx_number_in_block, + log.shard_id, + log.is_service, + sender_str, + key_str, + value_str, + now, + now + ); + bytes.extend_from_slice(row.as_bytes()); + + log_index_in_miniblock += 1; + log_index_in_tx += 1; + } + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn rollback_l2_to_l1_logs(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM l2_to_l1_logs WHERE miniblock_number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_first_miniblock_with_saved_l2_to_l1_logs(&mut self) -> Option { + async_std::task::block_on(async { + let row = sqlx::query!( + r#" + SELECT MIN(miniblock_number) as "min?" + FROM l2_to_l1_logs + "#, + ) + .fetch_one(self.storage.conn()) + .await + .unwrap(); + row.min.map(|min| MiniblockNumber(min as u32)) + }) + } +} diff --git a/core/lib/dal/src/events_web3_dal.rs b/core/lib/dal/src/events_web3_dal.rs new file mode 100644 index 000000000000..7e3e8c5e9fdd --- /dev/null +++ b/core/lib/dal/src/events_web3_dal.rs @@ -0,0 +1,180 @@ +use std::time::Instant; + +use sqlx::Row; + +use crate::models::storage_block::web3_block_number_to_sql; +use crate::models::storage_event::StorageWeb3Log; +use crate::SqlxError; +use crate::StorageProcessor; +use zksync_types::{ + api::{self, GetLogsFilter, Log}, + MiniblockNumber, +}; + +#[derive(Debug)] +pub struct EventsWeb3Dal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl EventsWeb3Dal<'_, '_> { + /// Returns miniblock number of log for given filter and offset. + /// Used to determine if there is more than `offset` logs that satisfies filter. 
+ pub fn get_log_block_number( + &mut self, + filter: GetLogsFilter, + offset: usize, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let started_at = Instant::now(); + let (where_sql, arg_index) = self.build_get_logs_where_clause(&filter); + + let query = format!( + r#" + SELECT miniblock_number + FROM events + WHERE {} + ORDER BY miniblock_number ASC, event_index_in_block ASC + LIMIT 1 OFFSET ${} + "#, + where_sql, arg_index + ); + + let mut query = sqlx::query(&query); + query = query.bind(filter.from_block.0 as i64); + + if let Some(api::BlockNumber::Number(number)) = filter.to_block { + query = query.bind(number.as_u64() as i64); + } + if !filter.addresses.is_empty() { + let addresses: Vec<_> = filter + .addresses + .into_iter() + .map(|address| address.0.to_vec()) + .collect(); + query = query.bind(addresses); + } + for (_, topics) in filter.topics { + let topics: Vec<_> = topics.into_iter().map(|topic| topic.0.to_vec()).collect(); + query = query.bind(topics); + } + query = query.bind(offset as i32); + let log = query.fetch_optional(self.storage.conn()).await?; + + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_log_block_number"); + + Ok(log.map(|row| MiniblockNumber(row.get::("miniblock_number") as u32))) + }) + } + + /// Returns logs for given filter. 
+ #[allow(clippy::type_complexity)] + pub fn get_logs(&mut self, filter: GetLogsFilter, limit: usize) -> Result, SqlxError> { + async_std::task::block_on(async { + let started_at = Instant::now(); + let (where_sql, arg_index) = self.build_get_logs_where_clause(&filter); + + let query = format!( + r#" + WITH events_select AS ( + SELECT + address, topic1, topic2, topic3, topic4, value, + miniblock_number, tx_hash, tx_index_in_block, + event_index_in_block, event_index_in_tx + FROM events + WHERE {} + ORDER BY miniblock_number ASC, event_index_in_block ASC + LIMIT ${} + ) + SELECT miniblocks.hash as "block_hash", miniblocks.l1_batch_number as "l1_batch_number", events_select.* + FROM events_select + LEFT JOIN miniblocks ON events_select.miniblock_number = miniblocks.number + ORDER BY miniblock_number ASC, event_index_in_block ASC + "#, + where_sql, arg_index + ); + + let mut query = sqlx::query_as(&query); + query = query.bind(filter.from_block.0 as i64); + + if let Some(api::BlockNumber::Number(number)) = filter.to_block { + query = query.bind(number.as_u64() as i64); + } + if !filter.addresses.is_empty() { + let addresses: Vec<_> = filter + .addresses + .into_iter() + .map(|address| address.0.to_vec()) + .collect(); + query = query.bind(addresses); + } + for (_, topics) in filter.topics { + let topics: Vec<_> = topics.into_iter().map(|topic| topic.0.to_vec()).collect(); + query = query.bind(topics); + } + query = query.bind(limit as i32); + + let db_logs: Vec = query.fetch_all(self.storage.conn()).await?; + let logs = db_logs.into_iter().map(Into::into).collect(); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_logs"); + Ok(logs) + }) + } + + fn build_get_logs_where_clause(&self, filter: &GetLogsFilter) -> (String, u8) { + let mut arg_index = 1; + + let (block_sql, new_arg_index) = web3_block_number_to_sql( + api::BlockNumber::Number(filter.from_block.0.into()), + arg_index, + ); + let mut where_sql = format!("(miniblock_number >= {})", 
block_sql); + arg_index = new_arg_index; + + if let Some(to_block) = filter.to_block { + let (block_sql, new_arg_index) = web3_block_number_to_sql(to_block, arg_index); + where_sql += &format!(" AND (miniblock_number <= {})", block_sql); + arg_index = new_arg_index; + } + if !filter.addresses.is_empty() { + where_sql += &format!(" AND (address = ANY(${}))", arg_index); + arg_index += 1; + } + for (topic_index, _) in filter.topics.iter() { + where_sql += &format!(" AND (topic{} = ANY(${}))", topic_index, arg_index); + arg_index += 1; + } + + (where_sql, arg_index) + } + + pub fn get_all_logs(&mut self, from_block: MiniblockNumber) -> Result, SqlxError> { + async_std::task::block_on(async { + let db_logs: Vec = sqlx::query_as!( + StorageWeb3Log, + r#" + WITH events_select AS ( + SELECT + address, topic1, topic2, topic3, topic4, value, + miniblock_number, tx_hash, tx_index_in_block, + event_index_in_block, event_index_in_tx + FROM events + WHERE miniblock_number > $1 + ORDER BY miniblock_number ASC, event_index_in_block ASC + ) + SELECT miniblocks.hash as "block_hash?", + address as "address!", topic1 as "topic1!", topic2 as "topic2!", topic3 as "topic3!", topic4 as "topic4!", value as "value!", + miniblock_number as "miniblock_number!", miniblocks.l1_batch_number as "l1_batch_number?", tx_hash as "tx_hash!", + tx_index_in_block as "tx_index_in_block!", event_index_in_block as "event_index_in_block!", event_index_in_tx as "event_index_in_tx!" 
+ FROM events_select + INNER JOIN miniblocks ON events_select.miniblock_number = miniblocks.number + ORDER BY miniblock_number ASC, event_index_in_block ASC + "#, + from_block.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await?; + let logs = db_logs.into_iter().map(Into::into).collect(); + Ok(logs) + }) + } +} diff --git a/core/lib/dal/src/explorer/contract_verification_dal.rs b/core/lib/dal/src/explorer/contract_verification_dal.rs new file mode 100644 index 000000000000..b0635ae268b7 --- /dev/null +++ b/core/lib/dal/src/explorer/contract_verification_dal.rs @@ -0,0 +1,363 @@ +use std::time::Duration; + +use zksync_types::{ + explorer_api::{ + DeployContractCalldata, VerificationIncomingRequest, VerificationInfo, VerificationRequest, + VerificationRequestStatus, + }, + get_code_key, Address, CONTRACT_DEPLOYER_ADDRESS, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, +}; + +use sqlx::postgres::types::PgInterval; + +use crate::SqlxError; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct ContractVerificationDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ContractVerificationDal<'_, '_> { + pub fn get_count_of_queued_verification_requests(&mut self) -> Result { + async_std::task::block_on(async { + sqlx::query!( + r#" + SELECT COUNT(*) as "count!" 
+ FROM contract_verification_requests + WHERE status = 'queued' + "# + ) + .fetch_one(self.storage.conn()) + .await + .map(|row| row.count as usize) + }) + } + + pub fn add_contract_verification_request( + &mut self, + query: VerificationIncomingRequest, + ) -> Result { + async_std::task::block_on(async { + sqlx::query!( + " + INSERT INTO contract_verification_requests ( + contract_address, + source_code, + contract_name, + compiler_zksolc_version, + compiler_solc_version, + optimization_used, + constructor_arguments, + status, + created_at, + updated_at + ) + VALUES ($1, $2, $3, $4, $5, $6, $7, 'queued', now(), now()) + RETURNING id + ", + query.contract_address.as_bytes(), + serde_json::to_string(&query.source_code_data).unwrap(), + query.contract_name, + query.compiler_zksolc_version, + query.compiler_solc_version, + query.optimization_used, + query.constructor_arguments.0, + ) + .fetch_one(self.storage.conn()) + .await + .map(|row| row.id as usize) + }) + } + + /// Returns the next verification request for processing. + /// Considering the situation where processing of some request + /// can be interrupted (panic, pod restart, etc..), + /// `processing_timeout` parameter is added to avoid stucking of requests. 
+ pub fn get_next_queued_verification_request( + &mut self, + processing_timeout: Duration, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let processing_timeout = PgInterval { + months: 0, + days: 0, + microseconds: processing_timeout.as_micros() as i64, + }; + let result = sqlx::query!( + "UPDATE contract_verification_requests + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE id = ( + SELECT id FROM contract_verification_requests + WHERE status = 'queued' OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + ORDER BY created_at + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING contract_verification_requests.*", + &processing_timeout + ) + .fetch_optional(self.storage.conn()) + .await? + .map(|row| VerificationRequest { + id: row.id as usize, + req: VerificationIncomingRequest { + contract_address: Address::from_slice(&row.contract_address), + source_code_data: serde_json::from_str(&row.source_code).unwrap(), + contract_name: row.contract_name, + compiler_zksolc_version: row.compiler_zksolc_version, + compiler_solc_version: row.compiler_solc_version, + optimization_used: row.optimization_used, + constructor_arguments: row.constructor_arguments.into(), + }, + }); + Ok(result) + }) + } + + /// Updates the verification request status and inserts the verification info upon successful verification. 
+ pub fn save_verification_info( + &mut self, + verification_info: VerificationInfo, + ) -> Result<(), SqlxError> { + async_std::task::block_on(async { + let mut transaction = self.storage.start_transaction().await; + + sqlx::query!( + " + UPDATE contract_verification_requests + SET status = 'successful', updated_at = now() + WHERE id = $1 + ", + verification_info.request.id as i64, + ) + .execute(transaction.conn()) + .await?; + + let address = verification_info.request.req.contract_address; + let verification_info_json = serde_json::to_value(verification_info) + .expect("Failed to serialize verification info into serde_json"); + sqlx::query!( + " + INSERT INTO contracts_verification_info + (address, verification_info) + VALUES ($1, $2) + ON CONFLICT (address) + DO UPDATE SET verification_info = $2 + ", + address.as_bytes(), + &verification_info_json + ) + .execute(transaction.conn()) + .await?; + + transaction.commit().await; + Ok(()) + }) + } + + pub fn save_verification_error( + &mut self, + id: usize, + error: String, + compilation_errors: serde_json::Value, + panic_message: Option, + ) -> Result<(), SqlxError> { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE contract_verification_requests + SET status = 'failed', updated_at = now(), error = $2, compilation_errors = $3, panic_message = $4 + WHERE id = $1 + ", + id as i64, + error.as_str(), + &compilation_errors, + panic_message + ) + .execute(self.storage.conn()) + .await?; + Ok(()) + }) + } + + pub fn get_verification_request_status( + &mut self, + id: usize, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let result = sqlx::query!( + " + SELECT status, error, compilation_errors FROM contract_verification_requests + WHERE id = $1 + ", + id as i64, + ) + .fetch_optional(self.storage.conn()) + .await? 
+ .map(|row| VerificationRequestStatus { + status: row.status, + error: row.error, + compilation_errors: row + .compilation_errors + .and_then(|errors: serde_json::Value| { + let string_array: Vec = errors + .as_array() + .unwrap() + .iter() + .map(|value| value.as_str().unwrap().to_string()) + .collect(); + if string_array.is_empty() { + None + } else { + Some(string_array) + } + }), + }); + Ok(result) + }) + } + + /// Returns bytecode and calldata from the contract and the transaction that created it. + pub fn get_contract_info_for_verification( + &mut self, + address: Address, + ) -> Result, DeployContractCalldata)>, SqlxError> { + async_std::task::block_on(async { + let hashed_key = get_code_key(&address).hashed_key(); + let result = sqlx::query!( + " + SELECT factory_deps.bytecode, transactions.data, transactions.contract_address + FROM ( + SELECT * FROM storage_logs + WHERE storage_logs.hashed_key = $1 + ORDER BY miniblock_number DESC, operation_number DESC + LIMIT 1 + ) storage_logs + JOIN factory_deps ON factory_deps.bytecode_hash = storage_logs.value + JOIN transactions ON transactions.hash = storage_logs.tx_hash + WHERE storage_logs.value != $2 + ", + hashed_key.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await? + .map(|row| { + let calldata = match row.contract_address { + Some(contract_address) + if contract_address == CONTRACT_DEPLOYER_ADDRESS.0.to_vec() => + { + let data: serde_json::Value = row.data; + let calldata_str: String = + serde_json::from_value(data.get("calldata").unwrap().clone()).unwrap(); + let calldata = hex::decode(&calldata_str[2..]).unwrap(); + DeployContractCalldata::Deploy(calldata) + } + _ => DeployContractCalldata::Ignore, + }; + (row.bytecode, calldata) + }); + Ok(result) + }) + } + + /// Returns true if the contract has a stored contracts_verification_info. 
+ pub fn is_contract_verified(&mut self, address: Address) -> bool { + async_std::task::block_on(async { + let count = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" + FROM contracts_verification_info + WHERE address = $1 + "#, + address.as_bytes() + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count; + count > 0 + }) + } + + pub fn get_zksolc_versions(&mut self) -> Result, SqlxError> { + async_std::task::block_on(async { + let versions: Vec<_> = sqlx::query!( + "SELECT version FROM contract_verification_zksolc_versions ORDER by version" + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| row.version) + .collect(); + Ok(versions) + }) + } + + pub fn get_solc_versions(&mut self) -> Result, SqlxError> { + async_std::task::block_on(async { + let versions: Vec<_> = sqlx::query!( + "SELECT version FROM contract_verification_solc_versions ORDER by version" + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| row.version) + .collect(); + Ok(versions) + }) + } + + pub fn set_zksolc_versions(&mut self, versions: Vec) -> Result<(), SqlxError> { + async_std::task::block_on(async { + let mut transaction = self.storage.start_transaction().await; + + sqlx::query!("DELETE FROM contract_verification_zksolc_versions") + .execute(transaction.conn()) + .await?; + + sqlx::query!( + " + INSERT INTO contract_verification_zksolc_versions (version, created_at, updated_at) + SELECT u.version, now(), now() + FROM UNNEST($1::text[]) + AS u(version) + ", + &versions + ) + .execute(transaction.conn()) + .await?; + + transaction.commit().await; + Ok(()) + }) + } + + pub fn set_solc_versions(&mut self, versions: Vec) -> Result<(), SqlxError> { + async_std::task::block_on(async { + let mut transaction = self.storage.start_transaction().await; + + sqlx::query!("DELETE FROM contract_verification_solc_versions") + .execute(transaction.conn()) + .await?; + + sqlx::query!( + " + INSERT INTO contract_verification_solc_versions (version, 
created_at, updated_at) + SELECT u.version, now(), now() + FROM UNNEST($1::text[]) + AS u(version) + ", + &versions + ) + .execute(transaction.conn()) + .await?; + + transaction.commit().await; + Ok(()) + }) + } +} diff --git a/core/lib/dal/src/explorer/explorer_accounts_dal.rs b/core/lib/dal/src/explorer/explorer_accounts_dal.rs new file mode 100644 index 000000000000..78c398450e14 --- /dev/null +++ b/core/lib/dal/src/explorer/explorer_accounts_dal.rs @@ -0,0 +1,139 @@ +use std::collections::HashMap; + +use zksync_types::{ + api, + explorer_api::{AccountType, BalanceItem, ExplorerTokenInfo}, + get_code_key, + tokens::ETHEREUM_ADDRESS, + utils::storage_key_for_standard_token_balance, + AccountTreeId, Address, Nonce, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, L2_ETH_TOKEN_ADDRESS, + U256, +}; + +use crate::SqlxError; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct ExplorerAccountsDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ExplorerAccountsDal<'_, '_> { + pub fn get_balances_for_address( + &mut self, + address: Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let token_l2_addresses = self + .storage + .explorer() + .misc_dal() + .get_well_known_token_l2_addresses()?; + let hashed_keys: Vec> = token_l2_addresses + .into_iter() + .map(|mut l2_token_address| { + if l2_token_address == ETHEREUM_ADDRESS { + l2_token_address = L2_ETH_TOKEN_ADDRESS; + } + storage_key_for_standard_token_balance( + AccountTreeId::new(l2_token_address), + &address, + ) + .hashed_key() + .0 + .to_vec() + }) + .collect(); + let rows = sqlx::query!( + r#" + SELECT storage.value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" 
+ FROM storage + INNER JOIN tokens ON + storage.address = tokens.l2_address OR (storage.address = $2 AND tokens.l2_address = $3) + WHERE storage.hashed_key = ANY($1) + "#, + &hashed_keys, + L2_ETH_TOKEN_ADDRESS.as_bytes(), + ETHEREUM_ADDRESS.as_bytes(), + ) + .fetch_all(self.storage.conn()) + .await?; + let result = rows + .into_iter() + .filter_map(|row| { + let balance = U256::from_big_endian(&row.value); + if balance.is_zero() { + None + } else { + let l2_address = Address::from_slice(&row.l2_address); + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address, + address: l2_address, + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let balance_item = BalanceItem { + token_info, + balance, + }; + Some((l2_address, balance_item)) + } + }) + .collect(); + Ok(result) + }) + } + + /// Returns sealed and verified nonces for address. + pub fn get_account_nonces(&mut self, address: Address) -> Result<(Nonce, Nonce), SqlxError> { + let sealed_nonce = self + .storage + .storage_web3_dal() + .get_address_historical_nonce(address, api::BlockId::Number(api::BlockNumber::Latest))? + .unwrap() + .as_u32(); + let verified_nonce = self + .storage + .storage_web3_dal() + .get_address_historical_nonce( + address, + api::BlockId::Number(api::BlockNumber::Finalized), + )? 
+ .unwrap_or_default() + .as_u32(); + + Ok((Nonce(sealed_nonce), Nonce(verified_nonce))) + } + + pub fn get_account_type(&mut self, address: Address) -> Result { + let hashed_key = get_code_key(&address).hashed_key(); + async_std::task::block_on(async { + let contract_exists = sqlx::query!( + r#" + SELECT true as "exists" + FROM ( + SELECT * FROM storage_logs + WHERE hashed_key = $1 + ORDER BY miniblock_number DESC, operation_number DESC + LIMIT 1 + ) sl + WHERE sl.value != $2 + "#, + hashed_key.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + let result = match contract_exists { + Some(_) => AccountType::Contract, + None => AccountType::EOA, + }; + Ok(result) + }) + } +} diff --git a/core/lib/dal/src/explorer/explorer_blocks_dal.rs b/core/lib/dal/src/explorer/explorer_blocks_dal.rs new file mode 100644 index 000000000000..27081a867452 --- /dev/null +++ b/core/lib/dal/src/explorer/explorer_blocks_dal.rs @@ -0,0 +1,88 @@ +use zksync_types::explorer_api::{BlockDetails, BlockPageItem, BlocksQuery, PaginationDirection}; +use zksync_types::MiniblockNumber; + +use crate::models::storage_block::{block_page_item_from_storage, StorageBlockDetails}; +use crate::SqlxError; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct ExplorerBlocksDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ExplorerBlocksDal<'_, '_> { + pub fn get_blocks_page( + &mut self, + query: BlocksQuery, + last_verified: MiniblockNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let (cmp_sign, order_str) = match query.pagination.direction { + PaginationDirection::Older => ("<", "DESC"), + PaginationDirection::Newer => (">", "ASC"), + }; + let cmp_str = if query.from.is_some() { + format!("WHERE miniblocks.number {} $3", cmp_sign) + } else { + "".to_string() + }; + let sql_query_str = format!( + " + SELECT number, l1_tx_count, l2_tx_count, hash, timestamp FROM miniblocks + {} + 
ORDER BY miniblocks.number {} + LIMIT $1 + OFFSET $2 + ", + cmp_str, order_str + ); + + let mut sql_query = sqlx::query_as(&sql_query_str).bind(query.pagination.limit as i32); + sql_query = sql_query.bind(query.pagination.offset as i32); + if let Some(from) = query.from { + sql_query = sql_query.bind(from.0 as i64); + } + let result = sql_query + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| block_page_item_from_storage(row, last_verified)) + .collect(); + Ok(result) + }) + } + + pub fn get_block_details( + &mut self, + block_number: MiniblockNumber, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let block_details: Option = sqlx::query_as!( + StorageBlockDetails, + r#" + SELECT miniblocks.number, + miniblocks.timestamp, + miniblocks.l1_tx_count, + miniblocks.l2_tx_count, + miniblocks.hash as "root_hash?", + commit_tx.tx_hash as "commit_tx_hash?", + commit_tx.confirmed_at as "committed_at?", + prove_tx.tx_hash as "prove_tx_hash?", + prove_tx.confirmed_at as "proven_at?", + execute_tx.tx_hash as "execute_tx_hash?", + execute_tx.confirmed_at as "executed_at?" 
+ FROM miniblocks + LEFT JOIN l1_batches ON miniblocks.l1_batch_number = l1_batches.number + LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + WHERE miniblocks.number = $1 + "#, + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await?; + Ok(block_details.map(BlockDetails::from)) + }) + } +} diff --git a/core/lib/dal/src/explorer/explorer_events_dal.rs b/core/lib/dal/src/explorer/explorer_events_dal.rs new file mode 100644 index 000000000000..37d6b279f732 --- /dev/null +++ b/core/lib/dal/src/explorer/explorer_events_dal.rs @@ -0,0 +1,117 @@ +use zksync_types::api::Log; +use zksync_types::explorer_api::{EventsQuery, EventsResponse, PaginationDirection}; + +use sqlx::Row; + +use crate::models::storage_event::StorageWeb3Log; +use crate::{SqlxError, StorageProcessor}; + +#[derive(Debug)] +pub struct ExplorerEventsDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ExplorerEventsDal<'_, '_> { + pub fn get_events_page( + &mut self, + query: EventsQuery, + max_total: usize, + ) -> Result { + async_std::task::block_on(async { + let (cmp_sign, order_str) = match query.pagination.direction { + PaginationDirection::Older => ("<", "DESC"), + PaginationDirection::Newer => (">", "ASC"), + }; + + let mut filters = Vec::new(); + let mut bind_index = 1usize; + if query.from_block_number.is_some() { + filters.push(format!( + "(events.miniblock_number {} ${})", + cmp_sign, bind_index + )); + bind_index += 1; + } + if query.contract_address.is_some() { + filters.push(format!("(events.address = ${})", bind_index)); + bind_index += 1; + } + let filters: String = if !filters.is_empty() { + 
format!("WHERE {}", filters.join(" AND ")) + } else { + "".to_string() + }; + + let ordering = format!( + "events.miniblock_number {0}, events.event_index_in_block {0}", + order_str + ); + let sql_list_query_str = format!( + r#" + SELECT events.*, miniblocks.hash as "block_hash", miniblocks.l1_batch_number + FROM ( + SELECT address, topic1, topic2, topic3, topic4, value, + miniblock_number, tx_hash, tx_index_in_block, + event_index_in_block, event_index_in_tx + FROM events + {0} + ORDER BY {1} + LIMIT ${2} + OFFSET ${3} + ) as events + JOIN miniblocks ON events.miniblock_number = miniblocks.number + ORDER BY {1} + "#, + filters, + ordering, + bind_index, + bind_index + 1 + ); + + let mut sql_query = sqlx::query_as(&sql_list_query_str); + if let Some(block_number) = query.from_block_number { + sql_query = sql_query.bind(block_number.0 as i64); + } + if let Some(contract_address) = query.contract_address { + sql_query = sql_query.bind(contract_address.0.to_vec()); + } + sql_query = sql_query + .bind(query.pagination.limit as i64) + .bind(query.pagination.offset as i64); + + let storage_web3_logs: Vec = + sql_query.fetch_all(self.storage.conn()).await?; + let logs = storage_web3_logs.into_iter().map(Log::from).collect(); + + let sql_count_query_str = format!( + r#" + SELECT COUNT(*) as "count" FROM ( + SELECT true + FROM events + {0} + LIMIT ${1} + ) AS c + "#, + filters, bind_index + ); + + let mut sql_query = sqlx::query(&sql_count_query_str); + if let Some(block_number) = query.from_block_number { + sql_query = sql_query.bind(block_number.0 as i64); + } + if let Some(contract_address) = query.contract_address { + sql_query = sql_query.bind(contract_address.0.to_vec()); + } + sql_query = sql_query.bind(max_total as i64); + + let total = sql_query + .fetch_one(self.storage.conn()) + .await? 
+ .get::("count"); + Ok(EventsResponse { + list: logs, + total: total as usize, + }) + }) + } +} diff --git a/core/lib/dal/src/explorer/explorer_misc_dal.rs b/core/lib/dal/src/explorer/explorer_misc_dal.rs new file mode 100644 index 000000000000..50003dc4c15e --- /dev/null +++ b/core/lib/dal/src/explorer/explorer_misc_dal.rs @@ -0,0 +1,113 @@ +use crate::explorer::storage_contract_info::StorageContractInfo; +use crate::SqlxError; +use crate::StorageProcessor; +use zksync_types::{ + explorer_api::{ContractBasicInfo, ContractStats, ExplorerTokenInfo}, + get_code_key, Address, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, +}; + +#[derive(Debug)] +pub struct ExplorerMiscDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ExplorerMiscDal<'_, '_> { + pub fn get_token_details( + &mut self, + address: Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let row = sqlx::query!( + r#" + SELECT l1_address, l2_address, symbol, name, decimals, usd_price + FROM tokens + WHERE l2_address = $1 + "#, + address.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + let result = row.map(|row| ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }); + Ok(result) + }) + } + + pub fn get_well_known_token_l2_addresses(&mut self) -> Result, SqlxError> { + async_std::task::block_on(async { + let addresses = sqlx::query!("SELECT l2_address FROM tokens WHERE well_known = true") + .fetch_all(self.storage.conn()) + .await? 
+ .into_iter() + .map(|record| Address::from_slice(&record.l2_address)) + .collect(); + Ok(addresses) + }) + } + + pub fn get_contract_info( + &mut self, + address: Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let hashed_key = get_code_key(&address).hashed_key(); + let info = sqlx::query_as!( + StorageContractInfo, + r#" + WITH sl AS ( + SELECT * FROM storage_logs + WHERE storage_logs.hashed_key = $1 + ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC + LIMIT 1 + ) + SELECT + sl.key as "key_address", + fd.bytecode, + txs.initiator_address as "creator_address?", + txs.hash as "creator_tx_hash?", + sl.miniblock_number as "created_in_block_number", + c.verification_info + FROM sl + JOIN factory_deps fd ON fd.bytecode_hash = sl.value + LEFT JOIN transactions txs ON txs.hash = sl.tx_hash + LEFT JOIN contracts_verification_info c ON c.address = $2 + WHERE sl.value != $3 + "#, + hashed_key.as_bytes(), + address.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + Ok(info.map(|info| info.into())) + }) + } + + pub fn get_contract_stats(&mut self, address: Address) -> Result { + async_std::task::block_on(async { + let row = sqlx::query!( + r#" + SELECT COUNT(*) as "total_transactions!" 
+ FROM transactions + WHERE contract_address = $1 + "#, + address.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + let result = row + .map(|row| ContractStats { + total_transactions: row.total_transactions as usize, + }) + .unwrap_or_default(); + Ok(result) + }) + } +} diff --git a/core/lib/dal/src/explorer/explorer_transactions_dal.rs b/core/lib/dal/src/explorer/explorer_transactions_dal.rs new file mode 100644 index 000000000000..96a9e31fc6cc --- /dev/null +++ b/core/lib/dal/src/explorer/explorer_transactions_dal.rs @@ -0,0 +1,806 @@ +use std::collections::HashMap; + +use itertools::Itertools; +use once_cell::sync::Lazy; +use sqlx::Row; + +use zksync_config::constants::ERC20_TRANSFER_TOPIC; +use zksync_types::api::Log; +use zksync_types::explorer_api::{ + BalanceChangeInfo, BalanceChangeType, Erc20TransferInfo, ExplorerTokenInfo, + PaginationDirection, PaginationQuery, TransactionDetails, TransactionResponse, + TransactionsResponse, TxPosition, +}; +use zksync_types::{ + tokens::ETHEREUM_ADDRESS, tx::Execute, Address, MiniblockNumber, H256, L2_ETH_TOKEN_ADDRESS, + U256, U64, +}; + +use crate::models::storage_event::StorageWeb3Log; +use crate::models::storage_transaction::{ + transaction_details_from_storage, StorageTransactionDetails, +}; +use crate::SqlxError; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct ExplorerTransactionsDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ExplorerTransactionsDal<'_, '_> { + pub fn get_transactions_count_after( + &mut self, + block_number: MiniblockNumber, + ) -> Result { + async_std::task::block_on(async { + let tx_count = sqlx::query!( + r#"SELECT COUNT(*) as "count!" FROM transactions + WHERE miniblock_number > $1 AND miniblock_number IS NOT NULL"#, + block_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await? 
+ .count as usize; + Ok(tx_count) + }) + } + + pub fn get_transaction_details( + &mut self, + hash: H256, + l2_erc20_bridge_addr: Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let tx_details: Option = sqlx::query_as!( + StorageTransactionDetails, + r#" + SELECT transactions.*, miniblocks.hash as "block_hash?", + commit_tx.tx_hash as "eth_commit_tx_hash?", + prove_tx.tx_hash as "eth_prove_tx_hash?", + execute_tx.tx_hash as "eth_execute_tx_hash?" + FROM transactions + LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number + LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number + LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + WHERE transactions.hash = $1 + "#, + hash.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + let tx = if let Some(tx_details) = tx_details { + let list = self + .storage_tx_list_to_tx_details_list(vec![tx_details], l2_erc20_bridge_addr)?; + let tx = list[0].clone(); + let logs: Vec = sqlx::query_as!( + StorageWeb3Log, + r#" + SELECT + address, topic1, topic2, topic3, topic4, value, + Null::bytea as "block_hash", Null::bigint as "l1_batch_number?", + miniblock_number, tx_hash, tx_index_in_block, + event_index_in_block, event_index_in_tx + FROM events + WHERE tx_hash = $1 + ORDER BY miniblock_number ASC, event_index_in_block ASC + "#, + hash.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await? 
+ .into_iter() + .map(|storage_log: StorageWeb3Log| { + let mut log = Log::from(storage_log); + log.block_hash = tx.block_hash; + log.l1_batch_number = tx.l1_batch_number.map(|n| U64::from(n.0)); + log + }) + .collect(); + Some(TransactionResponse { tx, logs }) + } else { + None + }; + Ok(tx) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn get_transactions_page( + &mut self, + from_tx_location: Option, + block_number: Option, + contract_address: Option
, + pagination: PaginationQuery, + max_total: usize, + l2_erc20_bridge_addr: Address, + ) -> Result { + async_std::task::block_on(async { + let (cmp_sign, order_str) = match pagination.direction { + PaginationDirection::Older => ("<", "DESC"), + PaginationDirection::Newer => (">", "ASC"), + }; + let mut filters = vec!["transactions.miniblock_number IS NOT NULL".to_string()]; + if let Some(from_tx_location) = from_tx_location { + if let Some(tx_index) = from_tx_location.tx_index { + filters.push(format!( + "(transactions.miniblock_number, transactions.index_in_block) {} ({}, {})", + cmp_sign, from_tx_location.block_number, tx_index + )); + } else { + filters.push(format!( + "transactions.miniblock_number {} {}", + cmp_sign, from_tx_location.block_number + )); + } + } + if let Some(address) = contract_address { + filters.push(format!( + "(transactions.contract_address = '\\x{0}' OR transactions.initiator_address = '\\x{0}')", + hex::encode(address) + )); + } + if let Some(number) = block_number { + filters.push(format!("transactions.miniblock_number = {}", number.0)); + } + let filters: String = if !filters.is_empty() { + format!("WHERE {}", filters.join(" AND ")) + } else { + "".to_string() + }; + let ordering = format!( + "transactions.miniblock_number {0}, transactions.index_in_block {0}", + order_str + ); + + let sql_query_list_str = format!( + r#" + SELECT transactions.*, miniblocks.hash as "block_hash", + commit_tx.tx_hash as eth_commit_tx_hash, + prove_tx.tx_hash as eth_prove_tx_hash, + execute_tx.tx_hash as eth_execute_tx_hash + FROM ( + SELECT * FROM transactions + {0} + ORDER BY {1} + LIMIT {2} + OFFSET {3} + ) as transactions + LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number + LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number + LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as prove_tx ON 
(l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + ORDER BY {1} + "#, + filters, ordering, pagination.limit, pagination.offset + ); + let storage_txs: Vec = sqlx::query_as(&sql_query_list_str) + .fetch_all(self.storage.conn()) + .await?; + let list = + self.storage_tx_list_to_tx_details_list(storage_txs, l2_erc20_bridge_addr)?; + + let sql_query_total_str = format!( + r#" + SELECT COUNT(*) as "count" FROM ( + SELECT true FROM transactions + {} + LIMIT {} + ) as c + "#, + filters, max_total + ); + let total = sqlx::query(&sql_query_total_str) + .fetch_one(self.storage.conn()) + .await? + .get::("count") as usize; + + Ok(TransactionsResponse { list, total }) + }) + } + + #[allow(clippy::too_many_arguments)] + pub fn get_account_transactions_page( + &mut self, + account_address: Address, + from_tx_location: Option, + block_number: Option, + pagination: PaginationQuery, + max_total: usize, + l2_erc20_bridge_addr: Address, + ) -> Result { + async_std::task::block_on(async { + let order_str = match pagination.direction { + PaginationDirection::Older => "DESC", + PaginationDirection::Newer => "ASC", + }; + + let (hashes, total) = self.get_account_transactions_hashes_page( + account_address, + from_tx_location, + block_number, + pagination, + max_total, + )?; + let sql_query_str = format!( + r#" + SELECT transactions.*, miniblocks.hash as "block_hash", + commit_tx.tx_hash as eth_commit_tx_hash, + prove_tx.tx_hash as eth_prove_tx_hash, + execute_tx.tx_hash as eth_execute_tx_hash + FROM transactions + LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number + LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number + LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN 
eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + WHERE transactions.hash = ANY($1) + ORDER BY transactions.miniblock_number {}, transactions.index_in_block {} + "#, + order_str, order_str + ); + + let sql_query = sqlx::query_as(&sql_query_str).bind(hashes); + let storage_txs: Vec = + sql_query.fetch_all(self.storage.conn()).await?; + let list = + self.storage_tx_list_to_tx_details_list(storage_txs, l2_erc20_bridge_addr)?; + + Ok(TransactionsResponse { list, total }) + }) + } + + fn get_account_transactions_hashes_page( + &mut self, + account_address: Address, + from_tx_location: Option, + block_number: Option, + pagination: PaginationQuery, + max_total: usize, + ) -> Result<(Vec>, usize), SqlxError> { + async_std::task::block_on(async { + let (cmp_sign, order_str) = match pagination.direction { + PaginationDirection::Older => ("<", "DESC"), + PaginationDirection::Newer => (">", "ASC"), + }; + let mut optional_filters = String::new(); + if let Some(block_number) = block_number { + optional_filters += format!("AND miniblock_number = {}\n", block_number.0).as_str(); + } + if let Some(from_tx_location) = from_tx_location { + if let Some(from_tx_index) = from_tx_location.tx_index { + optional_filters += format!( + "AND (miniblock_number, tx_index_in_block) {} ({}, {})\n", + cmp_sign, from_tx_location.block_number.0, from_tx_index + ) + .as_str(); + } else { + optional_filters += format!( + "AND miniblock_number {} {}\n", + cmp_sign, from_tx_location.block_number.0 + ) + .as_str(); + } + } + + let mut padded_address = [0u8; 12].to_vec(); + padded_address.extend_from_slice(account_address.as_bytes()); + + let sql_query_str = format!( + " + SELECT tx_hash FROM ( + SELECT tx_hash, lag(tx_hash) OVER (ORDER BY miniblock_number {0}, tx_index_in_block {0}) as prev_hash, 
+ miniblock_number, tx_index_in_block + FROM events + WHERE + ( + ( + ( + topic2 = $1 + OR + topic3 = $1 + ) + AND topic1 = $2 + AND (address IN (SELECT l2_address FROM tokens) OR address = $3) + ) + OR events.tx_initiator_address = $4 + ) + {1} + ) AS h + WHERE prev_hash IS NULL OR tx_hash != prev_hash + ORDER BY miniblock_number {0}, tx_index_in_block {0} + LIMIT {2} OFFSET {3} + ", + order_str, optional_filters, pagination.limit, pagination.offset + ); + let sql_query = sqlx::query(&sql_query_str) + .bind(padded_address.clone()) + .bind(ERC20_TRANSFER_TOPIC.as_bytes().to_vec()) + .bind(L2_ETH_TOKEN_ADDRESS.as_bytes().to_vec()) + .bind(account_address.as_bytes().to_vec()); + let hashes: Vec> = sql_query + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| row.get::, &str>("tx_hash")) + .collect(); + + let sql_count_query_str = format!( + r#" + SELECT COUNT(*) as "count" FROM ( + SELECT true FROM ( + SELECT tx_hash, lag(tx_hash) OVER (ORDER BY miniblock_number {0}, tx_index_in_block {0}) as prev_hash, + miniblock_number, tx_index_in_block + FROM events + WHERE + ( + topic2 = $1 + OR + topic3 = $1 + ) + AND topic1 = $2 + AND (address IN (SELECT l2_address FROM tokens) OR address = $3) + {1} + ) AS h + WHERE prev_hash IS NULL OR tx_hash != prev_hash + ORDER BY miniblock_number {0}, tx_index_in_block {0} + LIMIT {2} + ) AS c + "#, + order_str, optional_filters, max_total + ); + let sql_count_query = sqlx::query(&sql_count_query_str) + .bind(padded_address) + .bind(ERC20_TRANSFER_TOPIC.as_bytes().to_vec()) + .bind(L2_ETH_TOKEN_ADDRESS.as_bytes().to_vec()); + let total = sql_count_query + .fetch_one(self.storage.conn()) + .await? 
+ .get::("count"); + Ok((hashes, total as usize)) + }) + } + + fn get_erc20_transfers( + &mut self, + hashes: Vec>, + ) -> Result>, SqlxError> { + async_std::task::block_on(async { + let transfers = sqlx::query!( + r#" + SELECT tx_hash, topic2 as "topic2!", topic3 as "topic3!", value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" + FROM events + INNER JOIN tokens ON + tokens.l2_address = events.address OR (events.address = $3 AND tokens.l2_address = $4) + WHERE tx_hash = ANY($1) AND topic1 = $2 + ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC + "#, + &hashes, + ERC20_TRANSFER_TOPIC.as_bytes(), + L2_ETH_TOKEN_ADDRESS.as_bytes(), + ETHEREUM_ADDRESS.as_bytes(), + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .group_by(|row| row.tx_hash.clone()) + .into_iter() + .map(|(hash, group)| (H256::from_slice(&hash), group.map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let from = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic2)); + let to = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic3)); + let amount = U256::from_big_endian(&row.value); + Erc20TransferInfo { + token_info, + from, + to, + amount, + } + }).collect::>())) + .collect(); + Ok(transfers) + }) + } + + fn get_withdrawals( + &mut self, + hashes: Vec>, + l2_erc20_bridge_addr: Address, + ) -> Result>, SqlxError> { + async_std::task::block_on(async { + static ERC20_WITHDRAW_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + zksync_contracts::l2_bridge_contract() + .event("WithdrawalInitiated") + .unwrap() + .signature() + }); + + let 
erc20_withdrawals: HashMap> = sqlx::query!( + r#" + SELECT tx_hash, topic2 as "topic2!", topic3 as "topic3!", value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" + FROM events + INNER JOIN tokens ON + events.topic4 = ('\x000000000000000000000000'::bytea || tokens.l2_address) + WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3 + ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC + "#, + &hashes, + ERC20_WITHDRAW_EVENT_SIGNATURE.as_bytes(), + l2_erc20_bridge_addr.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .group_by(|row| row.tx_hash.clone()) + .into_iter() + .map(|(hash, group)| (H256::from_slice(&hash), group.map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let l2_sender = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic2)); + let l1_receiver = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic3)); + let amount = U256::from_big_endian(&row.value); + BalanceChangeInfo { + token_info, + from: l2_sender, + to: l1_receiver, + amount, + r#type: BalanceChangeType::Withdrawal + } + }).collect::>())) + .collect(); + + static ETH_WITHDRAW_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + zksync_contracts::eth_contract() + .event("Withdrawal") + .unwrap() + .signature() + }); + + let eth_withdrawals: HashMap> = sqlx::query!( + r#" + SELECT tx_hash, topic2 as "topic2!", topic3 as "topic3!", value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", 
tokens.usd_price as "usd_price?" + FROM events + INNER JOIN tokens ON tokens.l2_address = '\x0000000000000000000000000000000000000000' + WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3 + ORDER BY tx_hash, miniblock_number ASC, event_index_in_block ASC + "#, + &hashes, + ETH_WITHDRAW_EVENT_SIGNATURE.as_bytes(), + L2_ETH_TOKEN_ADDRESS.as_bytes(), + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .group_by(|row| row.tx_hash.clone()) + .into_iter() + .map(|(hash, group)| (H256::from_slice(&hash), group.map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let l2_sender = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic2)); + let l1_receiver = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic3)); + let amount = U256::from_big_endian(&row.value); + BalanceChangeInfo { + token_info, + from: l2_sender, + to: l1_receiver, + amount, + r#type: BalanceChangeType::Withdrawal + } + }).collect::>())) + .collect(); + + let mut withdrawals = erc20_withdrawals; + for (hash, mut items) in eth_withdrawals { + withdrawals.entry(hash).or_default().append(&mut items); + } + + Ok(withdrawals) + }) + } + + /// Returns hashmap with transactions that are deposits. 
+ fn get_deposits( + &mut self, + hashes: Vec>, + l2_erc20_bridge_addr: Address, + ) -> Result>, SqlxError> { + async_std::task::block_on(async { + static ERC20_DEPOSIT_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + zksync_contracts::l2_bridge_contract() + .event("FinalizeDeposit") + .unwrap() + .signature() + }); + let erc20_deposits: HashMap> = sqlx::query!( + r#" + SELECT tx_hash, topic2 as "topic2!", topic3 as "topic3!", value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" + FROM events + INNER JOIN tokens ON + events.topic4 = ('\x000000000000000000000000'::bytea || tokens.l2_address) + WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3 + "#, + &hashes, + ERC20_DEPOSIT_EVENT_SIGNATURE.as_bytes(), + l2_erc20_bridge_addr.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let l1_sender = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic2)); + let l2_receiver = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic3)); + let amount = U256::from_big_endian(&row.value); + let deposit_info = BalanceChangeInfo { + token_info, + from: l1_sender, + to: l2_receiver, + amount, + r#type: BalanceChangeType::Deposit + }; + (H256::from_slice(&row.tx_hash), vec![deposit_info]) + }) + .collect(); + + static ETH_MINT_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + zksync_contracts::eth_contract() + .event("Mint") + .unwrap() + .signature() + }); + let eth_deposits: HashMap> = sqlx::query!( + r#" + SELECT events.tx_hash, transactions.initiator_address as 
"l1_sender!", events.topic2 as "topic2!", events.value as "value!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" + FROM events + INNER JOIN tokens ON tokens.l2_address = '\x0000000000000000000000000000000000000000' + INNER JOIN transactions ON transactions.hash = events.tx_hash + WHERE tx_hash = ANY($1) AND events.topic1 = $2 AND events.address = $3 + ORDER BY tx_hash, events.miniblock_number ASC, event_index_in_block ASC + "#, + &hashes, + ETH_MINT_EVENT_SIGNATURE.as_bytes(), + L2_ETH_TOKEN_ADDRESS.as_bytes(), + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .group_by(|row| row.tx_hash.clone()) + .into_iter() + .map(|(hash, group)| (H256::from_slice(&hash), group.map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + let l1_sender = Address::from_slice(&row.l1_sender); + let l2_receiver = Self::erc20_decode_address_from_topic(H256::from_slice(&row.topic2)); + let amount = U256::from_big_endian(&row.value); + BalanceChangeInfo { + token_info, + from: l1_sender, + to: l2_receiver, + amount, + r#type: BalanceChangeType::Deposit + } + }).collect::>())) + .collect(); + + let mut deposits = erc20_deposits; + for (hash, mut items) in eth_deposits { + deposits.entry(hash).or_default().append(&mut items); + } + + Ok(deposits) + }) + } + + /// Returns hashmap with transactions that are ERC20 transfers. 
+ fn filter_erc20_transfers( + &mut self, + txs: &[StorageTransactionDetails], + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let hashes: Vec> = txs.iter().map(|tx| tx.hash.clone()).collect(); + // For transaction to be ERC20 transfer 2 conditions should be met + // 1) It is an execute transaction and contract address is an ERC20 token. + let filtered_by_contract_address: HashMap = sqlx::query!( + r#" + SELECT hash as "hash!", + tokens.l1_address as "l1_address!", tokens.l2_address as "l2_address!", + tokens.symbol as "symbol!", tokens.name as "name!", tokens.decimals as "decimals!", tokens.usd_price as "usd_price?" + FROM transactions + INNER JOIN tokens + ON tokens.l2_address = transactions.contract_address OR (transactions.contract_address = $2 AND tokens.l2_address = $3) + WHERE hash = ANY($1) + "#, + &hashes, + L2_ETH_TOKEN_ADDRESS.as_bytes(), + ETHEREUM_ADDRESS.as_bytes(), + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| { + let token_info = ExplorerTokenInfo { + l1_address: Address::from_slice(&row.l1_address), + l2_address: Address::from_slice(&row.l2_address), + address: Address::from_slice(&row.l2_address), + symbol: row.symbol, + name: row.name, + decimals: row.decimals as u8, + usd_price: row.usd_price, + }; + (H256::from_slice(&row.hash), token_info) + }) + .collect(); + + // 2) Calldata is a valid ERC20 `transfer` calldata + let erc20_transfers_iter = txs.iter().filter_map(|tx| { + let hash = H256::from_slice(&tx.hash); + if let Some(token_info) = filtered_by_contract_address.get(&hash).cloned() { + let execute = serde_json::from_value::(tx.data.clone()).unwrap(); + let calldata = execute.calldata(); + Self::parse_erc20_transfer_calldata(calldata).map(|(to, amount)| { + let from = Address::from_slice(&tx.initiator_address); + ( + hash, + Erc20TransferInfo { + from, + to, + amount, + token_info, + }, + ) + }) + } else { + None + } + }); + + // Also include ETH transfers + let eth_token_info = self + 
.storage + .explorer() + .misc_dal() + .get_token_details(Address::zero())? + .expect("Info about ETH should be present in DB"); + let eth_transfers_iter = txs.iter().filter_map(|tx| { + let hash = H256::from_slice(&tx.hash); + let execute = serde_json::from_value::(tx.data.clone()).unwrap(); + // All transactions with an empty calldata are considered to be called "transfers". + if execute.calldata().is_empty() { + let from = Address::from_slice(&tx.initiator_address); + let to = execute.contract_address; + let amount = execute.value; + + Some(( + hash, + Erc20TransferInfo { + from, + to, + amount, + token_info: eth_token_info.clone(), + }, + )) + } else { + None + } + }); + + let result = erc20_transfers_iter.chain(eth_transfers_iter).collect(); + Ok(result) + }) + } + + fn storage_tx_list_to_tx_details_list( + &mut self, + txs: Vec, + l2_erc20_bridge_addr: Address, + ) -> Result, SqlxError> { + let hashes: Vec> = txs.iter().map(|tx| tx.hash.clone()).collect(); + let erc20_transfers_map = self.get_erc20_transfers(hashes.clone())?; + let withdrawals_map = self.get_withdrawals(hashes.clone(), l2_erc20_bridge_addr)?; + let erc20_transfers_filtered = self.filter_erc20_transfers(&txs)?; + let deposits_map = self.get_deposits(hashes, l2_erc20_bridge_addr)?; + let txs = txs + .into_iter() + .map(|tx_details| { + Self::build_transaction_details( + &erc20_transfers_map, + &withdrawals_map, + &erc20_transfers_filtered, + &deposits_map, + tx_details, + ) + }) + .collect(); + Ok(txs) + } + + fn build_transaction_details( + erc20_transfers_map: &HashMap>, + withdrawals_map: &HashMap>, + filtered_transfers: &HashMap, + deposits_map: &HashMap>, + tx_details: StorageTransactionDetails, + ) -> TransactionDetails { + let hash = H256::from_slice(&tx_details.hash); + let erc20_transfers = erc20_transfers_map.get(&hash).cloned().unwrap_or_default(); + let withdrawals = withdrawals_map.get(&hash).cloned().unwrap_or_default(); + let transfer = filtered_transfers.get(&hash).cloned(); + 
let deposits = deposits_map.get(&hash).cloned().unwrap_or_default(); + transaction_details_from_storage( + tx_details, + erc20_transfers, + withdrawals, + transfer, + deposits, + ) + } + + /// Checks if calldata is erc20 `transfer` calldata and parses (to, amount) from it + fn parse_erc20_transfer_calldata(calldata: Vec) -> Option<(Address, U256)> { + // Check calldata length + if calldata.len() != 68 { + return None; + } + // Check signature match + if calldata[0..4].to_vec() != vec![0xa9, 0x05, 0x9c, 0xbb] { + return None; + } + let to = Address::from_slice(&calldata[16..36]); + let amount = U256::from_big_endian(&calldata[36..68]); + Some((to, amount)) + } + + fn erc20_decode_address_from_topic(topic: H256) -> Address { + Address::from_slice(&topic.as_bytes()[12..]) + } +} diff --git a/core/lib/dal/src/explorer/mod.rs b/core/lib/dal/src/explorer/mod.rs new file mode 100644 index 000000000000..f1951a002461 --- /dev/null +++ b/core/lib/dal/src/explorer/mod.rs @@ -0,0 +1,58 @@ +use crate::StorageProcessor; +use contract_verification_dal::ContractVerificationDal; +use explorer_accounts_dal::ExplorerAccountsDal; +use explorer_blocks_dal::ExplorerBlocksDal; +use explorer_events_dal::ExplorerEventsDal; +use explorer_misc_dal::ExplorerMiscDal; +use explorer_transactions_dal::ExplorerTransactionsDal; + +pub mod contract_verification_dal; +pub mod explorer_accounts_dal; +pub mod explorer_blocks_dal; +pub mod explorer_events_dal; +pub mod explorer_misc_dal; +pub mod explorer_transactions_dal; +pub mod storage_contract_info; + +#[derive(Debug)] +pub struct ExplorerIntermediator<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl<'a, 'c> ExplorerIntermediator<'a, 'c> { + pub fn contract_verification_dal(self) -> ContractVerificationDal<'a, 'c> { + ContractVerificationDal { + storage: self.storage, + } + } + + pub fn transactions_dal(self) -> ExplorerTransactionsDal<'a, 'c> { + ExplorerTransactionsDal { + storage: self.storage, + } + } + + pub fn 
blocks_dal(self) -> ExplorerBlocksDal<'a, 'c> { + ExplorerBlocksDal { + storage: self.storage, + } + } + + pub fn accounts_dal(self) -> ExplorerAccountsDal<'a, 'c> { + ExplorerAccountsDal { + storage: self.storage, + } + } + + pub fn misc_dal(self) -> ExplorerMiscDal<'a, 'c> { + ExplorerMiscDal { + storage: self.storage, + } + } + + pub fn events_dal(self) -> ExplorerEventsDal<'a, 'c> { + ExplorerEventsDal { + storage: self.storage, + } + } +} diff --git a/core/lib/dal/src/explorer/storage_contract_info.rs b/core/lib/dal/src/explorer/storage_contract_info.rs new file mode 100644 index 000000000000..bdd1d23383c1 --- /dev/null +++ b/core/lib/dal/src/explorer/storage_contract_info.rs @@ -0,0 +1,34 @@ +use zksync_types::{explorer_api::ContractBasicInfo, Address, Bytes, MiniblockNumber, H256}; +use zksync_utils::h256_to_account_address; + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageContractInfo { + pub key_address: Vec, + pub bytecode: Vec, + pub creator_address: Option>, + pub creator_tx_hash: Option>, + pub created_in_block_number: i64, + pub verification_info: Option, +} + +impl From for ContractBasicInfo { + fn from(info: StorageContractInfo) -> ContractBasicInfo { + ContractBasicInfo { + address: h256_to_account_address(&H256::from_slice(&info.key_address)), + bytecode: Bytes(info.bytecode), + creator_address: info + .creator_address + .map(|address| Address::from_slice(&address)) + .unwrap_or_else(Address::zero), + creator_tx_hash: info + .creator_tx_hash + .map(|tx_hash| H256::from_slice(&tx_hash)) + .unwrap_or_else(H256::zero), + created_in_block_number: MiniblockNumber(info.created_in_block_number as u32), + verification_info: info.verification_info.map(|verification_info| { + serde_json::from_value(verification_info) + .expect("invalid verification_info json in database") + }), + } + } +} diff --git a/core/lib/dal/src/fee_monitor_dal.rs b/core/lib/dal/src/fee_monitor_dal.rs new file mode 100644 index 000000000000..e15ec62aa5aa --- /dev/null +++ 
b/core/lib/dal/src/fee_monitor_dal.rs @@ -0,0 +1,169 @@ +use crate::StorageProcessor; +use crate::{events_web3_dal::EventsWeb3Dal, models::storage_fee_monitor::StorageBlockGasData}; +use zksync_config::constants::ERC20_TRANSFER_TOPIC; +use zksync_types::{ + api::{self, GetLogsFilter}, + Address, L1BatchNumber, L2_ETH_TOKEN_ADDRESS, U256, +}; +use zksync_utils::address_to_h256; + +// Dev note: these structure is not fundamental and exists for auxiliary +// monitoring purposes, it's use cases are limited and will normally appear +// together with calls to `FeeMonitorDal` methods, thus it doesn't really +// makes sense to extract it to the `types` crate. + +#[derive(Debug)] +pub struct GasConsumptionData { + pub consumed_gas: u64, + pub base_gas_price: u64, + pub priority_gas_price: u64, +} + +impl GasConsumptionData { + pub fn wei_spent(&self) -> u128 { + (self.base_gas_price + self.priority_gas_price) as u128 * self.consumed_gas as u128 + } +} + +#[derive(Debug)] +pub struct BlockGasConsumptionData { + pub block_number: L1BatchNumber, + pub commit: GasConsumptionData, + pub prove: GasConsumptionData, + pub execute: GasConsumptionData, +} + +impl BlockGasConsumptionData { + pub fn wei_spent(&self) -> u128 { + self.commit.wei_spent() + self.prove.wei_spent() + self.execute.wei_spent() + } +} + +#[derive(Debug)] +pub struct FeeMonitorDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl FeeMonitorDal<'_, '_> { + /// Returns data related to the gas consumption and gas costs for certain block. + /// In case of any unexpected situation (i.e. some data is not present in the database) + /// will return an error. 
+ pub fn get_block_gas_consumption( + &mut self, + block_number: L1BatchNumber, + ) -> Result { + async_std::task::block_on(async { + let res: StorageBlockGasData = sqlx::query_as!( + StorageBlockGasData, + r#" + SELECT + l1_batches.number, + commit_tx_data.gas_used as "commit_gas?", + commit_tx.base_fee_per_gas as "commit_base_gas_price?", + commit_tx.priority_fee_per_gas as "commit_priority_gas_price?", + prove_tx_data.gas_used as "prove_gas?", + prove_tx.base_fee_per_gas as "prove_base_gas_price?", + prove_tx.priority_fee_per_gas as "prove_priority_gas_price?", + execute_tx_data.gas_used as "execute_gas?", + execute_tx.base_fee_per_gas as "execute_base_gas_price?", + execute_tx.priority_fee_per_gas as "execute_priority_gas_price?" + FROM l1_batches + LEFT JOIN eth_txs_history as commit_tx + ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs as commit_tx_data + ON (l1_batches.eth_commit_tx_id = commit_tx_data.id) + LEFT JOIN eth_txs_history as prove_tx + ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs as prove_tx_data + ON (l1_batches.eth_prove_tx_id = prove_tx_data.id) + LEFT JOIN eth_txs_history as execute_tx + ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs as execute_tx_data + ON (l1_batches.eth_execute_tx_id = execute_tx_data.id) + WHERE l1_batches.number = $1 + "#, + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await? + .ok_or_else(|| anyhow::format_err!("No block details for requested block {block_number}"))?; + + // Closure extracting `u64` out of `Option`. + // Normally we expect data to be present, but if for any reason it isn't we'll just return an error: + // it's tracking module, so no big deal. 
+ let extract = |opt: Option| { + opt.map(|val| val as u64).ok_or_else(|| { + anyhow::format_err!("Some field was `None` for block {block_number}. Data from database: {res:?}") + }) + }; + + Ok(BlockGasConsumptionData { + block_number, + commit: GasConsumptionData { + consumed_gas: extract(res.commit_gas)?, + base_gas_price: extract(res.commit_base_gas_price)?, + priority_gas_price: extract(res.commit_priority_gas_price)?, + }, + prove: GasConsumptionData { + consumed_gas: extract(res.prove_gas)?, + base_gas_price: extract(res.prove_base_gas_price)?, + priority_gas_price: extract(res.prove_priority_gas_price)?, + }, + execute: GasConsumptionData { + consumed_gas: extract(res.execute_gas)?, + base_gas_price: extract(res.execute_base_gas_price)?, + priority_gas_price: extract(res.execute_priority_gas_price)?, + }, + }) + }) + } + + /// Fetches ETH ERC-20 transfers to a certain account for a certain block. + /// Returns the vector of transfer amounts. + pub fn fetch_erc20_transfers( + &mut self, + block_number: L1BatchNumber, + account: Address, + ) -> Result, anyhow::Error> { + // We expect one log per transaction, thus limitiing is not really important. + const MAX_LOGS_PER_BLOCK: usize = 100_000; + + // Event signature: `Transfer(address from, address to, uint256 value)`. + // We're filtering by the 1st (signature hash) and 3rd (receiver). + let topics = vec![ + (1, vec![ERC20_TRANSFER_TOPIC]), + (3, vec![address_to_h256(&account)]), + ]; + let miniblocks_range = match self + .storage + .blocks_dal() + .get_miniblock_range_of_l1_batch(block_number) + { + Some(range) => range, + None => return Ok(Vec::new()), + }; + + let logs = { + let mut events_web3_dal = EventsWeb3Dal { + storage: self.storage, + }; + events_web3_dal.get_logs( + GetLogsFilter { + from_block: miniblocks_range.0, + to_block: Some(api::BlockNumber::Number(miniblocks_range.1 .0.into())), + addresses: vec![L2_ETH_TOKEN_ADDRESS], + topics, + }, + MAX_LOGS_PER_BLOCK, + )? 
+ }; + + // Now collect the transfer amounts from retrieved logs. + let balances: Vec<_> = logs + .into_iter() + .map(|log| U256::from_big_endian(&log.data.0)) + .collect(); + + Ok(balances) + } +} diff --git a/core/lib/dal/src/gpu_prover_queue_dal.rs b/core/lib/dal/src/gpu_prover_queue_dal.rs new file mode 100644 index 000000000000..3002a24fb523 --- /dev/null +++ b/core/lib/dal/src/gpu_prover_queue_dal.rs @@ -0,0 +1,168 @@ +use std::net::IpAddr; +use std::time::Duration; + +use crate::time_utils::pg_interval_from_duration; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct GpuProverQueueDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +#[derive(Debug, Clone)] +pub struct SocketAddress { + pub host: IpAddr, + pub port: u16, +} + +#[derive(Debug)] +pub enum GpuProverInstanceStatus { + // The instance is available for processing. + Available, + // The instance is running at full capacity. + Full, + // The instance is reserved by an synthesizer. + Reserved, + // The instance is not alive anymore. 
+ Dead, +} + +impl GpuProverQueueDal<'_, '_> { + pub fn get_free_prover_instance( + &mut self, + processing_timeout: Duration, + specialized_prover_group_id: u8, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + let result: Option = sqlx::query!( + " + UPDATE gpu_prover_queue + SET instance_status = 'reserved', + updated_at = now(), + processing_started_at = now() + WHERE (instance_host, instance_port) in ( + SELECT instance_host, instance_port + FROM gpu_prover_queue + WHERE specialized_prover_group_id=$2 + AND ( + instance_status = 'available' + OR (instance_status = 'reserved' AND processing_started_at < now() - $1::interval) + ) + ORDER BY updated_at ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING gpu_prover_queue.* + ", + &processing_timeout, + specialized_prover_group_id as i16 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| SocketAddress { + host: row.instance_host.network(), + port: row.instance_port as u16, + }); + + result + }) + } + + pub fn insert_prover_instance( + &mut self, + address: SocketAddress, + queue_capacity: usize, + specialized_prover_group_id: u8, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + INSERT INTO gpu_prover_queue (instance_host, instance_port, queue_capacity, queue_free_slots, instance_status, specialized_prover_group_id, created_at, updated_at) + VALUES (cast($1::text as inet), $2, $3, $3, 'available', $4, now(), now()) + ON CONFLICT(instance_host, instance_port) + DO UPDATE SET instance_status='available', queue_capacity=$3, queue_free_slots=$3, specialized_prover_group_id=$4, updated_at=now()", + format!("{}",address.host), + address.port as i32, + queue_capacity as i32, + specialized_prover_group_id as i16) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn update_prover_instance_status( + &mut self, + address: SocketAddress, + status: GpuProverInstanceStatus, + 
queue_free_slots: usize, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE gpu_prover_queue + SET instance_status = $1, updated_at = now(), queue_free_slots = $4 + WHERE instance_host = $2::text::inet + AND instance_port = $3 + ", + format!("{:?}", status).to_lowercase(), + format!("{}", address.host), + address.port as i32, + queue_free_slots as i32, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn update_prover_instance_from_full_to_available( + &mut self, + address: SocketAddress, + queue_free_slots: usize, + ) { + async_std::task::block_on(async { + sqlx::query!( + " + UPDATE gpu_prover_queue + SET instance_status = 'available', updated_at = now(), queue_free_slots = $3 + WHERE instance_host = $1::text::inet + AND instance_port = $2 + AND instance_status = 'full' + ", + format!("{}", address.host), + address.port as i32, + queue_free_slots as i32 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_count_of_jobs_ready_for_processing(&mut self) -> u32 { + async_std::task::block_on(async { + sqlx::query!( + r#" + SELECT MIN(count) as "count" + FROM (SELECT COALESCE(SUM(queue_free_slots), 0) as "count" + FROM gpu_prover_queue + where instance_status = 'available' + UNION + SELECT count(*) as "count" + from prover_jobs + where status = 'queued' + ) as t1; + "#, + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count + .unwrap() as u32 + }) + } +} diff --git a/core/lib/dal/src/lib.rs b/core/lib/dal/src/lib.rs new file mode 100644 index 000000000000..90d099ac476b --- /dev/null +++ b/core/lib/dal/src/lib.rs @@ -0,0 +1,241 @@ +#![allow(clippy::derive_partial_eq_without_eq, clippy::format_push_string)] + +use std::env; + +// Built-in deps +use async_std::task::block_on; +pub use sqlx::Error as SqlxError; +use sqlx::{postgres::Postgres, Connection, PgConnection, Transaction}; +// External imports +use sqlx::pool::PoolConnection; +pub use sqlx::types::BigDecimal; + +// Local imports 
+use crate::blocks_dal::BlocksDal; +use crate::blocks_web3_dal::BlocksWeb3Dal; +use crate::connection::holder::ConnectionHolder; +pub use crate::connection::ConnectionPool; +use crate::eth_sender_dal::EthSenderDal; +use crate::events_dal::EventsDal; +use crate::events_web3_dal::EventsWeb3Dal; +use crate::explorer::ExplorerIntermediator; +use crate::fee_monitor_dal::FeeMonitorDal; +use crate::gpu_prover_queue_dal::GpuProverQueueDal; +use crate::prover_dal::ProverDal; +use crate::storage_dal::StorageDal; +use crate::storage_load_dal::StorageLoadDal; +use crate::storage_logs_dal::StorageLogsDal; +use crate::storage_logs_dedup_dal::StorageLogsDedupDal; +use crate::storage_web3_dal::StorageWeb3Dal; +use crate::tokens_dal::TokensDal; +use crate::tokens_web3_dal::TokensWeb3Dal; +use crate::transactions_dal::TransactionsDal; +use crate::transactions_web3_dal::TransactionsWeb3Dal; +use crate::witness_generator_dal::WitnessGeneratorDal; + +pub mod blocks_dal; +pub mod blocks_web3_dal; +pub mod connection; +pub mod eth_sender_dal; +pub mod events_dal; +pub mod events_web3_dal; +pub mod explorer; +pub mod fee_monitor_dal; +pub mod gpu_prover_queue_dal; +mod models; +pub mod prover_dal; +pub mod storage_dal; +pub mod storage_load_dal; +pub mod storage_logs_dal; +pub mod storage_logs_dedup_dal; +pub mod storage_web3_dal; +pub mod time_utils; +pub mod tokens_dal; +pub mod tokens_web3_dal; +pub mod transactions_dal; +pub mod transactions_web3_dal; +pub mod witness_generator_dal; + +#[cfg(test)] +mod tests; + +/// Obtains the master database URL from the environment variable. +pub fn get_master_database_url() -> String { + env::var("DATABASE_URL").expect("DATABASE_URL must be set") +} + +/// Obtains the replica database URL from the environment variable. +pub fn get_replica_database_url() -> String { + env::var("DATABASE_REPLICA_URL").unwrap_or_else(|_| get_master_database_url()) +} + +/// Obtains the test database URL from the environment variable. 
+pub fn get_test_database_url() -> String { + env::var("TEST_DATABASE_URL").expect("TEST_DATABASE_URL must be set") +} + +/// Storage processor is the main storage interaction point. +/// It holds down the connection (either direct or pooled) to the database +/// and provide methods to obtain different storage schemas. +#[derive(Debug)] +pub struct StorageProcessor<'a> { + conn: ConnectionHolder<'a>, + in_transaction: bool, +} + +impl<'a> StorageProcessor<'a> { + /// Creates a `StorageProcessor` using an unique sole connection to the database. + pub async fn establish_connection(connect_to_master: bool) -> StorageProcessor<'static> { + let database_url = if connect_to_master { + get_master_database_url() + } else { + get_replica_database_url() + }; + let connection = PgConnection::connect(&database_url).await.unwrap(); + StorageProcessor { + conn: ConnectionHolder::Direct(connection), + in_transaction: false, + } + } + + pub async fn start_transaction<'c: 'b, 'b>(&'c mut self) -> StorageProcessor<'b> { + let transaction = self.conn().begin().await.unwrap(); + + let mut processor = StorageProcessor::from_transaction(transaction); + processor.in_transaction = true; + + processor + } + + pub fn start_transaction_blocking<'c: 'b, 'b>(&'c mut self) -> StorageProcessor<'b> { + block_on(self.start_transaction()) + } + + /// Checks if the `StorageProcessor` is currently within database transaction. 
+ pub fn in_transaction(&self) -> bool { + self.in_transaction + } + + pub fn from_transaction(conn: Transaction<'_, Postgres>) -> StorageProcessor<'_> { + StorageProcessor { + conn: ConnectionHolder::Transaction(conn), + in_transaction: true, + } + } + + pub fn from_test_transaction<'b>( + conn: &'b mut Transaction<'static, Postgres>, + ) -> StorageProcessor<'b> { + StorageProcessor { + conn: ConnectionHolder::TestTransaction(conn), + in_transaction: true, + } + } + + pub async fn commit(self) { + if let ConnectionHolder::Transaction(transaction) = self.conn { + transaction.commit().await.unwrap(); + } else { + panic!("StorageProcessor::commit can only be invoked after calling StorageProcessor::begin_transaction"); + } + } + + pub fn commit_blocking(self) { + block_on(self.commit()) + } + + /// Creates a `StorageProcessor` using a pool of connections. + /// This method borrows one of the connections from the pool, and releases it + /// after `drop`. + pub fn from_pool(conn: PoolConnection) -> Self { + Self { + conn: ConnectionHolder::Pooled(conn), + in_transaction: false, + } + } + + fn conn(&mut self) -> &mut PgConnection { + match &mut self.conn { + ConnectionHolder::Pooled(conn) => conn, + ConnectionHolder::Direct(conn) => conn, + ConnectionHolder::Transaction(conn) => conn, + ConnectionHolder::TestTransaction(conn) => conn, + } + } + + pub fn transactions_dal(&mut self) -> TransactionsDal<'_, 'a> { + TransactionsDal { storage: self } + } + + pub fn transactions_web3_dal(&mut self) -> TransactionsWeb3Dal<'_, 'a> { + TransactionsWeb3Dal { storage: self } + } + + pub fn blocks_dal(&mut self) -> BlocksDal<'_, 'a> { + BlocksDal { storage: self } + } + + pub fn blocks_web3_dal(&mut self) -> BlocksWeb3Dal<'_, 'a> { + BlocksWeb3Dal { storage: self } + } + + pub fn eth_sender_dal(&mut self) -> EthSenderDal<'_, 'a> { + EthSenderDal { storage: self } + } + + pub fn events_dal(&mut self) -> EventsDal<'_, 'a> { + EventsDal { storage: self } + } + + pub fn 
events_web3_dal(&mut self) -> EventsWeb3Dal<'_, 'a> { + EventsWeb3Dal { storage: self } + } + + pub fn storage_dal(&mut self) -> StorageDal<'_, 'a> { + StorageDal { storage: self } + } + + pub fn storage_web3_dal(&mut self) -> StorageWeb3Dal<'_, 'a> { + StorageWeb3Dal { storage: self } + } + + pub fn storage_logs_dal(&mut self) -> StorageLogsDal<'_, 'a> { + StorageLogsDal { storage: self } + } + + pub fn storage_logs_dedup_dal(&mut self) -> StorageLogsDedupDal<'_, 'a> { + StorageLogsDedupDal { storage: self } + } + + pub fn storage_load_dal(&mut self) -> StorageLoadDal<'_, 'a> { + StorageLoadDal { storage: self } + } + + pub fn tokens_dal(&mut self) -> TokensDal<'_, 'a> { + TokensDal { storage: self } + } + + pub fn tokens_web3_dal(&mut self) -> TokensWeb3Dal<'_, 'a> { + TokensWeb3Dal { storage: self } + } + + pub fn prover_dal(&mut self) -> ProverDal<'_, 'a> { + ProverDal { storage: self } + } + + pub fn witness_generator_dal(&mut self) -> WitnessGeneratorDal<'_, 'a> { + WitnessGeneratorDal { storage: self } + } + + pub fn explorer(&mut self) -> ExplorerIntermediator<'_, 'a> { + ExplorerIntermediator { storage: self } + } + + pub fn fee_monitor_dal(&mut self) -> FeeMonitorDal<'_, 'a> { + FeeMonitorDal { storage: self } + } + + pub fn gpu_prover_queue_dal(&mut self) -> GpuProverQueueDal<'_, 'a> { + GpuProverQueueDal { storage: self } + } +} diff --git a/core/lib/dal/src/models/mod.rs b/core/lib/dal/src/models/mod.rs new file mode 100644 index 000000000000..e491bcf5678c --- /dev/null +++ b/core/lib/dal/src/models/mod.rs @@ -0,0 +1,11 @@ +pub mod storage_block; +pub mod storage_contract; +pub mod storage_eth_tx; +pub mod storage_event; +pub mod storage_fee_monitor; +pub mod storage_log; +pub mod storage_prover_job_info; +pub mod storage_state_record; +pub mod storage_token; +pub mod storage_transaction; +pub mod storage_witness_job_info; diff --git a/core/lib/dal/src/models/storage_block.rs b/core/lib/dal/src/models/storage_block.rs new file mode 100644 index 
000000000000..0a0a4d12bb11 --- /dev/null +++ b/core/lib/dal/src/models/storage_block.rs @@ -0,0 +1,352 @@ +use bigdecimal::{BigDecimal, ToPrimitive}; +use sqlx::postgres::PgArguments; +use std::convert::TryInto; +use std::str::FromStr; +use thiserror::Error; +use zksync_types::explorer_api::BlockDetails; + +use sqlx::query::Query; + +use sqlx::types::chrono::{DateTime, NaiveDateTime, Utc}; +use sqlx::Postgres; +use zksync_types::api::{self, BlockId}; +use zksync_types::commitment::{BlockMetaParameters, BlockMetadata}; +use zksync_types::{ + block::L1BatchHeader, + explorer_api::{BlockPageItem, BlockStatus}, + Address, L1BatchNumber, MiniblockNumber, H2048, H256, U256, +}; + +#[derive(Debug, Error)] +pub enum StorageBlockConvertError { + #[error("Incomplete block")] + IncompleteBlock, +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageBlock { + pub number: i64, + pub timestamp: i64, + pub is_finished: bool, + pub l1_tx_count: i32, + pub l2_tx_count: i32, + pub fee_account_address: Vec, + pub bloom: Vec, + pub l2_to_l1_logs: Vec>, + pub priority_ops_onchain_data: Vec>, + + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + + pub parent_hash: Option>, + pub hash: Option>, + pub merkle_root_hash: Option>, + + pub commitment: Option>, + pub meta_parameters_hash: Option>, + pub pass_through_data_hash: Option>, + pub aux_data_hash: Option>, + + pub rollup_last_leaf_index: Option, + pub zkporter_is_available: Option, + pub bootloader_code_hash: Option>, + pub default_aa_code_hash: Option>, + + pub l2_to_l1_messages: Vec>, + pub l2_l1_compressed_messages: Option>, + pub l2_l1_merkle_root: Option>, + pub compressed_initial_writes: Option>, + pub compressed_repeated_writes: Option>, + pub compressed_write_logs: Option>, + pub compressed_contracts: Option>, + + pub eth_prove_tx_id: Option, + pub eth_commit_tx_id: Option, + pub eth_execute_tx_id: Option, + + pub predicted_commit_gas_cost: i64, + pub predicted_prove_gas_cost: i64, + pub 
predicted_execute_gas_cost: i64, + + pub initial_bootloader_heap_content: serde_json::Value, + pub used_contract_hashes: serde_json::Value, + + pub base_fee_per_gas: BigDecimal, + pub l1_gas_price: i64, + pub l2_fair_gas_price: i64, + + // These fields are not used, but are present for compatibility reasons + pub gas_per_pubdata_byte_in_block: Option, + pub gas_per_pubdata_limit: i64, + + pub skip_proof: bool, +} + +impl From for L1BatchHeader { + fn from(block: StorageBlock) -> Self { + let priority_ops_onchain_data: Vec<_> = block + .priority_ops_onchain_data + .into_iter() + .map(|raw_data| raw_data.into()) + .collect(); + + let l2_to_l1_logs: Vec<_> = block + .l2_to_l1_logs + .into_iter() + .map(|raw_data| raw_data.into()) + .collect(); + + L1BatchHeader { + number: L1BatchNumber(block.number as u32), + is_finished: block.is_finished, + timestamp: block.timestamp as u64, + fee_account_address: Address::from_slice(&block.fee_account_address), + priority_ops_onchain_data, + l1_tx_count: block.l1_tx_count as u16, + l2_tx_count: block.l2_tx_count as u16, + l2_to_l1_logs, + l2_to_l1_messages: block.l2_to_l1_messages, + + bloom: H2048::from_slice(&block.bloom), + initial_bootloader_contents: serde_json::from_value::>( + block.initial_bootloader_heap_content, + ) + .expect("invalid value for initial_bootloader_heap_content in the DB"), + used_contract_hashes: serde_json::from_value::>(block.used_contract_hashes) + .expect("invalid value for used_contract_hashes in the DB"), + base_fee_per_gas: block + .base_fee_per_gas + .to_u64() + .expect("base_fee_per_gas should fit in u64"), + l1_gas_price: block.l1_gas_price as u64, + l2_fair_gas_price: block.l2_fair_gas_price as u64, + } + } +} + +impl TryInto for StorageBlock { + type Error = StorageBlockConvertError; + + fn try_into(self) -> Result { + Ok(BlockMetadata { + root_hash: H256::from_slice( + &self + .hash + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + rollup_last_leaf_index: self + 
.rollup_last_leaf_index + .ok_or(StorageBlockConvertError::IncompleteBlock)? + as u64, + merkle_root_hash: H256::from_slice( + &self + .merkle_root_hash + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + initial_writes_compressed: self + .compressed_initial_writes + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + repeated_writes_compressed: self + .compressed_repeated_writes + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + l2_l1_messages_compressed: self + .l2_l1_compressed_messages + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + l2_l1_merkle_root: H256::from_slice( + &self + .l2_l1_merkle_root + .clone() + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + aux_data_hash: H256::from_slice( + &self + .aux_data_hash + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + meta_parameters_hash: H256::from_slice( + &self + .meta_parameters_hash + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + pass_through_data_hash: H256::from_slice( + &self + .pass_through_data_hash + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + commitment: H256::from_slice( + &self + .commitment + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + block_meta_params: BlockMetaParameters { + zkporter_is_available: self + .zkporter_is_available + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + bootloader_code_hash: H256::from_slice( + &self + .bootloader_code_hash + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + default_aa_code_hash: H256::from_slice( + &self + .default_aa_code_hash + .ok_or(StorageBlockConvertError::IncompleteBlock)?, + ), + }, + }) + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageBlockPageItem { + pub number: i64, + pub l1_tx_count: i32, + pub l2_tx_count: i32, + pub hash: Option>, + pub timestamp: i64, +} + +pub fn block_page_item_from_storage( + storage: StorageBlockPageItem, + last_verified: MiniblockNumber, +) -> 
BlockPageItem { + let status = if storage.number > last_verified.0 as i64 { + BlockStatus::Sealed + } else { + BlockStatus::Verified + }; + BlockPageItem { + number: MiniblockNumber(storage.number as u32), + l1_tx_count: storage.l1_tx_count as usize, + l2_tx_count: storage.l2_tx_count as usize, + hash: storage.hash.map(|hash| H256::from_slice(&hash)), + status, + timestamp: storage.timestamp as u64, + } +} + +/// Returns block_number SQL statement and the next argument index that can be used +pub fn web3_block_number_to_sql(block_number: api::BlockNumber, arg_index: u8) -> (String, u8) { + match block_number { + api::BlockNumber::Earliest => ("(SELECT 0::bigint as number)".to_string(), arg_index), + api::BlockNumber::Pending => ( + "(SELECT (MAX(number) + 1) as number FROM miniblocks)".to_string(), + arg_index, + ), + api::BlockNumber::Latest => ( + "(SELECT MAX(number) as number FROM miniblocks)".to_string(), + arg_index, + ), + api::BlockNumber::Number(_) => { + (format!("(SELECT ${} as number)", arg_index), arg_index + 1) + } + api::BlockNumber::Committed => ( + "(SELECT MAX(number) as number FROM miniblocks)".to_string(), + arg_index, + ), + api::BlockNumber::Finalized => ( + " + (SELECT COALESCE( + ( + SELECT miniblocks.number FROM miniblocks + JOIN l1_batches ON miniblocks.l1_batch_number = l1_batches.number + JOIN eth_txs ON l1_batches.eth_execute_tx_id = eth_txs.id + WHERE eth_txs.confirmed_eth_tx_history_id IS NOT NULL + ORDER BY miniblocks.number DESC + LIMIT 1 + ), + 0 + ) as number) + " + .to_string(), + arg_index, + ), + } +} + +pub fn web3_block_where_sql(block_id: BlockId, arg_index: u8) -> String { + match block_id { + BlockId::Hash(_) => format!("miniblocks.hash = ${}", arg_index), + BlockId::Number(number) => { + let block_sql = web3_block_number_to_sql(number, arg_index).0; + format!("miniblocks.number = {}", block_sql) + } + } +} + +pub fn bind_block_where_sql_params( + block_id: BlockId, + query: Query, +) -> Query { + match block_id { + // 
these block_id types result in `$1` in the query string, which we have to `bind` + BlockId::Hash(block_hash) => query.bind(block_hash.0.to_vec()), + BlockId::Number(api::BlockNumber::Number(number)) => query.bind(number.as_u64() as i64), + // others don't introduce `$1`, so we don't have to `bind` anything + _ => query, + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageBlockDetails { + pub number: i64, + pub timestamp: i64, + pub l1_tx_count: i32, + pub l2_tx_count: i32, + pub root_hash: Option>, + pub commit_tx_hash: Option, + pub committed_at: Option, + pub prove_tx_hash: Option, + pub proven_at: Option, + pub execute_tx_hash: Option, + pub executed_at: Option, +} + +impl From for BlockDetails { + fn from(storage_block_details: StorageBlockDetails) -> Self { + let status = if storage_block_details.number == 0 + || storage_block_details.execute_tx_hash.is_some() + { + BlockStatus::Verified + } else { + BlockStatus::Sealed + }; + BlockDetails { + number: MiniblockNumber(storage_block_details.number as u32), + timestamp: storage_block_details.timestamp as u64, + l1_tx_count: storage_block_details.l1_tx_count as usize, + l2_tx_count: storage_block_details.l2_tx_count as usize, + status, + root_hash: storage_block_details + .root_hash + .as_deref() + .map(H256::from_slice), + commit_tx_hash: storage_block_details + .commit_tx_hash + .as_deref() + .map(|hash| H256::from_str(hash).expect("Incorrect commit_tx hash")), + committed_at: storage_block_details + .committed_at + .map(|committed_at| DateTime::::from_utc(committed_at, Utc)), + prove_tx_hash: storage_block_details + .prove_tx_hash + .as_deref() + .map(|hash| H256::from_str(hash).expect("Incorrect verify_tx hash")), + proven_at: storage_block_details + .proven_at + .map(|proven_at| DateTime::::from_utc(proven_at, Utc)), + execute_tx_hash: storage_block_details + .execute_tx_hash + .as_deref() + .map(|hash| H256::from_str(hash).expect("Incorrect verify_tx hash")), + executed_at: 
storage_block_details + .executed_at + .map(|executed_at| DateTime::::from_utc(executed_at, Utc)), + } + } +} diff --git a/core/lib/dal/src/models/storage_contract.rs b/core/lib/dal/src/models/storage_contract.rs new file mode 100644 index 000000000000..67b9bb9e76b8 --- /dev/null +++ b/core/lib/dal/src/models/storage_contract.rs @@ -0,0 +1,17 @@ +use zksync_types::vm_trace::ContractSourceDebugInfo; + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageContractSource { + pub assembly_code: String, + pub pc_line_mapping: serde_json::Value, +} + +impl From for ContractSourceDebugInfo { + fn from(source: StorageContractSource) -> ContractSourceDebugInfo { + ContractSourceDebugInfo { + assembly_code: source.assembly_code, + pc_line_mapping: serde_json::from_value(source.pc_line_mapping) + .expect("invalid pc_line_mapping json in database"), + } + } +} diff --git a/core/lib/dal/src/models/storage_eth_tx.rs b/core/lib/dal/src/models/storage_eth_tx.rs new file mode 100644 index 000000000000..21fc9c559730 --- /dev/null +++ b/core/lib/dal/src/models/storage_eth_tx.rs @@ -0,0 +1,95 @@ +use sqlx::types::chrono::NaiveDateTime; +use std::str::FromStr; +use zksync_types::aggregated_operations::AggregatedActionType; +use zksync_types::eth_sender::{EthTx, TxHistory, TxHistoryToSend}; +use zksync_types::{Address, H256}; + +#[derive(Debug, Clone)] +pub struct StorageEthTx { + pub id: i32, + pub nonce: i64, + pub contract_address: String, + pub raw_tx: Vec, + pub tx_type: String, + pub has_failed: bool, + pub confirmed_eth_tx_history_id: Option, + pub gas_used: Option, + pub predicted_gas_cost: i64, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub sent_at_block: Option, +} + +#[derive(Clone, Debug)] +pub struct StorageTxHistoryToSend { + pub id: i32, + pub eth_tx_id: i32, + pub tx_hash: String, + pub priority_fee_per_gas: i64, + pub base_fee_per_gas: i64, + pub signed_raw_tx: Option>, + pub nonce: i64, +} + +#[derive(Clone, Debug)] +pub struct 
StorageTxHistory { + pub id: i32, + pub eth_tx_id: i32, + pub priority_fee_per_gas: i64, + pub base_fee_per_gas: i64, + pub tx_hash: String, + pub confirmed_at: Option, + pub sent_at: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub signed_raw_tx: Option>, + pub sent_at_block: Option, +} + +impl From for EthTx { + fn from(tx: StorageEthTx) -> EthTx { + EthTx { + id: tx.id as u32, + nonce: tx.nonce as u64, + contract_address: Address::from_str(&tx.contract_address) + .expect("Incorrect address in db"), + raw_tx: tx.raw_tx.clone(), + tx_type: AggregatedActionType::from_str(&tx.tx_type).expect("Wrong agg type"), + created_at_timestamp: tx.created_at.timestamp() as u64, + predicted_gas_cost: tx.predicted_gas_cost as u64, + } + } +} + +impl From for TxHistory { + fn from(history: StorageTxHistory) -> TxHistory { + TxHistory { + id: history.id as u32, + eth_tx_id: history.eth_tx_id as u32, + base_fee_per_gas: history.base_fee_per_gas as u64, + priority_fee_per_gas: history.priority_fee_per_gas as u64, + tx_hash: H256::from_str(&history.tx_hash).expect("Incorrect hash"), + signed_raw_tx: history + .signed_raw_tx + .expect("Should rely only on the new txs"), + + sent_at_block: history.sent_at_block.map(|block| block as u32), + } + } +} + +impl From for TxHistoryToSend { + fn from(history: StorageTxHistoryToSend) -> TxHistoryToSend { + TxHistoryToSend { + id: history.id as u32, + eth_tx_id: history.eth_tx_id as u32, + tx_hash: H256::from_str(&history.tx_hash).expect("Incorrect hash"), + base_fee_per_gas: history.base_fee_per_gas as u64, + priority_fee_per_gas: history.priority_fee_per_gas as u64, + signed_raw_tx: history + .signed_raw_tx + .expect("Should rely only on the new txs"), + nonce: history.nonce as u64, + } + } +} diff --git a/core/lib/dal/src/models/storage_event.rs b/core/lib/dal/src/models/storage_event.rs new file mode 100644 index 000000000000..754c9fa1d6a6 --- /dev/null +++ b/core/lib/dal/src/models/storage_event.rs @@ -0,0 
+1,87 @@ +use zksync_types::{ + api::{L2ToL1Log, Log}, + web3::types::{Bytes, Index, U256, U64}, + Address, H256, +}; + +#[derive(sqlx::FromRow, Debug, Clone)] +pub struct StorageWeb3Log { + pub address: Vec, + pub topic1: Vec, + pub topic2: Vec, + pub topic3: Vec, + pub topic4: Vec, + pub value: Vec, + pub block_hash: Option>, + pub miniblock_number: i64, + pub l1_batch_number: Option, + pub tx_hash: Vec, + pub tx_index_in_block: i32, + pub event_index_in_block: i32, + pub event_index_in_tx: i32, +} + +impl From for Log { + fn from(log: StorageWeb3Log) -> Log { + let topics = vec![log.topic1, log.topic2, log.topic3, log.topic4] + .into_iter() + .filter_map(|topic| { + if !topic.is_empty() { + Some(H256::from_slice(&topic)) + } else { + None + } + }) + .collect(); + Log { + address: Address::from_slice(&log.address), + topics, + data: Bytes(log.value), + block_hash: log.block_hash.map(|hash| H256::from_slice(&hash)), + block_number: Some(U64::from(log.miniblock_number as u32)), + l1_batch_number: log.l1_batch_number.map(U64::from), + transaction_hash: Some(H256::from_slice(&log.tx_hash)), + transaction_index: Some(Index::from(log.tx_index_in_block as u32)), + log_index: Some(U256::from(log.event_index_in_block as u32)), + transaction_log_index: Some(U256::from(log.event_index_in_block as u32)), + log_type: None, + removed: Some(false), + } + } +} + +#[derive(sqlx::FromRow, Debug, Clone)] +pub struct StorageL2ToL1Log { + pub block_hash: Option>, + pub miniblock_number: i64, + pub l1_batch_number: Option, + pub log_index_in_miniblock: i32, + pub log_index_in_tx: i32, + pub tx_hash: Vec, + pub shard_id: i32, + pub is_service: bool, + pub tx_index_in_miniblock: i32, + pub tx_index_in_l1_batch: i32, + pub sender: Vec, + pub key: Vec, + pub value: Vec, +} + +impl From for L2ToL1Log { + fn from(log: StorageL2ToL1Log) -> L2ToL1Log { + L2ToL1Log { + block_hash: log.block_hash.map(|hash| H256::from_slice(&hash)), + block_number: (log.miniblock_number as u32).into(), + 
l1_batch_number: (log.l1_batch_number).map(|n| (n as u32).into()), + log_index: (log.log_index_in_miniblock as u32).into(), + transaction_index: (log.tx_index_in_miniblock as u32).into(), + transaction_hash: H256::from_slice(&log.tx_hash), + transaction_log_index: (log.log_index_in_tx as u32).into(), + shard_id: (log.shard_id as u32).into(), + is_service: log.is_service, + sender: Address::from_slice(&log.sender), + key: H256::from_slice(&log.key), + value: H256::from_slice(&log.value), + } + } +} diff --git a/core/lib/dal/src/models/storage_fee_monitor.rs b/core/lib/dal/src/models/storage_fee_monitor.rs new file mode 100644 index 000000000000..989308f79fea --- /dev/null +++ b/core/lib/dal/src/models/storage_fee_monitor.rs @@ -0,0 +1,16 @@ +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageBlockGasData { + pub number: i64, + + pub commit_gas: Option, + pub commit_base_gas_price: Option, + pub commit_priority_gas_price: Option, + + pub prove_gas: Option, + pub prove_base_gas_price: Option, + pub prove_priority_gas_price: Option, + + pub execute_gas: Option, + pub execute_base_gas_price: Option, + pub execute_priority_gas_price: Option, +} diff --git a/core/lib/dal/src/models/storage_log.rs b/core/lib/dal/src/models/storage_log.rs new file mode 100644 index 000000000000..bc4028b4d8b4 --- /dev/null +++ b/core/lib/dal/src/models/storage_log.rs @@ -0,0 +1,29 @@ +use sqlx::types::chrono::NaiveDateTime; +use zksync_types::{AccountTreeId, Address, StorageKey, StorageLog, StorageLogKind, H256}; + +#[derive(sqlx::FromRow, Debug, Clone)] +pub struct DBStorageLog { + pub id: i64, + pub hashed_key: Vec, + pub address: Vec, + pub key: Vec, + pub value: Vec, + pub operation_number: i32, + pub tx_hash: Vec, + pub miniblock_number: i64, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, +} + +impl From for StorageLog { + fn from(log: DBStorageLog) -> StorageLog { + StorageLog { + kind: StorageLogKind::Write, + key: StorageKey::new( + 
AccountTreeId::new(Address::from_slice(&log.address)), + H256::from_slice(&log.key), + ), + value: H256::from_slice(&log.value), + } + } +} diff --git a/core/lib/dal/src/models/storage_prover_job_info.rs b/core/lib/dal/src/models/storage_prover_job_info.rs new file mode 100644 index 000000000000..485d126ece59 --- /dev/null +++ b/core/lib/dal/src/models/storage_prover_job_info.rs @@ -0,0 +1,76 @@ +use core::panic; +use sqlx::types::chrono::{DateTime, NaiveDateTime, NaiveTime, Utc}; +use std::convert::TryFrom; +use std::str::FromStr; + +use zksync_types::proofs::{ + JobPosition, ProverJobStatus, ProverJobStatusFailed, ProverJobStatusInProgress, + ProverJobStatusSuccessful, +}; +use zksync_types::{ + proofs::{AggregationRound, ProverJobInfo}, + L1BatchNumber, +}; + +#[derive(sqlx::FromRow)] +pub struct StorageProverJobInfo { + pub id: i64, + pub l1_batch_number: i64, + pub circuit_type: String, + pub status: String, + pub aggregation_round: i32, + pub sequence_number: i32, + pub input_length: i32, + pub attempts: i32, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub processing_started_at: Option, + pub time_taken: Option, + pub error: Option, +} + +impl From for ProverJobInfo { + fn from(x: StorageProverJobInfo) -> Self { + fn nt2d(nt: NaiveDateTime) -> DateTime { + DateTime::from_utc(nt, Utc) + } + + let status = match ProverJobStatus::from_str(x.status.as_str()) + .unwrap_or_else(|_| panic!("Unknown value '{}' in prover job status.", x.status)) + { + ProverJobStatus::InProgress(_) => { + ProverJobStatus::InProgress(ProverJobStatusInProgress { + started_at: nt2d(x.processing_started_at.unwrap()), + }) + } + ProverJobStatus::Successful(_) => { + ProverJobStatus::Successful(ProverJobStatusSuccessful { + started_at: nt2d(x.processing_started_at.unwrap()), + time_taken: x.time_taken.unwrap() - NaiveTime::from_hms_opt(0, 0, 0).unwrap(), + }) + } + ProverJobStatus::Failed(_) => ProverJobStatus::Failed(ProverJobStatusFailed { + started_at: 
nt2d(x.processing_started_at.unwrap()), + error: x.error.unwrap_or_else(|| { + panic!("Error must be present on failed prover job records.") + }), + }), + x => x, + }; + + ProverJobInfo { + id: x.id as u32, + block_number: L1BatchNumber(x.l1_batch_number as u32), + circuit_type: x.circuit_type, + position: JobPosition { + aggregation_round: AggregationRound::try_from(x.aggregation_round).unwrap(), + sequence_number: x.sequence_number as usize, + }, + input_length: x.input_length as u64, + status, + attempts: x.attempts as u32, + created_at: nt2d(x.created_at), + updated_at: nt2d(x.updated_at), + } + } +} diff --git a/core/lib/dal/src/models/storage_state_record.rs b/core/lib/dal/src/models/storage_state_record.rs new file mode 100644 index 000000000000..46a031b1893e --- /dev/null +++ b/core/lib/dal/src/models/storage_state_record.rs @@ -0,0 +1,6 @@ +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageStateRecord { + pub address: Vec, + pub key: Vec, + pub value: Vec, +} diff --git a/core/lib/dal/src/models/storage_token.rs b/core/lib/dal/src/models/storage_token.rs new file mode 100644 index 000000000000..0ebd6b95bd15 --- /dev/null +++ b/core/lib/dal/src/models/storage_token.rs @@ -0,0 +1,70 @@ +use sqlx::types::chrono::{DateTime, Utc}; +use sqlx::types::{chrono::NaiveDateTime, BigDecimal}; +use zksync_types::tokens::{TokenMarketVolume, TokenMetadata, TokenPrice}; +use zksync_utils::big_decimal_to_ratio; + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTokenMetadata { + pub name: String, + pub symbol: String, + pub decimals: i32, +} + +impl From for TokenMetadata { + fn from(metadata: StorageTokenMetadata) -> TokenMetadata { + TokenMetadata { + name: metadata.name, + symbol: metadata.symbol, + decimals: metadata.decimals as u8, + } + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTokenPrice { + pub usd_price: Option, + pub usd_price_updated_at: Option, +} + +impl From for Option { + fn from(price: StorageTokenPrice) -> Option { 
+ match (&price.usd_price, price.usd_price_updated_at) { + (Some(usd_price), Some(updated_at)) => Some(TokenPrice { + usd_price: big_decimal_to_ratio(usd_price).unwrap(), + last_updated: DateTime::::from_utc(updated_at, Utc), + }), + (None, None) => None, + _ => { + vlog::warn!( + "Found storage token with {:?} `usd_price` and {:?} `usd_price_updated_at`", + price.usd_price, + price.usd_price_updated_at + ); + None + } + } + } +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTokenMarketVolume { + pub market_volume: Option, + pub market_volume_updated_at: Option, +} + +impl From for Option { + fn from(market_volume: StorageTokenMarketVolume) -> Option { + market_volume + .market_volume + .as_ref() + .map(|volume| TokenMarketVolume { + market_volume: big_decimal_to_ratio(volume).unwrap(), + last_updated: DateTime::::from_utc( + market_volume + .market_volume_updated_at + .expect("If `market_volume` is Some then `updated_at` must be Some"), + Utc, + ), + }) + } +} diff --git a/core/lib/dal/src/models/storage_transaction.rs b/core/lib/dal/src/models/storage_transaction.rs new file mode 100644 index 000000000000..f7ddeb2a463b --- /dev/null +++ b/core/lib/dal/src/models/storage_transaction.rs @@ -0,0 +1,648 @@ +use crate::BigDecimal; +use bigdecimal::Zero; +use itertools::Itertools; +use sqlx::postgres::PgRow; +use sqlx::types::chrono::{DateTime, NaiveDateTime, Utc}; +use sqlx::Row; + +use std::str::FromStr; +use zksync_types::l2::TransactionType; +use zksync_types::transaction_request::PaymasterParams; +use zksync_types::web3::types::U64; +use zksync_types::{api, explorer_api, L2_ETH_TOKEN_ADDRESS}; +use zksync_types::{ + explorer_api::{BalanceChangeInfo, BalanceChangeType, Erc20TransferInfo, TransactionStatus}, + fee::Fee, + l1::{OpProcessingType, PriorityQueueType}, + Address, Execute, L1TxCommonData, L2ChainId, L2TxCommonData, Nonce, PackedEthSignature, + PriorityOpId, Transaction, BOOTLOADER_ADDRESS, EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, + 
EIP_712_TX_TYPE, H160, H256, U256, +}; +use zksync_types::{ExecuteTransactionCommon, L1BatchNumber, MiniblockNumber}; +use zksync_utils::bigdecimal_to_u256; + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTransaction { + pub priority_op_id: Option, + pub hash: Vec, + pub is_priority: bool, + pub full_fee: Option, + pub layer_2_tip_fee: Option, + pub initiator_address: Vec, + pub nonce: Option, + pub signature: Option>, + pub gas_limit: Option, + pub max_fee_per_gas: Option, + pub max_priority_fee_per_gas: Option, + pub gas_per_storage_limit: Option, + pub gas_per_pubdata_limit: Option, + pub input: Option>, + pub tx_format: Option, + pub data: serde_json::Value, + pub received_at: NaiveDateTime, + pub in_mempool: bool, + + pub l1_block_number: Option, + pub l1_batch_number: Option, + pub l1_batch_tx_index: Option, + pub miniblock_number: Option, + pub index_in_block: Option, + pub error: Option, + pub effective_gas_price: Option, + pub contract_address: Option>, + pub value: BigDecimal, + + pub paymaster: Vec, + pub paymaster_input: Vec, + + pub refunded_gas: i64, + + pub execution_info: serde_json::Value, + + pub l1_tx_mint: Option, + pub l1_tx_refund_recipient: Option>, + + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, +} + +#[derive(Debug, Clone, sqlx::FromRow)] +pub struct StorageTransactionDetails { + pub priority_op_id: Option, + pub hash: Vec, + pub is_priority: bool, + pub full_fee: Option, + pub layer_2_tip_fee: Option, + pub initiator_address: Vec, + pub nonce: Option, + pub signature: Option>, + pub gas_limit: Option, + pub max_fee_per_gas: Option, + pub max_priority_fee_per_gas: Option, + pub gas_per_storage_limit: Option, + pub gas_per_pubdata_limit: Option, + pub input: Option>, + pub tx_format: Option, + pub data: serde_json::Value, + pub received_at: NaiveDateTime, + pub in_mempool: bool, + + pub l1_block_number: Option, + pub l1_batch_tx_index: Option, + pub l1_batch_number: Option, + pub miniblock_number: Option, + 
pub block_hash: Option>, + pub index_in_block: Option, + pub error: Option, + pub effective_gas_price: Option, + pub contract_address: Option>, + pub value: BigDecimal, + pub paymaster: Vec, + pub paymaster_input: Vec, + + pub l1_tx_mint: Option, + pub l1_tx_refund_recipient: Option>, + + pub refunded_gas: i64, + + pub execution_info: serde_json::Value, + + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + + pub eth_commit_tx_hash: Option, + pub eth_prove_tx_hash: Option, + pub eth_execute_tx_hash: Option, +} + +impl StorageTransactionDetails { + fn get_transaction_status(&self) -> TransactionStatus { + if self.error.is_some() { + TransactionStatus::Failed + } else if self.eth_execute_tx_hash.is_some() { + TransactionStatus::Verified + } else if self.miniblock_number.is_some() { + TransactionStatus::Included + } else { + TransactionStatus::Pending + } + } +} + +impl From for api::TransactionDetails { + fn from(tx_details: StorageTransactionDetails) -> Self { + let status = tx_details.get_transaction_status(); + + let fee = if tx_details.is_priority { + let full_fee_string = tx_details + .full_fee + .expect("full fee is mandatory for priority operation") + .to_string(); + + U256::from_dec_str(&full_fee_string) + .unwrap_or_else(|_| panic!("Incorrect full fee value in DB {}", full_fee_string)) + } else { + let effective_gas_price = + bigdecimal_to_u256(tx_details.effective_gas_price.clone().unwrap_or_default()); + + let gas_limit = bigdecimal_to_u256( + tx_details + .gas_limit + .clone() + .expect("gas limit is mandatory for transaction"), + ); + gas_limit * effective_gas_price + }; + + let initiator_address = H160::from_slice(tx_details.initiator_address.as_slice()); + let received_at = DateTime::::from_utc(tx_details.received_at, Utc); + + let eth_commit_tx_hash = tx_details + .eth_commit_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + let eth_prove_tx_hash = tx_details + .eth_prove_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + 
let eth_execute_tx_hash = tx_details + .eth_execute_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + + api::TransactionDetails { + is_l1_originated: tx_details.is_priority, + status, + fee, + initiator_address, + received_at, + eth_commit_tx_hash, + eth_prove_tx_hash, + eth_execute_tx_hash, + } + } +} + +pub fn web3_transaction_select_sql() -> &'static str { + r#" + transactions.hash as tx_hash, + transactions.index_in_block as index_in_block, + transactions.miniblock_number as block_number, + transactions.nonce as nonce, + transactions.signature as signature, + transactions.initiator_address as initiator_address, + transactions.tx_format as tx_format, + transactions.value as value, + transactions.gas_limit as gas_limit, + transactions.max_fee_per_gas as max_fee_per_gas, + transactions.max_priority_fee_per_gas as max_priority_fee_per_gas, + transactions.effective_gas_price as effective_gas_price, + transactions.l1_batch_number as l1_batch_number_tx, + transactions.l1_batch_tx_index as l1_batch_tx_index, + transactions.data->'contractAddress' as "execute_contract_address", + transactions.data->'calldata' as "calldata", + miniblocks.hash as "block_hash" + "# +} + +pub fn extract_web3_transaction( + db_row: PgRow, + chain_id: L2ChainId, +) -> zksync_types::api::Transaction { + let row_signature: Option> = db_row.get("signature"); + let signature = + row_signature.and_then(|signature| PackedEthSignature::deserialize_packed(&signature).ok()); + zksync_types::api::Transaction { + hash: H256::from_slice(db_row.get("tx_hash")), + nonce: U256::from(db_row.try_get::("nonce").ok().unwrap_or(0)), + block_hash: db_row.try_get("block_hash").ok().map(H256::from_slice), + block_number: db_row + .try_get::("block_number") + .ok() + .map(U64::from), + transaction_index: db_row + .try_get::("index_in_block") + .ok() + .map(U64::from), + from: Some(H160::from_slice(db_row.get("initiator_address"))), + to: Some( + serde_json::from_value::
(db_row.get("execute_contract_address")) + .expect("incorrect address value in the database"), + ), + value: bigdecimal_to_u256(db_row.get::("value")), + // `gas_price`, `max_fee_per_gas`, `max_priority_fee_per_gas` will be zero for the priority transactions. + // For common L2 transactions `gas_price` is equal to `effective_gas_price` if the transaction is included + // in some block, or `max_fee_per_gas` otherwise. + gas_price: Some(bigdecimal_to_u256( + db_row + .try_get::("effective_gas_price") + .or_else(|_| db_row.try_get::("max_fee_per_gas")) + .unwrap_or_else(|_| BigDecimal::zero()), + )), + max_fee_per_gas: Some(bigdecimal_to_u256( + db_row + .try_get::("max_fee_per_gas") + .unwrap_or_else(|_| BigDecimal::zero()), + )), + max_priority_fee_per_gas: Some(bigdecimal_to_u256( + db_row + .try_get::("max_priority_fee_per_gas") + .unwrap_or_else(|_| BigDecimal::zero()), + )), + gas: bigdecimal_to_u256(db_row.get::("gas_limit")), + input: serde_json::from_value(db_row.get::("calldata")) + .expect("Incorrect calldata value in the database"), + raw: None, + v: signature.as_ref().map(|s| U64::from(s.v())), + r: signature.as_ref().map(|s| U256::from(s.r())), + s: signature.as_ref().map(|s| U256::from(s.s())), + transaction_type: db_row + .try_get::, &str>("tx_format") + .unwrap_or_default() + .map(U64::from), + access_list: None, + chain_id: U256::from(chain_id.0), + l1_batch_number: db_row + .try_get::("l1_batch_number_tx") + .ok() + .map(U64::from), + l1_batch_tx_index: db_row + .try_get::("l1_batch_tx_index") + .ok() + .map(U64::from), + } +} + +impl From for Transaction { + fn from(tx: StorageTransaction) -> Self { + let gas_limit = { + let gas_limit_string = tx + .gas_limit + .as_ref() + .expect("gas limit is mandatory for transaction") + .to_string(); + + U256::from_dec_str(&gas_limit_string) + .unwrap_or_else(|_| panic!("Incorrect gas limit value in DB {}", gas_limit_string)) + }; + + if tx.is_priority { + let full_fee = { + let full_fee_string = tx + .full_fee 
+ .expect("full fee is mandatory for priority operation") + .to_string(); + + U256::from_dec_str(&full_fee_string).unwrap_or_else(|_| { + panic!("Incorrect full fee value in DB {}", full_fee_string) + }) + }; + + let layer_2_tip_fee = { + let layer_2_tip_fee_string = tx + .layer_2_tip_fee + .expect("layer 2 tip fee is mandatory for priority operation") + .to_string(); + + U256::from_dec_str(&layer_2_tip_fee_string).unwrap_or_else(|_| { + panic!( + "Incorrect layer 2 tip fee value in DB {}", + layer_2_tip_fee_string + ) + }) + }; + + // Supporting None for compatibility with the old transactions + let to_mint = tx.l1_tx_mint.map(bigdecimal_to_u256).unwrap_or_default(); + // Supporting None for compatibility with the old transactions + let refund_recipient = tx + .l1_tx_refund_recipient + .map(|recipient| Address::from_slice(&recipient)) + .unwrap_or_default(); + + // `tx.hash` represents the transaction hash obtained from the execution results, + // and it should be exactly the same as the canonical tx hash calculated from the + // transaction data, so we don't store it as a separate "canonical_tx_hash" field. 
+ let canonical_tx_hash = H256::from_slice(&tx.hash); + + let tx_common_data = L1TxCommonData { + full_fee, + layer_2_tip_fee, + priority_queue_type: PriorityQueueType::Deque, + op_processing_type: OpProcessingType::Common, + sender: Address::from_slice(&tx.initiator_address), + serial_id: PriorityOpId(tx.priority_op_id.unwrap() as u64), + gas_limit, + to_mint, + refund_recipient, + // Using 1 for old transactions that did not have the necessary field stored + gas_per_pubdata_limit: tx + .gas_per_pubdata_limit + .map(bigdecimal_to_u256) + .unwrap_or_else(|| U256::from(1u32)), + deadline_block: 0, + eth_hash: Default::default(), + eth_block: tx.l1_block_number.unwrap_or_default() as u64, + canonical_tx_hash, + }; + + let hash = H256::from_slice(&tx.hash); + let inner = serde_json::from_value::(tx.data) + .unwrap_or_else(|_| panic!("invalid json in database for tx {:?}", hash)); + Transaction { + common_data: ExecuteTransactionCommon::L1(tx_common_data), + execute: inner, + received_timestamp_ms: tx.received_at.timestamp_millis() as u64, + } + } else { + let nonce = Nonce(tx.nonce.expect("no nonce in L2 tx in DB") as u32); + let max_fee_per_gas = { + let max_fee_per_gas_string = tx + .max_fee_per_gas + .as_ref() + .expect("max price per gas is mandatory for transaction") + .to_string(); + + U256::from_dec_str(&max_fee_per_gas_string).unwrap_or_else(|_| { + panic!( + "Incorrect max price per gas value in DB {}", + max_fee_per_gas_string + ) + }) + }; + + let max_priority_fee_per_gas = { + let max_priority_fee_per_gas_string = tx + .max_priority_fee_per_gas + .as_ref() + .expect("max priority fee per gas is mandatory for transaction") + .to_string(); + + U256::from_dec_str(&max_priority_fee_per_gas_string).unwrap_or_else(|_| { + panic!( + "Incorrect max priority fee per gas value in DB {}", + max_priority_fee_per_gas_string + ) + }) + }; + + let gas_per_pubdata_limit = { + let gas_per_pubdata_limit_string = tx + .gas_per_pubdata_limit + .as_ref() + .expect("gas price 
per pubdata limit is mandatory for transaction") + .to_string(); + U256::from_dec_str(&gas_per_pubdata_limit_string).unwrap_or_else(|_| { + panic!( + "Incorrect gas price per pubdata limit value in DB {}", + gas_per_pubdata_limit_string + ) + }) + }; + + let fee = Fee { + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + gas_per_pubdata_limit, + }; + + let tx_format = match tx.tx_format.map(|a| a as u8) { + Some(EIP_712_TX_TYPE) => TransactionType::EIP712Transaction, + Some(EIP_2930_TX_TYPE) => TransactionType::EIP2930Transaction, + Some(EIP_1559_TX_TYPE) => TransactionType::EIP1559Transaction, + Some(0) | None => TransactionType::LegacyTransaction, + Some(_) => unreachable!("Unsupported tx type"), + }; + + let StorageTransaction { + paymaster, + paymaster_input, + initiator_address, + signature, + hash, + input, + data, + received_at, + .. + } = tx; + + let paymaster_params = PaymasterParams { + paymaster: Address::from_slice(&paymaster), + paymaster_input, + }; + + let tx_common_data = L2TxCommonData::new( + nonce, + fee, + Address::from_slice(&initiator_address), + signature.unwrap_or_else(|| { + panic!("Signature is mandatory for transactions. Tx {:#?}", hash) + }), + tx_format, + input.expect("input data is mandatory for l2 transactions"), + H256::from_slice(&hash), + paymaster_params, + ); + + let inner = serde_json::from_value::(data) + .unwrap_or_else(|_| panic!("invalid json in database for tx {:?}", hash)); + Transaction { + common_data: ExecuteTransactionCommon::L2(tx_common_data), + execute: inner, + received_timestamp_ms: received_at.timestamp_millis() as u64, + } + } + } +} + +pub fn transaction_details_from_storage( + tx_details: StorageTransactionDetails, + mut erc20_transfers: Vec, + mut withdrawals: Vec, + transfer: Option, + mut deposits: Vec, +) -> explorer_api::TransactionDetails { + let status = tx_details.get_transaction_status(); + + // Dirty fix to avoid inconsistency. 
+ // Info about the transactions is built using several DB requests. + // So, it is possible that the transaction will be included in a block between these requests. + // That will result in inconsistency with transaction's events. + // Note: `transfer` field is built based only on the calldata, so it shouldn't be touched here. + if matches!(status, TransactionStatus::Pending) { + erc20_transfers = Vec::new(); + withdrawals = Vec::new(); + deposits = Vec::new(); + } + + let block_number = tx_details + .miniblock_number + .map(|number| MiniblockNumber(number as u32)); + let l1_batch_number = tx_details + .l1_batch_number + .map(|number| L1BatchNumber(number as u32)); + let block_hash = tx_details.block_hash.map(|hash| H256::from_slice(&hash)); + let index_in_block = tx_details.index_in_block.map(|i| i as u32); + + let eth_commit_tx_hash = tx_details + .eth_commit_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + let eth_prove_tx_hash = tx_details + .eth_prove_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + let eth_execute_tx_hash = tx_details + .eth_execute_tx_hash + .map(|hash| H256::from_str(&hash).unwrap()); + + let received_at = DateTime::::from_utc(tx_details.received_at, Utc); + let paymaster_address = Address::from_slice(&tx_details.paymaster); + + let storage_tx = StorageTransaction { + priority_op_id: tx_details.priority_op_id, + hash: tx_details.hash, + is_priority: tx_details.is_priority, + full_fee: tx_details.full_fee, + layer_2_tip_fee: tx_details.layer_2_tip_fee, + initiator_address: tx_details.initiator_address, + nonce: tx_details.nonce, + signature: tx_details.signature, + gas_limit: tx_details.gas_limit, + max_fee_per_gas: tx_details.max_fee_per_gas, + max_priority_fee_per_gas: tx_details.max_priority_fee_per_gas, + gas_per_storage_limit: tx_details.gas_per_storage_limit, + gas_per_pubdata_limit: tx_details.gas_per_pubdata_limit, + input: tx_details.input, + tx_format: tx_details.tx_format, + data: tx_details.data, + received_at: 
tx_details.received_at, + in_mempool: tx_details.in_mempool, + l1_block_number: tx_details.l1_block_number, + l1_batch_number: tx_details.l1_batch_number, + l1_batch_tx_index: tx_details.l1_batch_tx_index, + miniblock_number: tx_details.miniblock_number, + index_in_block: tx_details.index_in_block, + error: tx_details.error, + effective_gas_price: tx_details.effective_gas_price, + contract_address: tx_details.contract_address, + value: tx_details.value, + paymaster: tx_details.paymaster, + paymaster_input: tx_details.paymaster_input, + l1_tx_mint: tx_details.l1_tx_mint, + l1_tx_refund_recipient: tx_details.l1_tx_refund_recipient, + refunded_gas: tx_details.refunded_gas, + execution_info: tx_details.execution_info, + created_at: tx_details.created_at, + updated_at: tx_details.updated_at, + }; + let effective_gas_price = + bigdecimal_to_u256(storage_tx.effective_gas_price.clone().unwrap_or_default()); + let tx: Transaction = storage_tx.into(); + let fee = match &tx.common_data { + ExecuteTransactionCommon::L1(data) => data.full_fee, + ExecuteTransactionCommon::L2(data) => { + (data.fee.gas_limit - tx_details.refunded_gas) * effective_gas_price + } + }; + + let tx_type = tx.tx_format(); + + let transaction_hash = tx.hash(); + let nonce = tx.nonce(); + let initiator_address = tx.initiator_account(); + let is_l1_originated = tx.is_l1(); + let data = tx.execute; + + let mut transfer_changes = erc20_transfers.clone(); + for withdraw in withdrawals.iter() { + // Ether is being sent to `L2_ETH_TOKEN_ADDRESS` when burning + // but other tokens are being sent to the zero address. 
+ let to = if withdraw.token_info.l1_address == Address::zero() { + L2_ETH_TOKEN_ADDRESS + } else { + Address::zero() + }; + let burn_event_to_remove = Erc20TransferInfo { + token_info: withdraw.token_info.clone(), + from: withdraw.from, + to, + amount: withdraw.amount, + }; + let elem_to_remove = transfer_changes + .iter() + .find_position(|event| event == &&burn_event_to_remove); + if let Some(idx_to_remove) = elem_to_remove { + transfer_changes.remove(idx_to_remove.0); + } else { + vlog::error!( + "Burn event for withdrawal must be present, tx hash: {:?}", + transaction_hash + ); + } + } + for deposit in deposits.iter() { + // Ether doesn't emit `Transfer` event when minting unlike other tokens. + if deposit.token_info.l1_address != Address::zero() { + let mint_event_to_remove = Erc20TransferInfo { + token_info: deposit.token_info.clone(), + from: Address::zero(), + to: deposit.to, + amount: deposit.amount, + }; + let elem_to_remove = transfer_changes + .iter() + .find_position(|event| event == &&mint_event_to_remove); + if let Some(idx_to_remove) = elem_to_remove { + transfer_changes.remove(idx_to_remove.0); + } else { + vlog::error!( + "Mint event for deposit must be present, tx hash: {:?}", + transaction_hash + ); + } + } + } + let fee_receiver_address = if paymaster_address == Address::zero() { + BOOTLOADER_ADDRESS + } else { + paymaster_address + }; + let balance_changes = transfer_changes + .into_iter() + .map(|transfer_info| { + let balance_change_type = if transfer_info.to == fee_receiver_address { + BalanceChangeType::Fee + } else { + BalanceChangeType::Transfer + }; + BalanceChangeInfo { + token_info: transfer_info.token_info, + from: transfer_info.from, + to: transfer_info.to, + amount: transfer_info.amount, + r#type: balance_change_type, + } + }) + .chain(withdrawals) + .chain(deposits) + .collect(); + + explorer_api::TransactionDetails { + transaction_hash, + data, + is_l1_originated, + status, + fee, + nonce, + block_number, + l1_batch_number, + 
block_hash, + index_in_block, + initiator_address, + received_at, + eth_commit_tx_hash, + eth_prove_tx_hash, + eth_execute_tx_hash, + erc20_transfers, + transfer, + balance_changes, + r#type: tx_type as u32, + } +} diff --git a/core/lib/dal/src/models/storage_witness_job_info.rs b/core/lib/dal/src/models/storage_witness_job_info.rs new file mode 100644 index 000000000000..376325f2e910 --- /dev/null +++ b/core/lib/dal/src/models/storage_witness_job_info.rs @@ -0,0 +1,76 @@ +use sqlx::types::chrono::{DateTime, Utc}; +use std::convert::TryFrom; +use std::str::FromStr; +use vlog::__chrono::{NaiveDateTime, NaiveTime}; +use zksync_types::proofs::{ + AggregationRound, JobPosition, WitnessJobInfo, WitnessJobStatus, WitnessJobStatusFailed, + WitnessJobStatusSuccessful, +}; +use zksync_types::L1BatchNumber; + +#[derive(sqlx::FromRow)] +pub struct StorageWitnessJobInfo { + pub aggregation_round: i32, + pub l1_batch_number: i64, + pub status: String, + pub error: Option, + pub created_at: NaiveDateTime, + pub updated_at: NaiveDateTime, + pub time_taken: Option, + pub processing_started_at: Option, + pub attempts: i32, +} + +impl From for WitnessJobInfo { + fn from(x: StorageWitnessJobInfo) -> Self { + fn nt2d(nt: NaiveDateTime) -> DateTime { + DateTime::from_utc(nt, Utc) + } + + let status = + match WitnessJobStatus::from_str(x.status.as_str()) + .unwrap_or_else(|_| panic!("Unknown value '{}' in witness job status db record.", x.status)) { + WitnessJobStatus::Successful(_) => WitnessJobStatus::Successful(WitnessJobStatusSuccessful { + started_at: + nt2d(x.processing_started_at + .unwrap_or_else(|| panic!( + "Witness job is successful but lacks processing timestamp. 
Batch:round {}:{} ", + x.l1_batch_number, + x.aggregation_round))), + time_taken: x.time_taken.unwrap() - NaiveTime::from_hms_opt(0,0,0).unwrap() + }), + WitnessJobStatus::Failed(_) => { + let batch = x.l1_batch_number; + let round = x.aggregation_round; + + WitnessJobStatus::Failed( + WitnessJobStatusFailed { + started_at: + nt2d(x.processing_started_at + .unwrap_or_else(|| panic!( + "Witness job is failed but lacks processing timestamp. Batch:round {}:{} ", + x.l1_batch_number, + x.aggregation_round))), + error: + x.error + .unwrap_or_else(|| panic!( + "Witness job failed but lacks error message. Batch:round {}:{}", + batch, + round)), + }) + }, + x => x + }; + + WitnessJobInfo { + block_number: L1BatchNumber(x.l1_batch_number as u32), + created_at: nt2d(x.created_at), + updated_at: nt2d(x.updated_at), + status, + position: JobPosition { + aggregation_round: AggregationRound::try_from(x.aggregation_round).unwrap(), + sequence_number: 1, // Witness job 1:1 aggregation round, per block + }, + } + } +} diff --git a/core/lib/dal/src/prover_dal.rs b/core/lib/dal/src/prover_dal.rs new file mode 100644 index 000000000000..588bcf7439d4 --- /dev/null +++ b/core/lib/dal/src/prover_dal.rs @@ -0,0 +1,578 @@ +use std::collections::HashMap; +use std::convert::TryFrom; +use std::ops::Range; +use std::time::{Duration, Instant}; + +use zksync_object_store::gcs_utils::prover_circuit_input_blob_url; +use zksync_types::aggregated_operations::BlockProofForL1; +use zksync_types::proofs::{ + AggregationRound, JobCountStatistics, JobExtendedStatistics, ProverJobInfo, ProverJobMetadata, +}; +use zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncProof; +use zksync_types::zkevm_test_harness::bellman::bn256::Bn256; +use zksync_types::L1BatchNumber; + +use crate::models::storage_prover_job_info::StorageProverJobInfo; +use crate::time_utils::{duration_to_naive_time, pg_interval_from_duration}; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct 
ProverDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl ProverDal<'_, '_> { + pub fn get_next_prover_job( + &mut self, + _processing_timeout: Duration, + max_attempts: u32, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(_processing_timeout); + let result: Option = sqlx::query!( + " + UPDATE prover_jobs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE id = ( + SELECT id + FROM prover_jobs + WHERE status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'in_gpu_proof' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ORDER BY aggregation_round DESC, l1_batch_number ASC, id ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING prover_jobs.* + ", + &processing_timeout, + max_attempts as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| ProverJobMetadata { + id: row.id as u32, + block_number: L1BatchNumber(row.l1_batch_number as u32), + circuit_type: row.circuit_type.clone(), + aggregation_round: AggregationRound::try_from(row.aggregation_round).unwrap(), + sequence_number: row.sequence_number as usize, + }); + result + }) + } + + pub fn get_next_prover_job_by_circuit_types( + &mut self, + processing_timeout: Duration, + max_attempts: u32, + circuit_types: Vec, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + let result: Option = sqlx::query!( + " + UPDATE prover_jobs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE id = ( + SELECT id + FROM prover_jobs + WHERE circuit_type = ANY($3) + AND + ( status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ) + ORDER BY 
aggregation_round DESC, l1_batch_number ASC, id ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING prover_jobs.* + ", + &processing_timeout, + max_attempts as i32, + &circuit_types[..], + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| ProverJobMetadata { + id: row.id as u32, + block_number: L1BatchNumber(row.l1_batch_number as u32), + circuit_type: row.circuit_type, + aggregation_round: AggregationRound::try_from(row.aggregation_round).unwrap(), + sequence_number: row.sequence_number as usize, + }); + + result + }) + } + + // If making changes to this method, consider moving the serialization logic to the DAL layer. + pub fn insert_prover_jobs( + &mut self, + l1_batch_number: L1BatchNumber, + circuits: Vec, + aggregation_round: AggregationRound, + ) { + async_std::task::block_on(async { + let started_at = Instant::now(); + for (sequence_number, circuit) in circuits.into_iter().enumerate() { + let circuit_input_blob_url = prover_circuit_input_blob_url( + l1_batch_number, + sequence_number, + circuit.clone(), + aggregation_round, + ); + sqlx::query!( + " + INSERT INTO prover_jobs (l1_batch_number, circuit_type, sequence_number, prover_input, aggregation_round, circuit_input_blob_url, status, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, 'queued', now(), now()) + ON CONFLICT(l1_batch_number, aggregation_round, sequence_number) DO NOTHING + ", + l1_batch_number.0 as i64, + circuit, + sequence_number as i64, + vec![], + aggregation_round as i64, + circuit_input_blob_url + ) + .execute(self.storage.conn()) + .await + .unwrap(); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "save_witness"); + } + }) + } + + pub fn save_proof( + &mut self, + id: u32, + time_taken: Duration, + proof: Vec, + proccesed_by: &str, + ) { + async_std::task::block_on(async { + let started_at = Instant::now(); + sqlx::query!( + " + UPDATE prover_jobs + SET status = 'successful', updated_at = now(), time_taken = $1, result = $2, 
proccesed_by = $3 + WHERE id = $4 + ", + duration_to_naive_time(time_taken), + proof, + proccesed_by, + id as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "save_proof"); + }) + } + + pub fn lock_prover_jobs_table_exclusive(&mut self) { + async_std::task::block_on(async { + sqlx::query!("LOCK TABLE prover_jobs IN EXCLUSIVE MODE") + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn save_proof_error(&mut self, id: u32, error: String, max_attempts: u32) { + async_std::task::block_on(async { + let mut transaction = self.storage.start_transaction().await; + + let row = sqlx::query!( + " + UPDATE prover_jobs + SET status = 'failed', error = $1, updated_at = now() + WHERE id = $2 + RETURNING l1_batch_number, attempts + ", + error, + id as i64, + ) + .fetch_one(transaction.conn()) + .await + .unwrap(); + + if row.attempts as u32 >= max_attempts { + transaction + .blocks_dal() + .set_skip_proof_for_l1_batch(L1BatchNumber(row.l1_batch_number as u32)); + } + + transaction.commit().await; + }) + } + + // For each block in the provided range it returns a tuple: + // (aggregation_coords; scheduler_proof) + pub fn get_final_proofs_for_blocks( + &mut self, + from_block: L1BatchNumber, + to_block: L1BatchNumber, + ) -> Vec { + async_std::task::block_on(async { + sqlx::query!( + "SELECT prover_jobs.result as proof, scheduler_witness_jobs.aggregation_result_coords + FROM prover_jobs + INNER JOIN scheduler_witness_jobs + ON prover_jobs.l1_batch_number = scheduler_witness_jobs.l1_batch_number + WHERE prover_jobs.l1_batch_number >= $1 AND prover_jobs.l1_batch_number <= $2 + AND prover_jobs.aggregation_round = 3 + AND prover_jobs.status = 'successful' + AND scheduler_witness_jobs.status = 'successful' + ", + from_block.0 as i32, + to_block.0 as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + let deserialized_proof = 
bincode::deserialize::>( + &row.proof + .expect("prove_job with `successful` status has no result"), + ).expect("cannot deserialize proof"); + let deserialized_aggregation_result_coords = bincode::deserialize::<[[u8; 32]; 4]>( + &row.aggregation_result_coords + .expect("scheduler_witness_job with `successful` status has no aggregation_result_coords"), + ).expect("cannot deserialize proof"); + BlockProofForL1 { + aggregation_result_coords: deserialized_aggregation_result_coords, + scheduler_proof: ZkSyncProof::into_proof(deserialized_proof), + } + }) + .collect() + }) + } + + pub fn get_prover_jobs_stats(&mut self) -> JobCountStatistics { + async_std::task::block_on(async { + let mut results: HashMap = sqlx::query!( + r#" + SELECT COUNT(*) as "count!", status as "status!" + FROM prover_jobs + GROUP BY status + "# + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| (row.status, row.count as usize)) + .collect::>(); + JobCountStatistics { + queued: results.remove("queued").unwrap_or(0usize), + in_progress: results.remove("in_progress").unwrap_or(0usize), + failed: results.remove("failed").unwrap_or(0usize), + successful: results.remove("successful").unwrap_or(0usize), + } + }) + } + + pub fn successful_proofs_count( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) -> usize { + async_std::task::block_on(async { + sqlx::query!( + r#" + SELECT COUNT(*) as "count!" + FROM prover_jobs + WHERE status = 'successful' AND l1_batch_number = $1 AND aggregation_round = $2 + "#, + block_number.0 as i64, + aggregation_round as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count as usize + }) + } + + pub fn min_unproved_l1_batch_number(&mut self, max_attempts: u32) -> Option { + async_std::task::block_on(async { + sqlx::query!( + r#" + SELECT MIN(l1_batch_number) as "l1_batch_number?" 
+ FROM prover_jobs + WHERE status = 'queued' OR status = 'in_progress' + OR status = 'in_gpu_proof' + OR (status = 'failed' AND attempts < $1) + "#, + max_attempts as i32 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .l1_batch_number + .map(|n| L1BatchNumber(n as u32)) + }) + } + + pub fn get_extended_stats(&mut self) -> anyhow::Result { + async_std::task::block_on(async { + let limits = sqlx::query!( + r#" + SELECT + (SELECT l1_batch_number + FROM prover_jobs + WHERE status NOT IN ('successful', 'skipped') + ORDER BY l1_batch_number + LIMIT 1) as "successful_limit!", + + (SELECT l1_batch_number + FROM prover_jobs + WHERE status <> 'queued' + ORDER BY l1_batch_number DESC + LIMIT 1) as "queued_limit!", + + (SELECT MAX(l1_batch_number) as "max!" FROM prover_jobs) as "max_block!" + "# + ) + .fetch_one(self.storage.conn()) + .await?; + + let active_area = self.get_jobs(GetProverJobsParams::blocks( + L1BatchNumber(limits.successful_limit as u32) + ..L1BatchNumber(limits.queued_limit as u32), + ))?; + + Ok(JobExtendedStatistics { + successful_padding: L1BatchNumber(limits.successful_limit as u32 - 1), + queued_padding: L1BatchNumber(limits.queued_limit as u32 + 1), + queued_padding_len: (limits.max_block - limits.queued_limit) as u32, + active_area, + }) + }) + } + + pub fn get_jobs( + &mut self, + opts: GetProverJobsParams, + ) -> Result, sqlx::Error> { + let statuses = opts + .statuses + .map(|ss| { + { + // Until statuses are enums + let whitelist = vec!["queued", "in_progress", "successful", "failed"]; + if !ss.iter().all(|x| whitelist.contains(&x.as_str())) { + panic!("Forbidden value in statuses list.") + } + } + + format!( + "AND status IN ({})", + ss.iter() + .map(|x| format!("'{}'", x)) + .collect::>() + .join(",") + ) + }) + .unwrap_or_default(); + + let block_range = opts + .blocks + .as_ref() + .map(|range| { + format!( + "AND l1_batch_number >= {} + AND l1_batch_number <= {}", + range.start.0, range.end.0 + ) + }) + .unwrap_or_default(); + + 
let round = opts + .round + .map(|round| format!("AND aggregation_round = {}", round as u32)) + .unwrap_or_default(); + + let order = match opts.desc { + true => "DESC", + false => "ASC", + }; + + let limit = opts + .limit + .map(|limit| format!("LIMIT {}", limit)) + .unwrap_or_default(); + + let sql = format!( + r#" + SELECT + id, + circuit_type, + l1_batch_number, + status, + aggregation_round, + sequence_number, + length(prover_input) as input_length, + attempts, + created_at, + updated_at, + processing_started_at, + time_taken, + error + FROM prover_jobs + WHERE 1 = 1 -- Where clause can't be empty + {statuses} + {block_range} + {round} + ORDER BY "id" {order} + {limit} + "# + ); + + let query = sqlx::query_as(&sql); + + Ok( + async_std::task::block_on(async move { query.fetch_all(self.storage.conn()).await })? + .into_iter() + .map(|x: StorageProverJobInfo| x.into()) + .collect::>(), + ) + } + + pub fn get_prover_job_by_id(&mut self, job_id: u32) -> Option { + async_std::task::block_on(async { + let result: Option = + sqlx::query!("SELECT * from prover_jobs where id=$1", job_id as i64) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| ProverJobMetadata { + id: row.id as u32, + block_number: L1BatchNumber(row.l1_batch_number as u32), + circuit_type: row.circuit_type.clone(), + aggregation_round: AggregationRound::try_from(row.aggregation_round) + .unwrap(), + sequence_number: row.sequence_number as usize, + }); + result + }) + } + + pub fn get_l1_batches_with_blobs_in_db(&mut self, limit: u8) -> Vec { + async_std::task::block_on(async { + let job_ids = sqlx::query!( + r#" + SELECT id FROM prover_jobs + WHERE length(prover_input) <> 0 + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids.into_iter().map(|row| row.id).collect() + }) + } + + pub fn get_circuit_input_blob_urls_to_be_cleaned(&mut self, limit: u8) -> Vec<(i64, String)> { + async_std::task::block_on(async { + let job_ids = 
sqlx::query!( + r#" + SELECT id, circuit_input_blob_url FROM prover_jobs + WHERE status='successful' AND is_blob_cleaned=FALSE + AND circuit_input_blob_url is NOT NULL + AND updated_at < NOW() - INTERVAL '2 days' + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids + .into_iter() + .map(|row| (row.id, row.circuit_input_blob_url.unwrap())) + .collect() + }) + } + + pub fn mark_gcs_blobs_as_cleaned(&mut self, ids: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE prover_jobs + SET is_blob_cleaned=TRUE + WHERE id = ANY($1); + "#, + &ids[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn purge_blobs_from_db(&mut self, job_ids: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE prover_jobs + SET prover_input='' + WHERE id = ANY($1); + "#, + &job_ids[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn update_status(&mut self, id: u32, status: &str) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE prover_jobs + SET status = $1, updated_at = now() + WHERE id = $2 + "#, + status, + id as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} + +pub struct GetProverJobsParams { + pub statuses: Option>, + pub blocks: Option>, + pub limit: Option, + pub desc: bool, + pub round: Option, +} + +impl GetProverJobsParams { + pub fn blocks(range: Range) -> GetProverJobsParams { + GetProverJobsParams { + blocks: Some(range), + statuses: None, + limit: None, + desc: false, + round: None, + } + } +} diff --git a/core/lib/dal/src/storage_dal.rs b/core/lib/dal/src/storage_dal.rs new file mode 100644 index 000000000000..a38e793f4830 --- /dev/null +++ b/core/lib/dal/src/storage_dal.rs @@ -0,0 +1,248 @@ +use crate::models::storage_contract::StorageContractSource; +use crate::StorageProcessor; +use std::collections::{HashMap, HashSet}; +use std::time::Instant; +use zksync_types::{ + 
vm_trace::ContractSourceDebugInfo, Address, MiniblockNumber, StorageKey, StorageLog, + StorageValue, ACCOUNT_CODE_STORAGE_ADDRESS, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, H256, + U256, +}; +use zksync_utils::{bytes_to_chunks, h256_to_account_address}; + +#[derive(Debug)] +pub struct StorageDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl StorageDal<'_, '_> { + pub fn insert_factory_deps( + &mut self, + block_number: MiniblockNumber, + factory_deps: HashMap>, + ) { + async_std::task::block_on(async { + let (bytecode_hashes, bytecodes): (Vec<_>, Vec<_>) = factory_deps + .into_iter() + .map(|dep| (dep.0.as_bytes().into(), dep.1)) + .unzip(); + + // Copy from stdin can't be used here because of 'ON CONFLICT'. + sqlx::query!( + "INSERT INTO factory_deps + (bytecode_hash, bytecode, miniblock_number, created_at, updated_at) + SELECT u.bytecode_hash, u.bytecode, $3, now(), now() + FROM UNNEST($1::bytea[], $2::bytea[]) + AS u(bytecode_hash, bytecode) + ON CONFLICT (bytecode_hash) DO NOTHING + ", + &bytecode_hashes, + &bytecodes, + block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_factory_dep(&mut self, hash: H256) -> Option> { + async_std::task::block_on(async { + sqlx::query!( + "SELECT bytecode FROM factory_deps WHERE bytecode_hash = $1", + &hash.0.to_vec(), + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| row.bytecode) + }) + } + + pub fn get_factory_deps(&mut self, hashes: &HashSet) -> HashMap> { + let hashes_as_vec_u8: Vec> = hashes.iter().map(|hash| hash.0.to_vec()).collect(); + + async_std::task::block_on(async { + sqlx::query!( + "SELECT bytecode, bytecode_hash FROM factory_deps WHERE bytecode_hash = ANY($1)", + &hashes_as_vec_u8, + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + ( + U256::from_big_endian(&row.bytecode_hash), + bytes_to_chunks(&row.bytecode), + ) + }) + .collect() + }) + } + + pub fn 
get_contracts_for_revert(&mut self, block_number: MiniblockNumber) -> Vec
{ + async_std::task::block_on(async { + sqlx::query!( + " + SELECT key + FROM storage_logs + WHERE address = $1 AND miniblock_number > $2 AND NOT EXISTS ( + SELECT 1 FROM storage_logs as s + WHERE + s.hashed_key = storage_logs.hashed_key AND + (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND + s.value = $3 + ) + ", + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + block_number.0 as i64, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| h256_to_account_address(&H256::from_slice(&row.key))) + .collect() + }) + } + + pub fn get_factory_deps_for_revert(&mut self, block_number: MiniblockNumber) -> Vec { + async_std::task::block_on(async { + sqlx::query!( + "SELECT bytecode_hash FROM factory_deps WHERE miniblock_number > $1", + block_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| H256::from_slice(&row.bytecode_hash)) + .collect() + }) + } + + pub fn set_contract_source(&mut self, address: Address, source: ContractSourceDebugInfo) { + async_std::task::block_on(async { + sqlx::query!( + "INSERT INTO contract_sources (address, assembly_code, pc_line_mapping, created_at, updated_at) + VALUES ($1, $2, $3, now(), now()) + ON CONFLICT (address) + DO UPDATE SET assembly_code = $2, pc_line_mapping = $3, updated_at = now() + ", + address.as_bytes(), + source.assembly_code, + serde_json::to_value(source.pc_line_mapping).unwrap() + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_contract_source(&mut self, address: Address) -> Option { + async_std::task::block_on(async { + let source = sqlx::query_as!( + StorageContractSource, + "SELECT assembly_code, pc_line_mapping FROM contract_sources WHERE address = $1", + address.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + source.map(Into::into) + }) + } + + // we likely don't 
need `storage` table at all, as we have `storage_logs` table + // Returns the list of unique storage updates for block + pub fn apply_storage_logs( + &mut self, + updates: &[(H256, Vec)], + ) -> Vec<(StorageKey, (H256, StorageValue))> { + async_std::task::block_on(async { + let mut unique_updates: HashMap = HashMap::new(); + for (tx_hash, storage_logs) in updates { + for storage_log in storage_logs { + unique_updates.insert(storage_log.key, (*tx_hash, storage_log.value)); + } + } + let unique_updates: Vec<(StorageKey, (H256, StorageValue))> = + unique_updates.into_iter().collect(); + + let hashed_keys: Vec> = unique_updates + .iter() + .map(|(key, _)| key.hashed_key().0.to_vec()) + .collect(); + + let addresses: Vec<_> = unique_updates + .iter() + .map(|(key, _)| key.address().0.to_vec()) + .collect(); + let keys: Vec<_> = unique_updates + .iter() + .map(|(key, _)| key.key().0.to_vec()) + .collect(); + let values: Vec> = unique_updates + .iter() + .map(|(_, (_, value))| value.as_bytes().to_vec()) + .collect(); + + let tx_hashes: Vec> = unique_updates + .iter() + .map(|(_, (tx_hash, _))| tx_hash.0.to_vec()) + .collect(); + + // Copy from stdin can't be used here because of 'ON CONFLICT'. 
+ sqlx::query!( + "INSERT INTO storage (hashed_key, address, key, value, tx_hash, created_at, updated_at) + SELECT u.hashed_key, u.address, u.key, u.value, u.tx_hash, now(), now() + FROM UNNEST ($1::bytea[], $2::bytea[], $3::bytea[], $4::bytea[], $5::bytea[]) + AS u(hashed_key, address, key, value, tx_hash) + ON CONFLICT (hashed_key) + DO UPDATE SET tx_hash = excluded.tx_hash, value = excluded.value, updated_at = now() + ", + &hashed_keys, + &addresses, + &keys, + &values, + &tx_hashes, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + unique_updates + }) + } + + pub fn get_by_key(&mut self, key: &StorageKey) -> Option { + async_std::task::block_on(async { + let started_at = Instant::now(); + + let result = sqlx::query!( + "SELECT value FROM storage WHERE hashed_key = $1", + &key.hashed_key().0.to_vec() + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| H256::from_slice(&row.value)); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_by_key"); + + result + }) + } + + pub fn rollback_factory_deps(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM factory_deps WHERE miniblock_number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} diff --git a/core/lib/dal/src/storage_load_dal.rs b/core/lib/dal/src/storage_load_dal.rs new file mode 100644 index 000000000000..16b382526a30 --- /dev/null +++ b/core/lib/dal/src/storage_load_dal.rs @@ -0,0 +1,155 @@ +use crate::StorageProcessor; +use std::time::Instant; +use zksync_state::secondary_storage::SecondaryStateStorage; +use zksync_storage::RocksDB; +use zksync_types::{ + AccountTreeId, Address, L1BatchNumber, StorageKey, StorageLog, ACCOUNT_CODE_STORAGE_ADDRESS, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, H256, +}; +use zksync_utils::h256_to_account_address; + +#[derive(Debug)] +pub struct StorageLoadDal<'a, 'c> { + pub storage: &'a mut 
StorageProcessor<'c>, +} + +impl StorageLoadDal<'_, '_> { + pub fn load_secondary_storage(&mut self, db: RocksDB) -> SecondaryStateStorage { + async_std::task::block_on(async { + let stage_started_at: Instant = Instant::now(); + let latest_l1_batch_number = self.storage.blocks_dal().get_sealed_block_number(); + vlog::debug!( + "loading storage for l1 batch number {}", + latest_l1_batch_number.0 + ); + + let mut result = SecondaryStateStorage::new(db); + let mut current_l1_batch_number = result.get_l1_batch_number().0; + + assert!( + current_l1_batch_number <= latest_l1_batch_number.0 + 1, + "L1 batch number in state keeper cache is greater than last sealed L1 batch number in Postgres" + ); + while current_l1_batch_number <= latest_l1_batch_number.0 { + let (from_miniblock_number, to_miniblock_number) = self + .storage + .blocks_dal() + .get_miniblock_range_of_l1_batch(L1BatchNumber(current_l1_batch_number)) + .expect("L1 batch should contain at least one miniblock"); + + vlog::debug!( + "loading state changes for l1 batch {}", + current_l1_batch_number + ); + let storage_logs: Vec<_> = sqlx::query!( + " + SELECT address, key, value FROM storage_logs + WHERE miniblock_number >= $1 AND miniblock_number <= $2 + ORDER BY miniblock_number, operation_number ASC + ", + from_miniblock_number.0 as i64, + to_miniblock_number.0 as i64, + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_slice(&row.address)), + H256::from_slice(&row.key), + ), + H256::from_slice(&row.value), + ) + }) + .collect(); + result.process_transaction_logs(&storage_logs); + + vlog::debug!( + "loading deployed contracts for l1 batch {}", + current_l1_batch_number + ); + sqlx::query!( + " + SELECT storage_logs.key, factory_deps.bytecode + FROM storage_logs + JOIN factory_deps ON storage_logs.value = factory_deps.bytecode_hash + WHERE + storage_logs.address = $1 AND + 
storage_logs.miniblock_number >= $3 AND + storage_logs.miniblock_number <= $4 AND + NOT EXISTS ( + SELECT 1 FROM storage_logs as s + WHERE + s.hashed_key = storage_logs.hashed_key AND + (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND + s.value = $2 + ) + ", + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), + from_miniblock_number.0 as i64, + to_miniblock_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .for_each(|row| { + result.store_contract( + h256_to_account_address(&H256::from_slice(&row.key)), + row.bytecode, + ) + }); + + vlog::debug!( + "loading factory deps for l1 batch {}", + current_l1_batch_number + ); + sqlx::query!( + "SELECT bytecode_hash, bytecode FROM factory_deps + WHERE miniblock_number >= $1 AND miniblock_number <= $2", + from_miniblock_number.0 as i64, + to_miniblock_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .for_each(|row| { + result.store_factory_dep(H256::from_slice(&row.bytecode_hash), row.bytecode) + }); + + current_l1_batch_number += 1; + result.save(L1BatchNumber(current_l1_batch_number)); + } + + metrics::histogram!( + "server.state_keeper.update_secondary_storage", + stage_started_at.elapsed() + ); + result + }) + } + + pub fn load_number_of_contracts(&mut self) -> u64 { + async_std::task::block_on(async { + sqlx::query!( + "SELECT count(*) + FROM storage + WHERE + address = $1 AND + value != $2 + ", + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count + .unwrap() as u64 + }) + } +} diff --git a/core/lib/dal/src/storage_logs_dal.rs b/core/lib/dal/src/storage_logs_dal.rs new file mode 100644 index 000000000000..bb2c83ab5ffb --- /dev/null +++ b/core/lib/dal/src/storage_logs_dal.rs @@ -0,0 +1,200 @@ +use 
crate::StorageProcessor; +use sqlx::types::chrono::Utc; +use zksync_types::{ + get_code_key, Address, MiniblockNumber, StorageLog, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, + H256, +}; + +#[derive(Debug)] +pub struct StorageLogsDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl StorageLogsDal<'_, '_> { + pub fn insert_storage_logs( + &mut self, + block_number: MiniblockNumber, + logs: &[(H256, Vec)], + ) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY storage_logs (hashed_key, address, key, value, operation_number, tx_hash, miniblock_number, created_at, updated_at) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + let mut operation_number = 0u32; + for (tx_hash, logs) in logs { + let tx_hash_str = format!("\\\\x{}", hex::encode(tx_hash.0)); + for log in logs { + let hashed_key_str = format!("\\\\x{}", hex::encode(log.key.hashed_key().0)); + let address_str = format!("\\\\x{}", hex::encode(log.key.address().0)); + let key_str = format!("\\\\x{}", hex::encode(log.key.key().0)); + let value_str = format!("\\\\x{}", hex::encode(log.value.0)); + let row = format!( + "{}|{}|{}|{}|{}|{}|{}|{}|{}\n", + hashed_key_str, + address_str, + key_str, + value_str, + operation_number, + tx_hash_str, + block_number, + now, + now + ); + bytes.extend_from_slice(row.as_bytes()); + + operation_number += 1; + } + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn append_storage_logs( + &mut self, + block_number: MiniblockNumber, + logs: &[(H256, Vec)], + ) { + async_std::task::block_on(async { + let mut operation_number = sqlx::query!( + r#"SELECT COUNT(*) as "count!" 
FROM storage_logs WHERE miniblock_number = $1"#, + block_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count as u32; + + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY storage_logs (hashed_key, address, key, value, operation_number, tx_hash, miniblock_number, created_at, updated_at) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + for (tx_hash, logs) in logs { + let tx_hash_str = format!("\\\\x{}", hex::encode(tx_hash.0)); + for log in logs { + let hashed_key_str = format!("\\\\x{}", hex::encode(log.key.hashed_key().0)); + let address_str = format!("\\\\x{}", hex::encode(log.key.address().0)); + let key_str = format!("\\\\x{}", hex::encode(log.key.key().0)); + let value_str = format!("\\\\x{}", hex::encode(log.value.0)); + let row = format!( + "{}|{}|{}|{}|{}|{}|{}|{}|{}\n", + hashed_key_str, + address_str, + key_str, + value_str, + operation_number, + tx_hash_str, + block_number, + now, + now + ); + bytes.extend_from_slice(row.as_bytes()); + + operation_number += 1; + } + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn rollback_storage(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + vlog::info!("fetching keys that were changed after given block number"); + let modified_keys: Vec = sqlx::query!( + "SELECT DISTINCT ON (hashed_key) hashed_key FROM + (SELECT * FROM storage_logs WHERE miniblock_number > $1) inn", + block_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| H256::from_slice(&row.hashed_key)) + .collect(); + vlog::info!("loaded {:?} keys", modified_keys.len()); + + for key in modified_keys { + let previous_value: Option = sqlx::query!( + "select value from storage_logs where hashed_key = $1 and miniblock_number <= $2 order by miniblock_number desc, operation_number desc limit 1", + 
key.as_bytes(), + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|r| H256::from_slice(&r.value)); + match previous_value { + None => { + sqlx::query!("delete from storage where hashed_key = $1", key.as_bytes(),) + .execute(self.storage.conn()) + .await + .unwrap() + } + Some(val) => sqlx::query!( + "update storage set value = $1 where hashed_key = $2", + val.as_bytes(), + key.as_bytes(), + ) + .execute(self.storage.conn()) + .await + .unwrap(), + }; + } + }) + } + + pub fn rollback_storage_logs(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM storage_logs WHERE miniblock_number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn is_contract_deployed_at_address(&mut self, address: Address) -> bool { + let hashed_key = get_code_key(&address).hashed_key(); + async_std::task::block_on(async { + let count = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" 
+ FROM ( + SELECT * FROM storage_logs + WHERE storage_logs.hashed_key = $1 + ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC + LIMIT 1 + ) sl + WHERE sl.value != $2 + "#, + hashed_key.as_bytes(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count; + count > 0 + }) + } +} diff --git a/core/lib/dal/src/storage_logs_dedup_dal.rs b/core/lib/dal/src/storage_logs_dedup_dal.rs new file mode 100644 index 000000000000..01a514d401b8 --- /dev/null +++ b/core/lib/dal/src/storage_logs_dedup_dal.rs @@ -0,0 +1,359 @@ +use crate::StorageProcessor; +use sqlx::types::chrono::Utc; +use std::collections::{HashMap, HashSet}; +use vm::zk_evm::ethereum_types::H256; +use zksync_types::{AccountTreeId, Address, L1BatchNumber, StorageKey, StorageLogQuery}; +use zksync_utils::u256_to_h256; + +#[derive(Debug)] +pub struct StorageLogsDedupDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl StorageLogsDedupDal<'_, '_> { + pub fn insert_storage_logs(&mut self, block_number: L1BatchNumber, logs: &[StorageLogQuery]) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY storage_logs_dedup (hashed_key, address, key, value_read, value_written, operation_number, is_write, l1_batch_number, created_at) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + for (operation_number, log_query) in logs.iter().enumerate() { + let log = &log_query.log_query; + let hashed_key_str = format!( + "\\\\x{}", + hex::encode(StorageKey::raw_hashed_key( + &log.address, + &u256_to_h256(log.key) + )) + ); + let address_str = format!("\\\\x{}", hex::encode(log.address.0)); + let key_str = format!("\\\\x{}", hex::encode(u256_to_h256(log.key).0)); + let read_value_str = + format!("\\\\x{}", hex::encode(u256_to_h256(log.read_value).0)); + let written_value_str = + 
format!("\\\\x{}", hex::encode(u256_to_h256(log.written_value).0)); + let row = format!( + "{}|{}|{}|{}|{}|{}|{}|{}|{}\n", + hashed_key_str, + address_str, + key_str, + read_value_str, + written_value_str, + operation_number, + log.rw_flag, + block_number, + now + ); + bytes.extend_from_slice(row.as_bytes()); + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn rollback_storage_logs(&mut self, block_number: L1BatchNumber) { + async_std::task::block_on(async { + sqlx::query!( + "DELETE FROM storage_logs_dedup WHERE l1_batch_number > $1", + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn insert_protective_reads( + &mut self, + l1_batch_number: L1BatchNumber, + read_logs: &[StorageLogQuery], + ) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY protective_reads (l1_batch_number, address, key, created_at, updated_at) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + for log_query in read_logs.iter() { + let log = &log_query.log_query; + let address_str = format!("\\\\x{}", hex::encode(log.address.0)); + let key_str = format!("\\\\x{}", hex::encode(u256_to_h256(log.key).0)); + let row = format!( + "{}|{}|{}|{}|{}\n", + l1_batch_number, address_str, key_str, now, now + ); + bytes.extend_from_slice(row.as_bytes()); + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn insert_initial_writes( + &mut self, + l1_batch_number: L1BatchNumber, + write_logs: &[StorageLogQuery], + ) { + async_std::task::block_on(async { + let hashed_keys: Vec<_> = write_logs + .iter() + .map(|log| { + StorageKey::raw_hashed_key( + &log.log_query.address, + &u256_to_h256(log.log_query.key), + ) + .to_vec() + }) + .collect(); + + sqlx::query!( + "INSERT INTO initial_writes (hashed_key, l1_batch_number, created_at, 
updated_at) + SELECT u.hashed_key, $2, now(), now() + FROM UNNEST($1::bytea[]) AS u(hashed_key) + ON CONFLICT (hashed_key) DO NOTHING + ", + &hashed_keys, + l1_batch_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_protective_reads_for_l1_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> HashSet { + async_std::task::block_on(async { + sqlx::query!( + " + SELECT address, key FROM protective_reads + WHERE l1_batch_number = $1 + ", + l1_batch_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + StorageKey::new( + AccountTreeId::new(Address::from_slice(&row.address)), + H256::from_slice(&row.key), + ) + }) + .collect() + }) + } + + pub fn get_touched_slots_for_l1_batch( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> HashMap { + async_std::task::block_on(async { + let storage_logs = sqlx::query!( + " + SELECT address, key, value + FROM storage_logs + WHERE miniblock_number BETWEEN (SELECT MIN(number) FROM miniblocks WHERE l1_batch_number = $1) + AND (SELECT MAX(number) FROM miniblocks WHERE l1_batch_number = $1) + ORDER BY miniblock_number, operation_number + ", + l1_batch_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + + let mut touched_slots = HashMap::new(); + for storage_log in storage_logs.into_iter() { + touched_slots.insert( + StorageKey::new( + AccountTreeId::new(Address::from_slice(&storage_log.address)), + H256::from_slice(&storage_log.key), + ), + H256::from_slice(&storage_log.value), + ); + } + touched_slots + }) + } + + pub fn get_storage_logs_for_revert( + &mut self, + l1_batch_number: L1BatchNumber, + ) -> Vec<(H256, Option)> { + async_std::task::block_on(async { + let miniblock_number = match self + .storage + .blocks_dal() + .get_miniblock_range_of_l1_batch(l1_batch_number) + { + None => return Vec::new(), + Some((_, number)) => number, + }; + + vlog::info!("fetching keys that were changed after given 
block number"); + let modified_keys: Vec = sqlx::query!( + "SELECT DISTINCT ON (hashed_key) hashed_key FROM + (SELECT * FROM storage_logs WHERE miniblock_number > $1) inn", + miniblock_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| H256::from_slice(&row.hashed_key)) + .collect(); + vlog::info!("loaded {:?} keys", modified_keys.len()); + + let mut result: Vec<(H256, Option)> = vec![]; + + for key in modified_keys { + let initially_written_at: Option = sqlx::query!( + " + SELECT l1_batch_number FROM initial_writes + WHERE hashed_key = $1 + ", + key.as_bytes(), + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)); + match initially_written_at { + // Key isn't written to the storage - nothing to rollback. + None => continue, + // Key was initially written, it's needed to remove it. + Some(initially_written_at) if initially_written_at > l1_batch_number => { + result.push((key, None)); + } + // Key was rewritten, it's needed to restore the previous value. 
+ Some(_) => { + let previous_value: Vec = sqlx::query!( + " + SELECT value FROM storage_logs + WHERE hashed_key = $1 AND miniblock_number <= $2 + ORDER BY miniblock_number DESC, operation_number DESC + LIMIT 1 + ", + key.as_bytes(), + miniblock_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .value; + result.push((key, Some(H256::from_slice(&previous_value)))); + } + } + if result.len() % 1000 == 0 { + vlog::info!("processed {:?} values", result.len()); + } + } + + result + }) + } + + pub fn get_previous_storage_values( + &mut self, + hashed_keys: Vec, + l1_batch_number: L1BatchNumber, + ) -> HashMap { + async_std::task::block_on(async { + let hashed_keys: Vec<_> = hashed_keys.into_iter().map(|key| key.0.to_vec()).collect(); + let (miniblock_number, _) = self + .storage + .blocks_dal() + .get_miniblock_range_of_l1_batch(l1_batch_number) + .unwrap(); + sqlx::query!( + r#" + SELECT u.hashed_key as "hashed_key!", + (SELECT value FROM storage_logs + WHERE hashed_key = u.hashed_key AND miniblock_number < $2 + ORDER BY miniblock_number DESC, operation_number DESC LIMIT 1) as "value?" 
+ FROM UNNEST($1::bytea[]) AS u(hashed_key) + "#, + &hashed_keys, + miniblock_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + ( + H256::from_slice(&row.hashed_key), + row.value + .map(|value| H256::from_slice(&value)) + .unwrap_or_else(H256::zero), + ) + }) + .collect() + }) + } + + pub fn migrate_protective_reads( + &mut self, + from_l1_batch_number: L1BatchNumber, + to_l1_batch_number: L1BatchNumber, + ) { + async_std::task::block_on(async { + sqlx::query!( + "INSERT INTO protective_reads (l1_batch_number, address, key, created_at, updated_at) + SELECT storage_logs_dedup.l1_batch_number, storage_logs_dedup.address, storage_logs_dedup.key, now(), now() + FROM storage_logs_dedup + WHERE l1_batch_number BETWEEN $1 AND $2 + AND is_write = FALSE + ON CONFLICT DO NOTHING + ", + from_l1_batch_number.0 as i64, + to_l1_batch_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn migrate_initial_writes( + &mut self, + from_l1_batch_number: L1BatchNumber, + to_l1_batch_number: L1BatchNumber, + ) { + async_std::task::block_on(async { + sqlx::query!( + "INSERT INTO initial_writes (hashed_key, l1_batch_number, created_at, updated_at) + SELECT storage_logs_dedup.hashed_key, storage_logs_dedup.l1_batch_number, now(), now() + FROM storage_logs_dedup + WHERE l1_batch_number BETWEEN $1 AND $2 + AND is_write = TRUE + ON CONFLICT DO NOTHING + ", + from_l1_batch_number.0 as i64, + to_l1_batch_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} diff --git a/core/lib/dal/src/storage_web3_dal.rs b/core/lib/dal/src/storage_web3_dal.rs new file mode 100644 index 000000000000..e1f4491b5861 --- /dev/null +++ b/core/lib/dal/src/storage_web3_dal.rs @@ -0,0 +1,200 @@ +use crate::{SqlxError, StorageProcessor}; +use std::time::Instant; +use zksync_types::{ + api::BlockId, + get_code_key, get_nonce_key, + utils::{decompose_full_nonce, 
storage_key_for_standard_token_balance}, + AccountTreeId, Address, StorageKey, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, H256, U256, +}; +use zksync_utils::h256_to_u256; +use zksync_web3_decl::error::Web3Error; + +#[derive(Debug)] +pub struct StorageWeb3Dal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl StorageWeb3Dal<'_, '_> { + pub fn get_address_historical_nonce( + &mut self, + address: Address, + block_id: BlockId, + ) -> Result, SqlxError> { + let nonce_key = get_nonce_key(&address); + let nonce = self.get_historical_value(&nonce_key, block_id)?.map(|n| { + let full_nonce = h256_to_u256(n); + decompose_full_nonce(full_nonce).0 + }); + Ok(nonce) + } + + pub fn standard_token_historical_balance( + &mut self, + token_id: AccountTreeId, + account_id: AccountTreeId, + block_id: BlockId, + ) -> Result, SqlxError> { + let key = storage_key_for_standard_token_balance(token_id, account_id.address()); + + let balance = self.get_historical_value(&key, block_id)?; + Ok(balance.map(h256_to_u256)) + } + + pub fn get_historical_value( + &mut self, + key: &StorageKey, + block_id: BlockId, + ) -> Result, SqlxError> { + let block_number = self.storage.blocks_web3_dal().resolve_block_id(block_id)?; + match block_number { + Ok(block_number) => { + let value = self.get_historical_value_unchecked(key, block_number)?; + Ok(Ok(value)) + } + Err(err) => Ok(Err(err)), + } + } + + /// This method does not check if a block with this number exists in the database. + /// It will return the current value if the block is in the future. + pub fn get_historical_value_unchecked( + &mut self, + key: &StorageKey, + block_number: zksync_types::MiniblockNumber, + ) -> Result { + async_std::task::block_on(async { + let started_at = Instant::now(); + // We need to proper distinguish if the value is zero or None + // for the VM to correctly determine initial writes. + // So, we accept that the value is None if it's zero and it wasn't initially written at the moment. 
+ let result = sqlx::query!( + r#" + SELECT value + FROM storage_logs + WHERE storage_logs.hashed_key = $1 AND storage_logs.miniblock_number <= $2 + ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC + LIMIT 1 + "#, + key.hashed_key().0.to_vec(), + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .map(|option_row| { + option_row + .map(|row| H256::from_slice(&row.value)) + .unwrap_or_else(H256::zero) + }); + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "get_historical_value_unchecked"); + + result + }) + } + + pub fn is_write_initial( + &mut self, + key: &StorageKey, + block_number: zksync_types::MiniblockNumber, + consider_new_l1_batch: bool, + ) -> Result { + async_std::task::block_on(async { + let started_at = Instant::now(); + let row = sqlx::query!( + r#" + SELECT (SELECT l1_batch_number FROM initial_writes WHERE hashed_key = $1) as "initial_write_l1_batch_number?", + (SELECT miniblocks.l1_batch_number FROM miniblocks WHERE number = $2) as "current_l1_batch_number?" + "#, + key.hashed_key().0.to_vec(), + block_number.0 as i64 + ) + .fetch_one(self.storage.conn()) + .await?; + // Note: if `row.current_l1_batch_number` is `None` it means + // that the l1 batch that the miniblock is included in isn't sealed yet. 
+ let is_initial = match ( + row.current_l1_batch_number, + row.initial_write_l1_batch_number, + ) { + (_, None) => true, + (Some(current_l1_batch_number), Some(initial_write_l1_batch_number)) => { + if consider_new_l1_batch { + current_l1_batch_number < initial_write_l1_batch_number + } else { + current_l1_batch_number <= initial_write_l1_batch_number + } + } + (None, Some(_initial_write_l1_batch_number)) => false, + }; + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "is_write_initial"); + + Ok(is_initial) + }) + } + + pub fn get_contract_code( + &mut self, + address: Address, + block_id: BlockId, + ) -> Result>, Web3Error>, SqlxError> { + let block_number = self.storage.blocks_web3_dal().resolve_block_id(block_id)?; + match block_number { + Ok(block_number) => { + let code = self.get_contract_code_unchecked(address, block_number)?; + Ok(Ok(code)) + } + Err(err) => Ok(Err(err)), + } + } + + /// This method doesn't check if block with number equals to `block_number` + /// is present in the database. For such blocks `None` will be returned. + pub fn get_contract_code_unchecked( + &mut self, + address: Address, + block_number: zksync_types::MiniblockNumber, + ) -> Result>, SqlxError> { + let hashed_key = get_code_key(&address).hashed_key(); + async_std::task::block_on(async { + sqlx::query!( + " + SELECT bytecode FROM ( + SELECT * FROM storage_logs + WHERE + storage_logs.hashed_key = $1 AND + storage_logs.miniblock_number <= $2 + ORDER BY + storage_logs.miniblock_number DESC, storage_logs.operation_number DESC + LIMIT 1 + ) t + JOIN factory_deps ON value = factory_deps.bytecode_hash + WHERE value != $3 + ", + hashed_key.as_bytes(), + block_number.0 as i64, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes(), + ) + .fetch_optional(self.storage.conn()) + .await + .map(|option_row| option_row.map(|row| row.bytecode)) + }) + } + + /// This method doesn't check if block with number equals to `block_number` + /// is present in the database. 
For such blocks `None` will be returned. + pub fn get_factory_dep_unchecked( + &mut self, + hash: H256, + block_number: zksync_types::MiniblockNumber, + ) -> Result>, SqlxError> { + async_std::task::block_on(async { + sqlx::query!( + "SELECT bytecode FROM factory_deps WHERE bytecode_hash = $1 AND miniblock_number <= $2", + &hash.0.to_vec(), + block_number.0 as i64 + ) + .fetch_optional(self.storage.conn()) + .await + .map(|option_row| option_row.map(|row| row.bytecode)) + }) + } +} diff --git a/core/lib/dal/src/tests/mod.rs b/core/lib/dal/src/tests/mod.rs new file mode 100644 index 000000000000..72c515e777e1 --- /dev/null +++ b/core/lib/dal/src/tests/mod.rs @@ -0,0 +1,240 @@ +use std::time::Duration; + +use db_test_macro::db_test; +use zksync_types::block::{L1BatchHeader, MiniblockHeader}; +use zksync_types::proofs::AggregationRound; +use zksync_types::{ + fee::{Fee, TransactionExecutionMetrics}, + helpers::unix_timestamp_ms, + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + l2::L2Tx, + tx::{tx_execution_info::TxExecutionStatus, TransactionExecutionResult}, + Address, Execute, L1BatchNumber, L1BlockNumber, L1TxCommonData, L2ChainId, MiniblockNumber, + PriorityOpId, H160, H256, U256, +}; +use zksync_types::{FAIR_L2_GAS_PRICE, MAX_GAS_PER_PUBDATA_BYTE}; + +use crate::blocks_dal::BlocksDal; +use crate::prover_dal::{GetProverJobsParams, ProverDal}; +use crate::transactions_dal::L2TxSubmissionResult; +use crate::transactions_dal::TransactionsDal; +use crate::transactions_web3_dal::TransactionsWeb3Dal; + +fn mock_tx_execution_metrics() -> TransactionExecutionMetrics { + TransactionExecutionMetrics::default() +} + +const DEFAULT_GAS_PER_PUBDATA: u32 = 100; + +fn mock_l2_transaction() -> L2Tx { + let fee = Fee { + gas_limit: U256::from(1_000_000u32), + max_fee_per_gas: FAIR_L2_GAS_PRICE.into(), + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(DEFAULT_GAS_PER_PUBDATA), + }; + let mut l2_tx = L2Tx::new_signed( + Address::random(), + vec![], + 
zksync_types::Nonce(0), + fee, + Default::default(), + L2ChainId(270), + &H256::random(), + None, + Default::default(), + ) + .unwrap(); + + l2_tx.set_input(H256::random().0.to_vec(), H256::random()); + l2_tx +} + +fn mock_l1_execute() -> L1Tx { + let serial_id = 1; + let priority_op_data = L1TxCommonData { + sender: H160::random(), + canonical_tx_hash: H256::from_low_u64_be(serial_id), + serial_id: PriorityOpId(serial_id), + deadline_block: 100000, + layer_2_tip_fee: U256::zero(), + full_fee: U256::zero(), + gas_limit: U256::from(100_100), + gas_per_pubdata_limit: MAX_GAS_PER_PUBDATA_BYTE.into(), + op_processing_type: OpProcessingType::Common, + priority_queue_type: PriorityQueueType::Deque, + eth_hash: H256::random(), + to_mint: U256::zero(), + refund_recipient: Address::random(), + eth_block: 1, + }; + + let execute = Execute { + contract_address: H160::random(), + value: Default::default(), + calldata: vec![], + factory_deps: None, + }; + + L1Tx { + common_data: priority_op_data, + execute, + received_timestamp_ms: 0, + } +} + +#[db_test(dal_crate)] +async fn workflow_with_submit_tx_equal_hashes(connection_pool: ConnectionPool) { + let storage = &mut connection_pool.access_test_storage().await; + let mut transactions_dal = TransactionsDal { storage }; + + let tx = mock_l2_transaction(); + let result = transactions_dal.insert_transaction_l2(tx.clone(), mock_tx_execution_metrics()); + + assert_eq!(result, L2TxSubmissionResult::Added); + + let result = transactions_dal.insert_transaction_l2(tx, mock_tx_execution_metrics()); + + assert_eq!(result, L2TxSubmissionResult::Replaced); +} + +#[db_test(dal_crate)] +async fn workflow_with_submit_tx_diff_hashes(connection_pool: ConnectionPool) { + let storage = &mut connection_pool.access_test_storage().await; + let mut transactions_dal = TransactionsDal { storage }; + + let tx = mock_l2_transaction(); + + let nonce = tx.common_data.nonce; + let initiator_address = tx.common_data.initiator_address; + + let result = 
transactions_dal.insert_transaction_l2(tx, mock_tx_execution_metrics()); + + assert_eq!(result, L2TxSubmissionResult::Added); + + let mut tx = mock_l2_transaction(); + tx.common_data.nonce = nonce; + tx.common_data.initiator_address = initiator_address; + let result = transactions_dal.insert_transaction_l2(tx, mock_tx_execution_metrics()); + + assert_eq!(result, L2TxSubmissionResult::Replaced); +} + +#[db_test(dal_crate)] +async fn remove_stuck_txs(connection_pool: ConnectionPool) { + let storage = &mut connection_pool.access_test_storage().await; + let mut transactions_dal = TransactionsDal { storage }; + let storage = &mut connection_pool.access_test_storage().await; + let mut blocks_dal = BlocksDal { storage }; + + // Stuck tx + let mut tx = mock_l2_transaction(); + tx.received_timestamp_ms = unix_timestamp_ms() - Duration::new(1000, 0).as_millis() as u64; + transactions_dal.insert_transaction_l2(tx, mock_tx_execution_metrics()); + // Tx in mempool + let tx = mock_l2_transaction(); + transactions_dal.insert_transaction_l2(tx, mock_tx_execution_metrics()); + + // Stuck L1 tx. 
We should never ever remove L1 tx + let mut tx = mock_l1_execute(); + tx.received_timestamp_ms = unix_timestamp_ms() - Duration::new(1000, 0).as_millis() as u64; + transactions_dal.insert_transaction_l1(tx, L1BlockNumber(1)); + + // Old executed tx + let mut executed_tx = mock_l2_transaction(); + executed_tx.received_timestamp_ms = + unix_timestamp_ms() - Duration::new(1000, 0).as_millis() as u64; + transactions_dal.insert_transaction_l2(executed_tx.clone(), mock_tx_execution_metrics()); + + // Get all txs + transactions_dal.reset_mempool(); + let txs = transactions_dal.sync_mempool(vec![], vec![], 0, 0, 1000).0; + assert_eq!(txs.len(), 4); + + blocks_dal.insert_miniblock(MiniblockHeader { + number: MiniblockNumber(1), + timestamp: 0, + hash: Default::default(), + l1_tx_count: 0, + l2_tx_count: 0, + base_fee_per_gas: Default::default(), + l1_gas_price: 0, + l2_fair_gas_price: 0, + }); + transactions_dal.mark_txs_as_executed_in_miniblock( + MiniblockNumber(1), + &[TransactionExecutionResult { + transaction: executed_tx.clone().into(), + hash: executed_tx.hash(), + execution_info: Default::default(), + execution_status: TxExecutionStatus::Success, + refunded_gas: 0, + operator_suggested_refund: 0, + }], + U256::from(1), + ); + + // Get all txs + transactions_dal.reset_mempool(); + let txs = transactions_dal.sync_mempool(vec![], vec![], 0, 0, 1000).0; + assert_eq!(txs.len(), 3); + + // Remove one stuck tx + let removed_txs = transactions_dal.remove_stuck_txs(Duration::from_secs(500)); + assert_eq!(removed_txs, 1); + transactions_dal.reset_mempool(); + let txs = transactions_dal.sync_mempool(vec![], vec![], 0, 0, 1000).0; + assert_eq!(txs.len(), 2); + + // We shouldn't collect executed tx + let storage = &mut connection_pool.access_test_storage().await; + let mut transactions_web3_dal = TransactionsWeb3Dal { storage }; + transactions_web3_dal + .get_transaction_receipt(executed_tx.hash()) + .unwrap() + .unwrap(); +} + +#[db_test(dal_crate)] +async fn 
test_duplicate_insert_prover_jobs(connection_pool: ConnectionPool) { + let storage = &mut connection_pool.access_test_storage().await; + let block_number = 1; + let header = L1BatchHeader::mock(L1BatchNumber(block_number)); + storage + .blocks_dal() + .insert_l1_batch(header, Default::default()); + + let mut prover_dal = ProverDal { storage }; + let circuits: Vec = vec![ + "Main VM".to_string(), + "SHA256".to_string(), + "Code decommitter".to_string(), + "Log demuxer".to_string(), + ]; + let l1_batch_number = L1BatchNumber(block_number); + prover_dal.insert_prover_jobs( + l1_batch_number, + circuits.clone(), + AggregationRound::BasicCircuits, + ); + + // try inserting the same jobs again to ensure it does not panic + prover_dal.insert_prover_jobs( + l1_batch_number, + circuits.clone(), + AggregationRound::BasicCircuits, + ); + + let prover_jobs_params = GetProverJobsParams { + statuses: None, + blocks: Some(std::ops::Range { + start: l1_batch_number, + end: l1_batch_number + 1, + }), + limit: None, + desc: false, + round: None, + }; + let jobs = prover_dal.get_jobs(prover_jobs_params).unwrap(); + assert_eq!(circuits.len(), jobs.len()); +} diff --git a/core/lib/dal/src/time_utils.rs b/core/lib/dal/src/time_utils.rs new file mode 100644 index 000000000000..78e71d234d4e --- /dev/null +++ b/core/lib/dal/src/time_utils.rs @@ -0,0 +1,21 @@ +use sqlx::postgres::types::PgInterval; +use sqlx::types::chrono::NaiveTime; +use std::time::Duration; + +pub fn duration_to_naive_time(duration: Duration) -> NaiveTime { + let total_seconds = duration.as_secs() as u32; + NaiveTime::from_hms_opt( + total_seconds / 3600, + (total_seconds / 60) % 60, + total_seconds % 60, + ) + .unwrap() +} + +pub fn pg_interval_from_duration(processing_timeout: Duration) -> PgInterval { + PgInterval { + months: 0, + days: 0, + microseconds: processing_timeout.as_micros() as i64, + } +} diff --git a/core/lib/dal/src/tokens_dal.rs b/core/lib/dal/src/tokens_dal.rs new file mode 100644 index 
000000000000..9f3628a3de9b --- /dev/null +++ b/core/lib/dal/src/tokens_dal.rs @@ -0,0 +1,201 @@ +use crate::models::storage_token::StorageTokenMarketVolume; +use crate::StorageProcessor; +use num::{rational::Ratio, BigUint}; +use sqlx::types::chrono::Utc; +use zksync_types::{ + tokens::{TokenInfo, TokenMarketVolume, TokenMetadata, TokenPrice}, + Address, MiniblockNumber, ACCOUNT_CODE_STORAGE_ADDRESS, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, +}; +use zksync_utils::ratio_to_big_decimal; + +// Precision of the USD price per token +pub(crate) const STORED_USD_PRICE_PRECISION: usize = 6; + +#[derive(Debug)] +pub struct TokensDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl TokensDal<'_, '_> { + pub fn add_tokens(&mut self, tokens: Vec) { + async_std::task::block_on(async { + let mut copy = self + .storage + .conn() + .copy_in_raw( + "COPY tokens (l1_address, l2_address, name, symbol, decimals, well_known, created_at, updated_at) + FROM STDIN WITH (DELIMITER '|')", + ) + .await + .unwrap(); + + let mut bytes: Vec = Vec::new(); + let now = Utc::now().naive_utc().to_string(); + for TokenInfo { + l1_address, + l2_address, + metadata: + TokenMetadata { + name, + symbol, + decimals, + }, + } in tokens + { + let l1_address_str = format!("\\\\x{}", hex::encode(l1_address.0)); + let l2_address_str = format!("\\\\x{}", hex::encode(l2_address.0)); + let row = format!( + "{}|{}|{}|{}|{}|FALSE|{}|{}\n", + l1_address_str, l2_address_str, name, symbol, decimals, now, now + ); + bytes.extend_from_slice(row.as_bytes()); + } + copy.send(bytes).await.unwrap(); + copy.finish().await.unwrap(); + }) + } + + pub fn update_well_known_l1_token(&mut self, l1_address: &Address, metadata: TokenMetadata) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE tokens SET token_list_name = $2, token_list_symbol = $3, + token_list_decimals = $4, well_known = true, updated_at = now() + WHERE l1_address = $1 + ", + l1_address.as_bytes(), + metadata.name, + 
metadata.symbol, + metadata.decimals as i32, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_well_known_token_addresses(&mut self) -> Vec<(Address, Address)> { + async_std::task::block_on(async { + let records = + sqlx::query!("SELECT l1_address, l2_address FROM tokens WHERE well_known = true") + .fetch_all(self.storage.conn()) + .await + .unwrap(); + let addresses: Vec<(Address, Address)> = records + .into_iter() + .map(|record| { + ( + Address::from_slice(&record.l1_address), + Address::from_slice(&record.l2_address), + ) + }) + .collect(); + addresses + }) + } + + pub fn get_unknown_l1_token_addresses(&mut self) -> Vec
{ + async_std::task::block_on(async { + let records = sqlx::query!("SELECT l1_address FROM tokens WHERE well_known = false") + .fetch_all(self.storage.conn()) + .await + .unwrap(); + let addresses: Vec
= records + .into_iter() + .map(|record| Address::from_slice(&record.l1_address)) + .collect(); + addresses + }) + } + + pub fn get_l1_tokens_by_volume(&mut self, min_volume: &Ratio) -> Vec
{ + async_std::task::block_on(async { + let min_volume = ratio_to_big_decimal(min_volume, STORED_USD_PRICE_PRECISION); + let records = sqlx::query!( + "SELECT l1_address FROM tokens WHERE market_volume > $1", + min_volume + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + let addresses: Vec
= records + .into_iter() + .map(|record| Address::from_slice(&record.l1_address)) + .collect(); + addresses + }) + } + + pub fn set_l1_token_price(&mut self, l1_address: &Address, price: TokenPrice) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE tokens SET usd_price = $2, usd_price_updated_at = $3, updated_at = now() WHERE l1_address = $1", + l1_address.as_bytes(), + ratio_to_big_decimal(&price.usd_price, STORED_USD_PRICE_PRECISION), + price.last_updated.naive_utc(), + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn set_l1_token_market_volume( + &mut self, + l1_address: &Address, + market_volume: TokenMarketVolume, + ) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE tokens SET market_volume = $2, market_volume_updated_at = $3, updated_at = now() WHERE l1_address = $1", + l1_address.as_bytes(), + ratio_to_big_decimal(&market_volume.market_volume, STORED_USD_PRICE_PRECISION), + market_volume.last_updated.naive_utc(), + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_token_market_volume(&mut self, l2_address: &Address) -> Option { + async_std::task::block_on(async { + let storage_market_volume = sqlx::query_as!( + StorageTokenMarketVolume, + "SELECT market_volume, market_volume_updated_at FROM tokens WHERE l2_address = $1", + l2_address.as_bytes(), + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + storage_market_volume.and_then(Into::into) + }) + } + + pub fn rollback_tokens(&mut self, block_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + " + DELETE FROM tokens + WHERE l2_address IN + ( + SELECT substring(key, 12, 20) FROM storage_logs + WHERE storage_logs.address = $1 AND miniblock_number > $2 AND NOT EXISTS ( + SELECT 1 FROM storage_logs as s + WHERE + s.hashed_key = storage_logs.hashed_key AND + (s.miniblock_number, s.operation_number) >= (storage_logs.miniblock_number, storage_logs.operation_number) AND + s.value = $3 + ) + 
) + ", + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + block_number.0 as i64, + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} diff --git a/core/lib/dal/src/tokens_web3_dal.rs b/core/lib/dal/src/tokens_web3_dal.rs new file mode 100644 index 000000000000..d828c0684e02 --- /dev/null +++ b/core/lib/dal/src/tokens_web3_dal.rs @@ -0,0 +1,124 @@ +use crate::models::storage_token::{StorageTokenMetadata, StorageTokenPrice}; +use crate::SqlxError; +use crate::StorageProcessor; +use num::{rational::Ratio, BigUint}; +use sqlx::postgres::types::PgInterval; +use zksync_types::{ + tokens::{TokenInfo, TokenMetadata, TokenPrice}, + Address, +}; +use zksync_utils::ratio_to_big_decimal; + +// Precision of the USD price per token +pub(crate) const STORED_USD_PRICE_PRECISION: usize = 6; + +#[derive(Debug)] +pub struct TokensWeb3Dal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl TokensWeb3Dal<'_, '_> { + pub fn get_well_known_tokens(&mut self) -> Result, SqlxError> { + async_std::task::block_on(async { + let records = sqlx::query!( + "SELECT l1_address, l2_address, name, symbol, decimals FROM tokens + WHERE well_known = true + ORDER BY symbol" + ) + .fetch_all(self.storage.conn()) + .await?; + let result: Vec = records + .into_iter() + .map(|record| TokenInfo { + l1_address: Address::from_slice(&record.l1_address), + l2_address: Address::from_slice(&record.l2_address), + metadata: TokenMetadata { + name: record.name, + symbol: record.symbol, + decimals: record.decimals as u8, + }, + }) + .collect(); + Ok(result) + }) + } + + pub fn is_token_actively_trading( + &mut self, + l2_token: &Address, + min_volume: &Ratio, + max_acceptable_volume_age_in_secs: u32, + max_acceptable_price_age_in_secs: u32, + ) -> Result { + async_std::task::block_on(async { + let min_volume = ratio_to_big_decimal(min_volume, STORED_USD_PRICE_PRECISION); + let volume_pg_interval = PgInterval { + months: 0, + days: 0, + 
microseconds: (max_acceptable_volume_age_in_secs as i64) * 1000000, + }; + let price_pg_interval = PgInterval { + months: 0, + days: 0, + microseconds: (max_acceptable_price_age_in_secs as i64) * 1000000, + }; + let count = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" FROM tokens + WHERE l2_address = $1 AND + market_volume > $2 AND now() - market_volume_updated_at < $3 AND + usd_price > 0 AND now() - usd_price_updated_at < $4 + "#, + l2_token.as_bytes(), + min_volume, + volume_pg_interval, + price_pg_interval + ) + .fetch_one(self.storage.conn()) + .await + .unwrap() + .count; + Ok(count == 1) + }) + } + + pub fn get_token_price( + &mut self, + l2_address: &Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let storage_price = sqlx::query_as!( + StorageTokenPrice, + "SELECT usd_price, usd_price_updated_at FROM tokens WHERE l2_address = $1", + l2_address.as_bytes(), + ) + .fetch_optional(self.storage.conn()) + .await?; + + Ok(storage_price.and_then(Into::into)) + }) + } + + pub fn get_token_metadata( + &mut self, + l2_address: &Address, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let storage_token_metadata = sqlx::query_as!( + StorageTokenMetadata, + r#" + SELECT + COALESCE(token_list_name, name) as "name!", + COALESCE(token_list_symbol, symbol) as "symbol!", + COALESCE(token_list_decimals, decimals) as "decimals!" 
+ FROM tokens WHERE l2_address = $1 + "#, + l2_address.as_bytes(), + ) + .fetch_optional(self.storage.conn()) + .await?; + + Ok(storage_token_metadata.map(Into::into)) + }) + } +} diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs new file mode 100644 index 000000000000..a9be90932052 --- /dev/null +++ b/core/lib/dal/src/transactions_dal.rs @@ -0,0 +1,821 @@ +use bigdecimal::BigDecimal; +use std::collections::HashMap; +use std::fmt::{self, Debug}; +use std::iter::FromIterator; +use std::time::Duration; +use zksync_types::fee::TransactionExecutionMetrics; + +use itertools::Itertools; +use sqlx::error; +use sqlx::types::chrono::NaiveDateTime; + +use zksync_types::tx::tx_execution_info::TxExecutionStatus; +use zksync_types::{get_nonce_key, U256}; +use zksync_types::{ + l1::L1Tx, l2::L2Tx, tx::TransactionExecutionResult, vm_trace::VmExecutionTrace, Address, + ExecuteTransactionCommon, L1BatchNumber, L1BlockNumber, MiniblockNumber, Nonce, PriorityOpId, + Transaction, H256, +}; +use zksync_utils::{h256_to_u32, u256_to_big_decimal}; + +use crate::models::storage_transaction::StorageTransaction; +use crate::time_utils::pg_interval_from_duration; +use crate::StorageProcessor; + +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +pub enum L2TxSubmissionResult { + Added, + Replaced, + AlreadyExecuted, + Duplicate, +} +impl fmt::Display for L2TxSubmissionResult { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", self) + } +} + +pub struct TransactionsDal<'c, 'a> { + pub storage: &'c mut StorageProcessor<'a>, +} + +type TxLocations = Vec<(MiniblockNumber, Vec<(H256, u32, u16)>)>; + +impl TransactionsDal<'_, '_> { + pub fn insert_transaction_l1(&mut self, tx: L1Tx, l1_block_number: L1BlockNumber) { + async_std::task::block_on(async { + let contract_address = tx.execute.contract_address.as_bytes().to_vec(); + let tx_hash = tx.hash().0.to_vec(); + let json_data = serde_json::to_value(&tx.execute) + 
.unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())); + let gas_limit = u256_to_big_decimal(tx.common_data.gas_limit); + let full_fee = u256_to_big_decimal(tx.common_data.full_fee); + let layer_2_tip_fee = u256_to_big_decimal(tx.common_data.layer_2_tip_fee); + let sender = tx.common_data.sender.0.to_vec(); + let serial_id = tx.serial_id().0 as i64; + let gas_per_pubdata_limit = u256_to_big_decimal(tx.common_data.gas_per_pubdata_limit); + let value = u256_to_big_decimal(tx.execute.value); + let tx_format = tx.common_data.tx_format() as i32; + + let to_mint = u256_to_big_decimal(tx.common_data.to_mint); + let refund_recipient = tx.common_data.refund_recipient.as_bytes().to_vec(); + + let secs = (tx.received_timestamp_ms / 1000) as i64; + let nanosecs = ((tx.received_timestamp_ms % 1000) * 1_000_000) as u32; + let received_at = NaiveDateTime::from_timestamp_opt(secs, nanosecs).unwrap(); + + sqlx::query!( + " + INSERT INTO transactions + ( + hash, + is_priority, + initiator_address, + + gas_limit, + gas_per_pubdata_limit, + + data, + priority_op_id, + full_fee, + layer_2_tip_fee, + contract_address, + l1_block_number, + value, + + paymaster, + paymaster_input, + tx_format, + + l1_tx_mint, + l1_tx_refund_recipient, + + received_at, + created_at, + updated_at + ) + VALUES + ( + $1, TRUE, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, + $13, $14, $15, $16, $17, now(), now() + ) + ", + tx_hash, + sender, + gas_limit, + gas_per_pubdata_limit, + json_data, + serial_id, + full_fee, + layer_2_tip_fee, + contract_address, + l1_block_number.0 as i32, + value, + &Address::default().0.to_vec(), + &vec![], + tx_format, + to_mint, + refund_recipient, + received_at, + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn insert_transaction_l2( + &mut self, + tx: L2Tx, + exec_info: TransactionExecutionMetrics, + ) -> L2TxSubmissionResult { + async_std::task::block_on(async { + let contract_address = 
tx.execute.contract_address.as_bytes().to_vec(); + let tx_hash = tx.hash().0.to_vec(); + let json_data = serde_json::to_value(&tx.execute) + .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())); + let gas_limit = u256_to_big_decimal(tx.common_data.fee.gas_limit); + let max_fee_per_gas = u256_to_big_decimal(tx.common_data.fee.max_fee_per_gas); + let max_priority_fee_per_gas = + u256_to_big_decimal(tx.common_data.fee.max_priority_fee_per_gas); + let gas_per_pubdata_limit = + u256_to_big_decimal(tx.common_data.fee.gas_per_pubdata_limit); + let tx_format = tx.common_data.transaction_type as i32; + let initiator = tx.initiator_account().0.to_vec(); + let signature = tx.common_data.signature.clone(); + let nonce = tx.common_data.nonce.0 as i64; + let input_data = tx + .common_data + .input + .clone() + .expect("Data is mandatory") + .data; + let value = u256_to_big_decimal(tx.execute.value); + let paymaster = tx.common_data.paymaster_params.paymaster.0.to_vec(); + let paymaster_input = tx.common_data.paymaster_params.paymaster_input.clone(); + let secs = (tx.received_timestamp_ms / 1000) as i64; + let nanosecs = ((tx.received_timestamp_ms % 1000) * 1_000_000) as u32; + let received_at = NaiveDateTime::from_timestamp_opt(secs, nanosecs).unwrap(); + // Besides just adding or updating(on conflict) the record, we want to extract some info + // from the query below, to indicate what actually happened: + // 1) transaction is added + // 2) transaction is replaced + // 3) WHERE clause conditions for DO UPDATE block were not met, so the transaction can't be replaced + // the subquery in RETURNING clause looks into pre-UPDATE state of the table. So if the subquery will return NULL + // transaction is fresh and was added to db(the second condition of RETURNING clause checks it). 
+ // Otherwise, if the subquery won't return NULL it means that there is already tx with such nonce and initiator_address in DB + // and we can replace it WHERE clause conditions are met. + // It is worth mentioning that if WHERE clause conditions are not met, None will be returned. + let query_result = sqlx::query!( + r#" + INSERT INTO transactions + ( + hash, + is_priority, + initiator_address, + nonce, + signature, + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + gas_per_pubdata_limit, + input, + data, + tx_format, + contract_address, + value, + paymaster, + paymaster_input, + execution_info, + received_at, + created_at, + updated_at + ) + VALUES + ( + $1, FALSE, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, + jsonb_build_object('gas_used', $16::bigint, 'storage_writes', $17::int, 'contracts_used', $18::int), + $19, now(), now() + ) + ON CONFLICT + (initiator_address, nonce) + DO UPDATE + SET hash=$1, + signature=$4, + gas_limit=$5, + max_fee_per_gas=$6, + max_priority_fee_per_gas=$7, + gas_per_pubdata_limit=$8, + input=$9, + data=$10, + tx_format=$11, + contract_address=$12, + value=$13, + paymaster=$14, + paymaster_input=$15, + execution_info=jsonb_build_object('gas_used', $16::bigint, 'storage_writes', $17::int, 'contracts_used', $18::int), + in_mempool=FALSE, + received_at=$19, + created_at=now(), + updated_at=now(), + error = NULL + WHERE transactions.is_priority = FALSE AND transactions.miniblock_number IS NULL + RETURNING (SELECT hash FROM transactions WHERE transactions.initiator_address = $2 AND transactions.nonce = $3) IS NOT NULL as "is_replaced!" 
+ "#, + &tx_hash, + &initiator, + nonce, + &signature, + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + gas_per_pubdata_limit, + input_data, + &json_data, + tx_format, + contract_address, + value, + &paymaster, + &paymaster_input, + exec_info.gas_used as i64, + (exec_info.initial_storage_writes + exec_info.repeated_storage_writes) as i32, + exec_info.contracts_used as i32, + received_at + ) + .fetch_optional(self.storage.conn()) + .await + .map(|option_record| option_record.map(|record| record.is_replaced)); + + let l2_tx_insertion_result = match query_result { + Ok(option_query_result) => match option_query_result { + Some(true) => L2TxSubmissionResult::Replaced, + Some(false) => L2TxSubmissionResult::Added, + None => L2TxSubmissionResult::AlreadyExecuted, + }, + Err(err) => { + // So, we consider a tx hash to be a primary key of the transaction + // Based on the idea that we can't have two transactions with the same hash + // We assume that if there already exists some transaction with some tx hash + // another tx with the same tx hash is supposed to have the same data + // In this case we identify it as Duplicate + // Note, this error can happen because of the race condition (tx can be taken by several + // api servers, that simultaneously start execute it and try to inserted to DB) + if let error::Error::Database(ref error) = err { + if let Some(constraint) = error.constraint() { + if constraint == "transactions_pkey" { + return L2TxSubmissionResult::Duplicate; + } + } + } + panic!("{}", err); + } + }; + vlog::debug!( + "{:?} l2 transaction {:?} to DB. 
init_acc {:?} nonce {:?} returned option {:?}", + l2_tx_insertion_result, + tx.hash(), + tx.initiator_account(), + tx.nonce(), + l2_tx_insertion_result + ); + + l2_tx_insertion_result + }) + } + + pub fn mark_txs_as_executed_in_l1_batch( + &mut self, + block_number: L1BatchNumber, + transactions: &[TransactionExecutionResult], + ) { + async_std::task::block_on(async { + let hashes: Vec> = transactions + .iter() + .map(|tx| tx.hash.as_bytes().to_vec()) + .collect(); + let l1_batch_tx_indexes = Vec::from_iter(0..transactions.len() as i32); + sqlx::query!( + " + UPDATE transactions + SET + l1_batch_number = $3, + l1_batch_tx_index = data_table.l1_batch_tx_index, + updated_at = now() + FROM + (SELECT + UNNEST($1::int[]) AS l1_batch_tx_index, + UNNEST($2::bytea[]) AS hash + ) AS data_table + WHERE transactions.hash=data_table.hash + ", + &l1_batch_tx_indexes, + &hashes, + block_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn set_correct_tx_type_for_priority_operations(&mut self, limit: u32) -> bool { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE transactions + SET tx_format=255 + WHERE hash IN ( + SELECT hash + FROM transactions + WHERE is_priority = true + AND tx_format is null + LIMIT $1 + ) + RETURNING tx_format + "#, + limit as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .is_some() + }) + } + + pub fn mark_txs_as_executed_in_miniblock( + &mut self, + miniblock_number: MiniblockNumber, + transactions: &[TransactionExecutionResult], + block_base_fee_per_gas: U256, + ) { + async_std::task::block_on(async { + let mut l1_hashes = Vec::with_capacity(transactions.len()); + let mut l1_indices_in_block = Vec::with_capacity(transactions.len()); + let mut l1_errors = Vec::with_capacity(transactions.len()); + let mut l1_execution_infos = Vec::with_capacity(transactions.len()); + + let mut l2_hashes = Vec::with_capacity(transactions.len()); + let mut l2_indices_in_block = 
Vec::with_capacity(transactions.len()); + let mut l2_initiators = Vec::with_capacity(transactions.len()); + let mut l2_nonces = Vec::with_capacity(transactions.len()); + let mut l2_signatures = Vec::with_capacity(transactions.len()); + let mut l2_tx_formats = Vec::with_capacity(transactions.len()); + let mut l2_errors = Vec::with_capacity(transactions.len()); + let mut l2_effective_gas_prices = Vec::with_capacity(transactions.len()); + let mut l2_execution_infos = Vec::with_capacity(transactions.len()); + let mut l2_inputs = Vec::with_capacity(transactions.len()); + let mut l2_datas = Vec::with_capacity(transactions.len()); + let mut l2_gas_limits = Vec::with_capacity(transactions.len()); + let mut l2_max_fees_per_gas = Vec::with_capacity(transactions.len()); + let mut l2_max_priority_fees_per_gas = Vec::with_capacity(transactions.len()); + let mut l2_gas_per_pubdata_limit = Vec::with_capacity(transactions.len()); + let mut l2_refunded_gas = Vec::with_capacity(transactions.len()); + + transactions + .iter() + .enumerate() + .for_each(|(index_in_block, tx_res)| { + let TransactionExecutionResult { + hash, + execution_info, + transaction, + execution_status, + refunded_gas, + .. + } = tx_res; + + // Bootloader currently doesn't return detailed errors. + let error = match execution_status { + TxExecutionStatus::Success => None, + // The string error used here is copied from the previous version. + // It is applied to every failed transaction - + // currently detailed errors are not supported. 
+ TxExecutionStatus::Failure => Some("Bootloader-based tx failed".to_owned()), + }; + + match &transaction.common_data { + ExecuteTransactionCommon::L1(_) => { + l1_hashes.push(hash.0.to_vec()); + l1_indices_in_block.push(index_in_block as i32); + l1_errors.push(error.unwrap_or_default()); + l1_execution_infos.push(serde_json::to_value(execution_info).unwrap()); + } + ExecuteTransactionCommon::L2(common_data) => { + let data = serde_json::to_value(&transaction.execute).unwrap(); + l2_hashes.push(hash.0.to_vec()); + l2_indices_in_block.push(index_in_block as i32); + l2_initiators.push(transaction.initiator_account().0.to_vec()); + l2_nonces.push(common_data.nonce.0 as i32); + l2_signatures.push(common_data.signature.clone()); + l2_tx_formats.push(common_data.transaction_type as i32); + l2_errors.push(error.unwrap_or_default()); + let l2_effective_gas_price = common_data + .fee + .get_effective_gas_price(block_base_fee_per_gas); + l2_effective_gas_prices + .push(u256_to_big_decimal(l2_effective_gas_price)); + l2_execution_infos.push(serde_json::to_value(execution_info).unwrap()); + // Normally input data is mandatory + l2_inputs.push(common_data.input_data().unwrap_or_default()); + l2_datas.push(data); + l2_gas_limits.push(u256_to_big_decimal(common_data.fee.gas_limit)); + l2_max_fees_per_gas + .push(u256_to_big_decimal(common_data.fee.max_fee_per_gas)); + l2_max_priority_fees_per_gas.push(u256_to_big_decimal( + common_data.fee.max_priority_fee_per_gas, + )); + l2_gas_per_pubdata_limit + .push(u256_to_big_decimal(common_data.fee.gas_per_pubdata_limit)); + l2_refunded_gas.push(*refunded_gas as i64); + } + } + }); + + if !l2_hashes.is_empty() { + // Update l2 txs + + // Due to the current tx replacement model, it's possible that tx has been replaced, + // but the original was executed in memory, + // so we have to update all fields for tx from fields stored in memory. 
+ sqlx::query!( + r#" + UPDATE transactions + SET + hash = data_table.hash, + signature = data_table.signature, + gas_limit = data_table.gas_limit, + max_fee_per_gas = data_table.max_fee_per_gas, + max_priority_fee_per_gas = data_table.max_priority_fee_per_gas, + gas_per_pubdata_limit = data_table.gas_per_pubdata_limit, + input = data_table.input, + data = data_table.data, + tx_format = data_table.tx_format, + miniblock_number = $17, + index_in_block = data_table.index_in_block, + error = NULLIF(data_table.error, ''), + effective_gas_price = data_table.effective_gas_price, + execution_info = data_table.new_execution_info, + refunded_gas = data_table.refunded_gas, + in_mempool = FALSE, + updated_at = now() + FROM + ( + SELECT + UNNEST($1::bytea[]) AS initiator_address, + UNNEST($2::int[]) AS nonce, + UNNEST($3::bytea[]) AS hash, + UNNEST($4::bytea[]) AS signature, + UNNEST($5::numeric[]) AS gas_limit, + UNNEST($6::numeric[]) AS max_fee_per_gas, + UNNEST($7::numeric[]) AS max_priority_fee_per_gas, + UNNEST($8::numeric[]) AS gas_per_pubdata_limit, + UNNEST($9::int[]) AS tx_format, + UNNEST($10::integer[]) AS index_in_block, + UNNEST($11::varchar[]) AS error, + UNNEST($12::numeric[]) AS effective_gas_price, + UNNEST($13::jsonb[]) AS new_execution_info, + UNNEST($14::bytea[]) AS input, + UNNEST($15::jsonb[]) AS data, + UNNEST($16::bigint[]) as refunded_gas + ) AS data_table + WHERE transactions.initiator_address=data_table.initiator_address + AND transactions.nonce=data_table.nonce + "#, + &l2_initiators, + &l2_nonces, + &l2_hashes, + &l2_signatures, + &l2_gas_limits, + &l2_max_fees_per_gas, + &l2_max_priority_fees_per_gas, + &l2_gas_per_pubdata_limit, + &l2_tx_formats, + &l2_indices_in_block, + &l2_errors, + &l2_effective_gas_prices, + &l2_execution_infos, + &l2_inputs, + &l2_datas, + &l2_refunded_gas, + miniblock_number.0 as i32, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + + // We can't replace l1 transaction, so we simply write the execution 
result + if !l1_hashes.is_empty() { + sqlx::query!( + r#" + UPDATE transactions + SET + miniblock_number = $1, + index_in_block = data_table.index_in_block, + error = NULLIF(data_table.error, ''), + in_mempool=FALSE, + execution_info = execution_info || data_table.new_execution_info, + updated_at = now() + FROM + ( + SELECT + UNNEST($2::bytea[]) AS hash, + UNNEST($3::integer[]) AS index_in_block, + UNNEST($4::varchar[]) AS error, + UNNEST($5::jsonb[]) AS new_execution_info + ) AS data_table + WHERE transactions.hash = data_table.hash + "#, + miniblock_number.0 as i32, + &l1_hashes, + &l1_indices_in_block, + &l1_errors, + &l1_execution_infos + ) + .execute(self.storage.conn()) + .await + .unwrap(); + } + }) + } + + pub fn mark_tx_as_rejected(&mut self, transaction_hash: H256, error: &str) { + async_std::task::block_on(async { + // If the rejected tx has been replaced, it means that this tx hash does not exist in the database + // and we will update nothing. + // These txs don't affect the state, so we can just easily skip this update. 
+ sqlx::query!( + "UPDATE transactions + SET error = $1, updated_at = now() + WHERE hash = $2", + error, + transaction_hash.0.to_vec() + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn reset_transactions_state(&mut self, miniblock_number: MiniblockNumber) { + async_std::task::block_on(async { + sqlx::query!( + "UPDATE transactions + SET l1_batch_number = NULL, miniblock_number = NULL, error = NULL, index_in_block = NULL, execution_info = '{}' + WHERE miniblock_number > $1", + miniblock_number.0 as i64 + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn remove_stuck_txs(&mut self, stuck_tx_timeout: Duration) -> usize { + async_std::task::block_on(async { + let stuck_tx_timeout = pg_interval_from_duration(stuck_tx_timeout); + sqlx::query!( + "DELETE FROM transactions \ + WHERE miniblock_number IS NULL AND received_at < now() - $1::interval \ + AND is_priority=false AND error IS NULL \ + RETURNING hash", + stuck_tx_timeout + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .len() + }) + } + + /// Fetches new updates for mempool + /// Returns new transactions and current nonces for related accounts + /// Latter is only used to bootstrap mempool for given account + pub fn sync_mempool( + &mut self, + stashed_accounts: Vec
, + purged_accounts: Vec
, + gas_per_pubdata: u32, + fee_per_gas: u64, + limit: usize, + ) -> (Vec, HashMap) { + async_std::task::block_on(async { + let stashed_addresses: Vec<_> = + stashed_accounts.into_iter().map(|a| a.0.to_vec()).collect(); + sqlx::query!( + "UPDATE transactions SET in_mempool = FALSE \ + FROM UNNEST ($1::bytea[]) AS s(address) \ + WHERE transactions.in_mempool = TRUE AND transactions.initiator_address = s.address", + &stashed_addresses, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + let purged_addresses: Vec<_> = + purged_accounts.into_iter().map(|a| a.0.to_vec()).collect(); + sqlx::query!( + "DELETE FROM transactions \ + WHERE in_mempool = TRUE AND initiator_address = ANY($1)", + &purged_addresses[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + let transactions = sqlx::query_as!( + StorageTransaction, + "UPDATE transactions + SET in_mempool = TRUE + FROM ( + SELECT hash + FROM transactions + WHERE miniblock_number IS NULL AND in_mempool = FALSE AND error IS NULL + AND (is_priority = TRUE OR (max_fee_per_gas >= $2 and gas_per_pubdata_limit >= $3)) + ORDER BY is_priority DESC, priority_op_id, received_at + LIMIT $1 + ) as subquery + WHERE transactions.hash = subquery.hash + RETURNING transactions.*", + limit as i32, + BigDecimal::from(fee_per_gas), + BigDecimal::from(gas_per_pubdata), + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + + let nonce_keys: HashMap<_, _> = transactions + .iter() + .map(|tx| { + let address = Address::from_slice(&tx.initiator_address); + let nonce_key = get_nonce_key(&address).hashed_key(); + (nonce_key, address) + }) + .collect(); + + let storage_keys: Vec<_> = nonce_keys.keys().map(|key| key.0.to_vec()).collect(); + let nonces: HashMap<_, _> = sqlx::query!( + r#"SELECT hashed_key, value as "value!" 
FROM storage WHERE hashed_key = ANY($1)"#, + &storage_keys, + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + let nonce_key = H256::from_slice(&row.hashed_key); + let nonce = Nonce(h256_to_u32(H256::from_slice(&row.value))); + + (*nonce_keys.get(&nonce_key).unwrap(), nonce) + }) + .collect(); + + ( + transactions.into_iter().map(|tx| tx.into()).collect(), + nonces, + ) + }) + } + + pub fn reset_mempool(&mut self) { + async_std::task::block_on(async { + sqlx::query!("UPDATE transactions SET in_mempool = FALSE WHERE in_mempool = TRUE") + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_last_processed_l1_block(&mut self) -> Option { + async_std::task::block_on(async { + sqlx::query!( + "SELECT l1_block_number FROM transactions + WHERE priority_op_id IS NOT NULL + ORDER BY priority_op_id DESC + LIMIT 1" + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .and_then(|x| x.l1_block_number.map(|block| L1BlockNumber(block as u32))) + }) + } + + pub fn last_priority_id(&mut self) -> Option { + async_std::task::block_on(async { + let op_id = sqlx::query!( + r#"SELECT MAX(priority_op_id) as "op_id" from transactions where is_priority = true"# + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap()? 
+ .op_id?; + Some(PriorityOpId(op_id as u64)) + }) + } + + pub fn next_priority_id(&mut self) -> PriorityOpId { + async_std::task::block_on(async { + sqlx::query!( + r#"SELECT MAX(priority_op_id) as "op_id" from transactions where is_priority = true AND miniblock_number IS NOT NULL"# + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .and_then(|row| row.op_id) + .map(|value| PriorityOpId((value + 1) as u64)) + .unwrap_or_default() + }) + } + + pub fn insert_trace(&mut self, hash: H256, trace: VmExecutionTrace) { + async_std::task::block_on(async { + sqlx::query!( + "INSERT INTO transaction_traces (tx_hash, trace, created_at, updated_at) VALUES ($1, $2, now(), now())", + hash.as_bytes(), + serde_json::to_value(trace).unwrap() + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_trace(&mut self, hash: H256) -> Option { + async_std::task::block_on(async { + let trace = sqlx::query!( + "SELECT trace FROM transaction_traces WHERE tx_hash = $1", + hash.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|record| record.trace); + trace.map(|trace| { + serde_json::from_value(trace) + .unwrap_or_else(|_| panic!("invalid trace json in database for {:?}", hash)) + }) + }) + } + + // Returns transactions that state_keeper needs to reexecute on restart. + // That is the transactions that are included to some miniblock, + // but not included to L1 batch. The order of the transactions is the same as it was + // during the previous execution. 
+ pub fn get_transactions_to_reexecute(&mut self) -> Vec<(MiniblockNumber, Vec)> { + async_std::task::block_on(async { + sqlx::query_as!( + StorageTransaction, + " + SELECT * FROM transactions + WHERE miniblock_number IS NOT NULL AND l1_batch_number IS NULL + ORDER BY miniblock_number, index_in_block + ", + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .group_by(|tx| tx.miniblock_number) + .into_iter() + .map(|(miniblock_number, txs)| { + ( + MiniblockNumber(miniblock_number.unwrap() as u32), + txs.map(Into::::into) + .collect::>(), + ) + }) + .collect() + }) + } + + pub fn get_tx_locations(&mut self, l1_batch_number: L1BatchNumber) -> TxLocations { + async_std::task::block_on(async { + sqlx::query!( + r#" + SELECT miniblock_number as "miniblock_number!", + hash, index_in_block as "index_in_block!", l1_batch_tx_index as "l1_batch_tx_index!" + FROM transactions + WHERE l1_batch_number = $1 + ORDER BY miniblock_number, index_in_block + "#, + l1_batch_number.0 as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .group_by(|tx| tx.miniblock_number) + .into_iter() + .map(|(miniblock_number, rows)| { + ( + MiniblockNumber(miniblock_number as u32), + rows.map(|row| (H256::from_slice(&row.hash), row.index_in_block as u32, row.l1_batch_tx_index as u16)) + .collect::>(), + ) + }) + .collect() + }) + } +} diff --git a/core/lib/dal/src/transactions_web3_dal.rs b/core/lib/dal/src/transactions_web3_dal.rs new file mode 100644 index 000000000000..904750c508e8 --- /dev/null +++ b/core/lib/dal/src/transactions_web3_dal.rs @@ -0,0 +1,348 @@ +use sqlx::types::chrono::NaiveDateTime; + +use zksync_types::{ + api::{ + BlockId, BlockNumber, L2ToL1Log, Log, Transaction, TransactionDetails, TransactionId, + TransactionReceipt, + }, + Address, L2ChainId, ACCOUNT_CODE_STORAGE_ADDRESS, FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH, + H160, H256, U256, U64, +}; +use zksync_utils::{bigdecimal_to_u256, h256_to_account_address}; + +use 
crate::models::{ + storage_block::{bind_block_where_sql_params, web3_block_where_sql}, + storage_event::{StorageL2ToL1Log, StorageWeb3Log}, + storage_transaction::{ + extract_web3_transaction, web3_transaction_select_sql, StorageTransactionDetails, + }, +}; +use crate::SqlxError; +use crate::StorageProcessor; + +pub struct TransactionsWeb3Dal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl TransactionsWeb3Dal<'_, '_> { + pub fn get_transaction_receipt( + &mut self, + hash: H256, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let receipt: Option = sqlx::query!( + r#" + WITH sl AS ( + SELECT * FROM storage_logs + WHERE storage_logs.address = $1 AND storage_logs.tx_hash = $2 + ORDER BY storage_logs.miniblock_number DESC, storage_logs.operation_number DESC + LIMIT 1 + ) + SELECT + transactions.hash as tx_hash, + transactions.index_in_block as index_in_block, + transactions.l1_batch_tx_index as l1_batch_tx_index, + transactions.miniblock_number as block_number, + transactions.error as error, + transactions.effective_gas_price as effective_gas_price, + transactions.initiator_address as initiator_address, + transactions.data->'to' as "transfer_to?", + transactions.data->'contractAddress' as "execute_contract_address?", + transactions.tx_format as "tx_format?", + transactions.refunded_gas as refunded_gas, + transactions.gas_limit as gas_limit, + miniblocks.hash as "block_hash?", + miniblocks.l1_batch_number as "l1_batch_number?", + sl.key as "contract_address?" + FROM transactions + LEFT JOIN miniblocks + ON miniblocks.number = transactions.miniblock_number + LEFT JOIN sl + ON sl.value != $3 + WHERE transactions.hash = $2 + "#, + ACCOUNT_CODE_STORAGE_ADDRESS.as_bytes(), + hash.0.to_vec(), + FAILED_CONTRACT_DEPLOYMENT_BYTECODE_HASH.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await? 
+ .map(|db_row| { + let status = match (db_row.block_number, db_row.error) { + (_, Some(_)) => Some(U64::from(0)), + (Some(_), None) => Some(U64::from(1)), + // tx not executed yet + _ => None, + }; + let tx_type = db_row.tx_format.map(U64::from).unwrap_or_default(); + let transaction_index = db_row.index_in_block.map(U64::from).unwrap_or_default(); + + TransactionReceipt { + transaction_hash: H256::from_slice(&db_row.tx_hash), + transaction_index, + block_hash: db_row + .block_hash + .clone() + .map(|bytes| H256::from_slice(&bytes)), + block_number: db_row.block_number.map(U64::from), + l1_batch_tx_index: db_row.l1_batch_tx_index.map(U64::from), + l1_batch_number: db_row.l1_batch_number.map(U64::from), + from: H160::from_slice(&db_row.initiator_address), + to: db_row + .transfer_to + .or(db_row.execute_contract_address) + .map(|addr| { + serde_json::from_value::
(addr) + .expect("invalid address value in the database") + }) + // For better compatibility with various clients, we never return null. + .or_else(|| Some(Address::default())), + cumulative_gas_used: Default::default(), + gas_used: { + let refunded_gas: U256 = db_row.refunded_gas.into(); + db_row.gas_limit.map(|val| { + let gas_limit = bigdecimal_to_u256(val); + gas_limit - refunded_gas + }) + }, + effective_gas_price: Some( + db_row + .effective_gas_price + .map(bigdecimal_to_u256) + .unwrap_or_default(), + ), + contract_address: db_row + .contract_address + .map(|addr| h256_to_account_address(&H256::from_slice(&addr))), + logs: vec![], + l2_to_l1_logs: vec![], + status, + root: db_row.block_hash.map(|bytes| H256::from_slice(&bytes)), + logs_bloom: Default::default(), + // Even though the Rust SDK recommends us to supply "None" for legacy transactions + // we always supply some number anyway to have the same behaviour as most popular RPCs + transaction_type: Some(tx_type), + } + }); + match receipt { + Some(mut receipt) => { + let logs: Vec = sqlx::query_as!( + StorageWeb3Log, + r#" + SELECT + address, topic1, topic2, topic3, topic4, value, + Null::bytea as "block_hash", Null::bigint as "l1_batch_number?", + miniblock_number, tx_hash, tx_index_in_block, + event_index_in_block, event_index_in_tx + FROM events + WHERE tx_hash = $1 + ORDER BY miniblock_number ASC, event_index_in_block ASC + "#, + hash.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await? 
+ .into_iter() + .map(|storage_log: StorageWeb3Log| { + let mut log = Log::from(storage_log); + log.block_hash = receipt.block_hash; + log.l1_batch_number = receipt.l1_batch_number; + log + }) + .collect(); + receipt.logs = logs; + + let l2_to_l1_logs: Vec = sqlx::query_as!( + StorageL2ToL1Log, + r#" + SELECT + miniblock_number, log_index_in_miniblock, log_index_in_tx, tx_hash, + Null::bytea as "block_hash", Null::bigint as "l1_batch_number?", + shard_id, is_service, tx_index_in_miniblock, tx_index_in_l1_batch, sender, key, value + FROM l2_to_l1_logs + WHERE tx_hash = $1 + ORDER BY log_index_in_tx ASC + "#, + hash.as_bytes() + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|storage_l2_to_l1_log: StorageL2ToL1Log| { + let mut l2_to_l1_log = L2ToL1Log::from(storage_l2_to_l1_log); + l2_to_l1_log.block_hash = receipt.block_hash; + l2_to_l1_log.l1_batch_number = receipt.l1_batch_number; + l2_to_l1_log + }) + .collect(); + receipt.l2_to_l1_logs = l2_to_l1_logs; + + Ok(Some(receipt)) + } + None => Ok(None), + } + }) + } + + pub fn get_transaction( + &mut self, + transaction_id: TransactionId, + chain_id: L2ChainId, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let where_sql = match transaction_id { + TransactionId::Hash(_) => "transactions.hash = $1".to_owned(), + TransactionId::Block(block_id, _) => { + format!( + "transactions.index_in_block = $1 AND {}", + web3_block_where_sql(block_id, 2) + ) + } + }; + let query = format!( + r#" + SELECT + {} + FROM transactions + LEFT JOIN miniblocks + ON miniblocks.number = transactions.miniblock_number + WHERE {} + "#, + web3_transaction_select_sql(), + where_sql + ); + let query = sqlx::query(&query); + + let query = match transaction_id { + TransactionId::Hash(tx_hash) => query.bind(tx_hash.0.to_vec()), + TransactionId::Block(block_id, tx_index) => { + let tx_index = if tx_index.as_u64() > i32::MAX as u64 { + return Ok(None); + } else { + tx_index.as_u64() as i32 + }; + 
bind_block_where_sql_params(block_id, query.bind(tx_index)) + } + }; + + let tx = query + .fetch_optional(self.storage.conn()) + .await? + .map(|row| extract_web3_transaction(row, chain_id)); + Ok(tx) + }) + } + + pub fn get_transaction_details( + &mut self, + hash: H256, + ) -> Result, SqlxError> { + async_std::task::block_on(async { + let storage_tx_details: Option = sqlx::query_as!( + StorageTransactionDetails, + r#" + SELECT transactions.*, + miniblocks.hash as "block_hash?", + commit_tx.tx_hash as "eth_commit_tx_hash?", + prove_tx.tx_hash as "eth_prove_tx_hash?", + execute_tx.tx_hash as "eth_execute_tx_hash?" + FROM transactions + LEFT JOIN miniblocks ON miniblocks.number = transactions.miniblock_number + LEFT JOIN l1_batches ON l1_batches.number = miniblocks.l1_batch_number + LEFT JOIN eth_txs_history as commit_tx ON (l1_batches.eth_commit_tx_id = commit_tx.eth_tx_id AND commit_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as prove_tx ON (l1_batches.eth_prove_tx_id = prove_tx.eth_tx_id AND prove_tx.confirmed_at IS NOT NULL) + LEFT JOIN eth_txs_history as execute_tx ON (l1_batches.eth_execute_tx_id = execute_tx.eth_tx_id AND execute_tx.confirmed_at IS NOT NULL) + WHERE transactions.hash = $1 + "#, + hash.as_bytes() + ) + .fetch_optional(self.storage.conn()) + .await?; + + let tx = storage_tx_details.map(|tx_details| tx_details.into()); + + Ok(tx) + }) + } + + /// Returns hashes of txs which were received after `from_timestamp` and the time of receiving the last tx. 
+ pub fn get_pending_txs_hashes_after( + &mut self, + from_timestamp: NaiveDateTime, + limit: Option, + ) -> Result<(Vec, Option), SqlxError> { + async_std::task::block_on(async { + let records = sqlx::query!( + " + SELECT transactions.hash, transactions.received_at + FROM transactions + LEFT JOIN miniblocks ON miniblocks.number = miniblock_number + WHERE received_at > $1 + ORDER BY received_at ASC + LIMIT $2 + ", + from_timestamp, + limit.map(|l| l as i64) + ) + .fetch_all(self.storage.conn()) + .await?; + let last_loc = records.last().map(|record| record.received_at); + let hashes = records + .into_iter() + .map(|record| H256::from_slice(&record.hash)) + .collect(); + Ok((hashes, last_loc)) + }) + } + + pub fn next_nonce_by_initiator_account( + &mut self, + initiator_address: Address, + ) -> Result { + async_std::task::block_on(async { + let latest_nonce = self + .storage + .storage_web3_dal() + .get_address_historical_nonce( + initiator_address, + BlockId::Number(BlockNumber::Latest), + )? + .expect("Failed to get `latest` nonce") + .as_u64(); + + // Get nonces of non-rejected transactions, starting from the 'latest' nonce. + // `latest` nonce is used, because it is guaranteed that there are no gaps before it. + // `(miniblock_number IS NOT NULL OR error IS NULL)` is the condition that filters non-rejected transactions. + // Query is fast because we have an index on (`initiator_address`, `nonce`) + // and it cannot return more than `max_nonce_ahead` nonces. + let non_rejected_nonces: Vec = sqlx::query!( + r#" + SELECT nonce as "nonce!" FROM transactions + WHERE initiator_address = $1 AND nonce >= $2 + AND is_priority = FALSE + AND (miniblock_number IS NOT NULL OR error IS NULL) + ORDER BY nonce + "#, + initiator_address.0.to_vec(), + latest_nonce as i64 + ) + .fetch_all(self.storage.conn()) + .await? + .into_iter() + .map(|row| row.nonce as u64) + .collect(); + + // Find pending nonce as the first "gap" in nonces. 
+ let mut pending_nonce = latest_nonce; + for nonce in non_rejected_nonces { + if pending_nonce == nonce { + pending_nonce += 1; + } else { + break; + } + } + + Ok(U256::from(pending_nonce)) + }) + } +} diff --git a/core/lib/dal/src/witness_generator_dal.rs b/core/lib/dal/src/witness_generator_dal.rs new file mode 100644 index 000000000000..ad95917dd7dd --- /dev/null +++ b/core/lib/dal/src/witness_generator_dal.rs @@ -0,0 +1,978 @@ +use std::collections::HashMap; +use std::ops::Range; +use std::time::{Duration, Instant}; + +use itertools::Itertools; +use sqlx::Row; + +use crate::models::storage_witness_job_info::StorageWitnessJobInfo; +use zksync_object_store::gcs_utils::merkle_tree_paths_blob_url; +use zksync_object_store::gcs_utils::{ + aggregation_outputs_blob_url, basic_circuits_blob_url, basic_circuits_inputs_blob_url, + final_node_aggregations_blob_url, leaf_layer_subqueues_blob_url, scheduler_witness_blob_url, +}; +use zksync_types::proofs::{ + AggregationRound, JobCountStatistics, WitnessGeneratorJobMetadata, WitnessJobInfo, +}; +use zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zksync_types::zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncProof; +use zksync_types::zkevm_test_harness::bellman::bn256::Bn256; +use zksync_types::zkevm_test_harness::bellman::plonk::better_better_cs::proof::Proof; +use zksync_types::zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zksync_types::L1BatchNumber; + +use crate::time_utils::{duration_to_naive_time, pg_interval_from_duration}; +use crate::StorageProcessor; + +#[derive(Debug)] +pub struct WitnessGeneratorDal<'a, 'c> { + pub storage: &'a mut StorageProcessor<'c>, +} + +impl WitnessGeneratorDal<'_, '_> { + pub fn get_next_basic_circuit_witness_job( + &mut self, + processing_timeout: Duration, + max_attempts: u32, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + 
let result: Option = sqlx::query!( + " + UPDATE witness_inputs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE l1_batch_number = ( + SELECT l1_batch_number + FROM witness_inputs + WHERE status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ORDER BY l1_batch_number ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING witness_inputs.* + ", + &processing_timeout, + max_attempts as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| WitnessGeneratorJobMetadata { + block_number: L1BatchNumber(row.l1_batch_number as u32), + proofs: vec![], + }); + + result + }) + } + + pub fn get_next_leaf_aggregation_witness_job( + &mut self, + processing_timeout: Duration, + max_attempts: u32, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + sqlx::query!( + " + UPDATE leaf_aggregation_witness_jobs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE l1_batch_number = ( + SELECT l1_batch_number + FROM leaf_aggregation_witness_jobs + WHERE status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ORDER BY l1_batch_number ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING leaf_aggregation_witness_jobs.* + ", &processing_timeout, + max_attempts as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| { + let l1_batch_number = L1BatchNumber(row.l1_batch_number as u32); + let number_of_basic_circuits = row.number_of_basic_circuits; + + // Now that we have a job in `queued` status, we need to enrich it with the computed proofs. + // We select `aggregation_round = 0` to only get basic circuits. 
+ // Note that at this point there cannot be any other circuits anyway, + // but we keep the check for explicitness + let basic_circuits_proofs: Vec< + Proof>>, + > = self.load_proofs_for_block(l1_batch_number, AggregationRound::BasicCircuits); + + assert_eq!( + basic_circuits_proofs.len(), + number_of_basic_circuits as usize, + "leaf_aggregation_witness_job for l1 batch {} is in status `queued`, but there are only {} computed basic proofs, which is different from expected {}", + l1_batch_number, + basic_circuits_proofs.len(), + number_of_basic_circuits + ); + + WitnessGeneratorJobMetadata { + block_number: l1_batch_number, + proofs: basic_circuits_proofs + } + }) + }) + } + + pub fn get_next_node_aggregation_witness_job( + &mut self, + processing_timeout: Duration, + max_attempts: u32, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + sqlx::query!( + " + UPDATE node_aggregation_witness_jobs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE l1_batch_number = ( + SELECT l1_batch_number + FROM node_aggregation_witness_jobs + WHERE status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ORDER BY l1_batch_number ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING node_aggregation_witness_jobs.* + ", &processing_timeout, + max_attempts as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| { + let l1_batch_number = L1BatchNumber(row.l1_batch_number as u32); + let number_of_leaf_circuits = row.number_of_leaf_circuits.expect("number_of_leaf_circuits is not found in a `queued` `node_aggregation_witness_jobs` job"); + + // Now that we have a job in `queued` status, we need to enrich it with the computed proofs. 
+ // We select `aggregation_round = 1` to only get leaf aggregation circuits + let leaf_circuits_proofs: Vec< + Proof>>, + > = self.load_proofs_for_block(l1_batch_number, AggregationRound::LeafAggregation); + + assert_eq!( + leaf_circuits_proofs.len(), + number_of_leaf_circuits as usize, + "node_aggregation_witness_job for l1 batch {} is in status `queued`, but there are only {} computed leaf proofs, which is different from expected {}", + l1_batch_number, + leaf_circuits_proofs.len(), + number_of_leaf_circuits + ); + WitnessGeneratorJobMetadata { + block_number: l1_batch_number, + proofs: leaf_circuits_proofs + } + }) + }) + } + + pub fn get_next_scheduler_witness_job( + &mut self, + processing_timeout: Duration, + max_attempts: u32, + ) -> Option { + async_std::task::block_on(async { + let processing_timeout = pg_interval_from_duration(processing_timeout); + sqlx::query!( + " + UPDATE scheduler_witness_jobs + SET status = 'in_progress', attempts = attempts + 1, + updated_at = now(), processing_started_at = now() + WHERE l1_batch_number = ( + SELECT l1_batch_number + FROM scheduler_witness_jobs + WHERE status = 'queued' + OR (status = 'in_progress' AND processing_started_at < now() - $1::interval) + OR (status = 'failed' AND attempts < $2) + ORDER BY l1_batch_number ASC + LIMIT 1 + FOR UPDATE + SKIP LOCKED + ) + RETURNING scheduler_witness_jobs.* + ", &processing_timeout, + max_attempts as i32 + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap() + .map(|row| { + let l1_batch_number = L1BatchNumber(row.l1_batch_number as u32); + // Now that we have a job in `queued` status, we need to enrich it with the computed proof. 
+ // We select `aggregation_round = 2` to only get node aggregation circuits + let leaf_circuits_proofs: Vec< + Proof>>, + > = self.load_proofs_for_block(l1_batch_number, AggregationRound::NodeAggregation); + + assert_eq!( + leaf_circuits_proofs.len(), + 1usize, + "scheduler_job for l1 batch {} is in status `queued`, but there is {} computed node proofs. We expect exactly one node proof.", + l1_batch_number.0, + leaf_circuits_proofs.len() + ); + + WitnessGeneratorJobMetadata { + block_number: l1_batch_number, + proofs: leaf_circuits_proofs + } + }) + }) + } + + fn load_proofs_for_block( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) -> Vec>>> { + async_std::task::block_on(async { + sqlx::query!( + " + SELECT circuit_type, result from prover_jobs + WHERE l1_batch_number = $1 AND status = 'successful' AND aggregation_round = $2 + ORDER BY sequence_number ASC; + ", + block_number.0 as i64, + aggregation_round as i64 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| { + ZkSyncProof::into_proof(bincode::deserialize::>( + &row.result + .expect("prove_job with `successful` status has no result"), + ) + .expect("cannot deserialize proof")) + }) + .collect::>>>>() + }) + } + + pub fn mark_witness_job_as_successful( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + time_taken: Duration, + ) { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + let sql = format!( + "UPDATE {} + SET status = 'successful', updated_at = now(), time_taken = $1 + WHERE l1_batch_number = $2", + table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(duration_to_naive_time(time_taken)); + query = query.bind(block_number.0 as i64); + + query.execute(self.storage.conn()).await.unwrap(); + }); + } + + /// Is invoked by the prover when all the required proofs are computed + pub fn mark_witness_job_as_queued( + &mut self, + 
block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + let sql = format!( + "UPDATE {} + SET status = 'queued', updated_at = now() + WHERE l1_batch_number = $1", + table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(block_number.0 as i64); + + query.execute(self.storage.conn()).await.unwrap(); + }); + } + + pub fn mark_witness_job_as_skipped( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + let sql = format!( + "UPDATE {} + SET status = 'skipped', updated_at = now() + WHERE l1_batch_number = $1", + table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(block_number.0 as i64); + + let mut transaction = self.storage.start_transaction().await; + query.execute(transaction.conn()).await.unwrap(); + + transaction + .blocks_dal() + .set_skip_proof_for_l1_batch(block_number); + transaction.commit().await; + }); + } + + /// Is invoked by the Witness Generator when the previous aggregation round is complete + pub fn mark_witness_job_as_waiting_for_proofs( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + let sql = format!( + "UPDATE {} + SET status = 'waiting_for_proofs', updated_at = now() + WHERE l1_batch_number = $1", + table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(block_number.0 as i64); + + query.execute(self.storage.conn()).await.unwrap(); + }); + } + + pub fn mark_witness_job_as_failed( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + time_taken: Duration, + error: String, + max_attempts: u32, + ) { + async_std::task::block_on(async { + let table_name = 
Self::input_table_name_for(aggregation_round); + let sql = format!( + "UPDATE {} + SET status = 'failed', updated_at = now(), time_taken = $1, error = $2 + WHERE l1_batch_number = $3 + RETURNING attempts + ", + table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(duration_to_naive_time(time_taken)); + query = query.bind(error); + query = query.bind(block_number.0 as i64); + + let mut transaction = self.storage.start_transaction().await; + let attempts = query + .fetch_one(transaction.conn()) + .await + .unwrap() + .get::("attempts"); + if attempts as u32 >= max_attempts { + transaction + .blocks_dal() + .set_skip_proof_for_l1_batch(block_number); + } + transaction.commit().await; + }) + } + + /// Creates a leaf_aggregation_job in `waiting_for_proofs` status, + /// and also a node_aggregation_job and scheduler_job in `waiting_for_artifacts` status. + /// The jobs will be advanced to `waiting_for_proofs` by the `Witness Generator` when the corresponding artifacts are computed, + /// and to `queued` by the `Prover` when all the dependency proofs are computed + pub fn create_aggregation_jobs( + &mut self, + block_number: L1BatchNumber, + number_of_basic_circuits: usize, + ) { + async_std::task::block_on(async { + let started_at = Instant::now(); + + sqlx::query!( + " + INSERT INTO leaf_aggregation_witness_jobs + (l1_batch_number, basic_circuits, basic_circuits_inputs, basic_circuits_blob_url, basic_circuits_inputs_blob_url, number_of_basic_circuits, status, created_at, updated_at) + VALUES ($1, $2, $3, $4, $5, $6, 'waiting_for_proofs', now(), now()) + ", + block_number.0 as i64, + vec![], + vec![], + basic_circuits_blob_url(block_number), + basic_circuits_inputs_blob_url(block_number), + number_of_basic_circuits as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + sqlx::query!( + " + INSERT INTO node_aggregation_witness_jobs + (l1_batch_number, status, created_at, updated_at) + VALUES ($1, 'waiting_for_artifacts', now(), now()) + 
", + block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + sqlx::query!( + " + INSERT INTO scheduler_witness_jobs + (l1_batch_number, scheduler_witness, scheduler_witness_blob_url, status, created_at, updated_at) + VALUES ($1, $2, $3, 'waiting_for_artifacts', now(), now()) + ", + block_number.0 as i64, + vec![], + scheduler_witness_blob_url(block_number), + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "create_aggregation_jobs"); + }) + } + + /// Saves artifacts in node_aggregation_job + /// and advances it to `waiting_for_proofs` status + /// it will be advanced to `queued` by the prover when all the dependency proofs are computed + pub fn save_leaf_aggregation_artifacts( + &mut self, + block_number: L1BatchNumber, + number_of_leaf_circuits: usize, + ) { + async_std::task::block_on(async { + let started_at = Instant::now(); + sqlx::query!( + " + UPDATE node_aggregation_witness_jobs + SET number_of_leaf_circuits = $1, + leaf_layer_subqueues_blob_url = $3, + aggregation_outputs_blob_url = $4, + status = 'waiting_for_proofs', + updated_at = now() + WHERE l1_batch_number = $2 + ", + number_of_leaf_circuits as i64, + block_number.0 as i64, + leaf_layer_subqueues_blob_url(block_number), + aggregation_outputs_blob_url(block_number), + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "save_leaf_aggregation_artifacts"); + }) + } + + /// Saves artifacts in scheduler_artifacts_jobs` + /// and advances it to `waiting_for_proofs` status + /// it will be advanced to `queued` by the prover when all the dependency proofs are computed + pub fn save_node_aggregation_artifacts(&mut self, block_number: L1BatchNumber) { + async_std::task::block_on(async { + let started_at = Instant::now(); + sqlx::query!( + " + UPDATE scheduler_witness_jobs + SET final_node_aggregations_blob_url = $2, status = 
'waiting_for_proofs', + updated_at = now() + WHERE l1_batch_number = $1 + ", + block_number.0 as i64, + final_node_aggregations_blob_url(block_number), + ) + .execute(self.storage.conn()) + .await + .unwrap(); + + metrics::histogram!("dal.request", started_at.elapsed(), "method" => "save_node_aggregation_artifacts"); + }) + } + + pub fn save_final_aggregation_result( + &mut self, + block_number: L1BatchNumber, + aggregation_result_coords: [[u8; 32]; 4], + ) { + async_std::task::block_on(async { + let aggregation_result_coords_serialized = + bincode::serialize(&aggregation_result_coords) + .expect("cannot serialize aggregation_result_coords"); + sqlx::query!( + " + UPDATE scheduler_witness_jobs + SET aggregation_result_coords = $1, + updated_at = now() + WHERE l1_batch_number = $2 + ", + aggregation_result_coords_serialized, + block_number.0 as i64, + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_witness_jobs_stats( + &mut self, + aggregation_round: AggregationRound, + ) -> JobCountStatistics { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + let sql = format!( + r#" + SELECT COUNT(*) as "count", status as "status" + FROM {} + GROUP BY status + "#, + table_name + ); + let mut results: HashMap = sqlx::query(&sql) + .fetch_all(self.storage.conn()) + .await + .unwrap() + .into_iter() + .map(|row| (row.get("status"), row.get::("count"))) + .collect::>(); + + JobCountStatistics { + queued: results.remove("queued").unwrap_or(0i64) as usize, + in_progress: results.remove("in_progress").unwrap_or(0i64) as usize, + failed: results.remove("failed").unwrap_or(0i64) as usize, + successful: results.remove("successful").unwrap_or(0i64) as usize, + } + }) + } + + pub fn required_proofs_count( + &mut self, + block_number: L1BatchNumber, + aggregation_round: AggregationRound, + ) -> usize { + async_std::task::block_on(async { + let table_name = Self::input_table_name_for(aggregation_round); + 
let circuits_number_input_name = match aggregation_round { + // Basic circuit job doesn't have any pre-requirements + AggregationRound::BasicCircuits => unreachable!(), + AggregationRound::LeafAggregation => "number_of_basic_circuits", + AggregationRound::NodeAggregation => "number_of_leaf_circuits", + // There is always just one final node circuit + AggregationRound::Scheduler => return 1, + }; + let sql = format!( + r#" + SELECT {} as "count" + FROM {} + WHERE l1_batch_number = $1 + "#, + circuits_number_input_name, table_name + ); + let mut query = sqlx::query(&sql); + query = query.bind(block_number.0 as i64); + query + .fetch_one(self.storage.conn()) + .await + .unwrap() + .get::("count") as usize + }) + } + + fn input_table_name_for(aggregation_round: AggregationRound) -> &'static str { + match aggregation_round { + AggregationRound::BasicCircuits => "witness_inputs", + AggregationRound::LeafAggregation => "leaf_aggregation_witness_jobs", + AggregationRound::NodeAggregation => "node_aggregation_witness_jobs", + AggregationRound::Scheduler => "scheduler_witness_jobs", + } + } + + pub fn get_jobs( + &mut self, + opts: GetWitnessJobsParams, + ) -> Result, sqlx::Error> { + struct SqlSlice { + columns: String, + table_name: String, + } + + impl SqlSlice { + fn new(ar: u32, table_name: String) -> SqlSlice { + SqlSlice { + columns: format!( + "{} as aggregation_round, + l1_batch_number, + created_at, + updated_at, + status, + time_taken, + processing_started_at, + error, + attempts", + ar + ), + table_name, + } + } + + fn sql(&self, opts: &GetWitnessJobsParams) -> String { + let where_blocks = opts + .blocks + .as_ref() + .map(|b| format!("AND l1_batch_number BETWEEN {} AND {}", b.start, b.end)) + .unwrap_or_default(); + + format!( + "SELECT {} + FROM {} + WHERE 1 = 1 -- Where clause can't be empty + {where_blocks}", + self.columns, self.table_name + ) + } + } + + let slices = vec![ + SqlSlice::new(0, "witness_inputs".to_string()), + SqlSlice::new(1, 
"leaf_aggregation_witness_jobs".to_string()), + SqlSlice::new(2, "node_aggregation_witness_jobs".to_string()), + SqlSlice::new(3, "scheduler_witness_jobs".to_string()), + ]; + + let sql = slices.iter().map(move |x| x.sql(&opts)).join(" UNION "); + + let query = sqlx::query_as(&sql); + + let x = + async_std::task::block_on(async move { query.fetch_all(self.storage.conn()).await }); + + Ok(x? + .into_iter() + .map(|x: StorageWitnessJobInfo| x.into()) + .collect()) + } + + pub fn get_leaf_aggregation_l1_batches_with_blobs_in_db( + &mut self, + limit: u8, + ) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query!( + r#" + SELECT l1_batch_number FROM leaf_aggregation_witness_jobs + WHERE length(basic_circuits) <> 0 + OR length(basic_circuits_inputs) <> 0 + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .collect() + }) + } + + pub fn save_witness_inputs(&mut self, block_number: L1BatchNumber) { + async_std::task::block_on(async { + sqlx::query!( + "INSERT INTO witness_inputs(l1_batch_number, merkle_tree_paths, merkel_tree_paths_blob_url, status, created_at, updated_at) \ + VALUES ($1, $2, $3, 'queued', now(), now()) + ON CONFLICT (l1_batch_number) DO NOTHING", + block_number.0 as i64, + vec![], + merkle_tree_paths_blob_url(block_number), + ) + .fetch_optional(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn purge_leaf_aggregation_blobs_from_db(&mut self, l1_batches: Vec) { + let l1_batches: Vec = l1_batches + .iter() + .map(|l1_batch| l1_batch.0 as i64) + .collect(); + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE leaf_aggregation_witness_jobs + SET basic_circuits='', + basic_circuits_inputs='' + WHERE l1_batch_number = ANY($1); + "#, + &l1_batches[..] 
+ ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_node_aggregation_l1_batches_with_blobs_in_db( + &mut self, + limit: u8, + ) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query!( + r#" + SELECT l1_batch_number FROM node_aggregation_witness_jobs + WHERE length(leaf_layer_subqueues) <> 0 + OR length(aggregation_outputs) <> 0 + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .collect() + }) + } + + pub fn purge_node_aggregation_blobs_from_db(&mut self, l1_batches: Vec) { + let l1_batches: Vec = l1_batches + .iter() + .map(|l1_batch| l1_batch.0 as i64) + .collect(); + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE node_aggregation_witness_jobs + SET leaf_layer_subqueues='', + aggregation_outputs='' + WHERE l1_batch_number = ANY($1); + "#, + &l1_batches[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_scheduler_l1_batches_with_blobs_in_db(&mut self, limit: u8) -> Vec { + async_std::task::block_on(async { + let l1_batches = sqlx::query!( + r#" + SELECT l1_batch_number FROM scheduler_witness_jobs + WHERE length(final_node_aggregations) <> 0 + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + l1_batches + .into_iter() + .map(|row| L1BatchNumber(row.l1_batch_number as u32)) + .collect() + }) + } + + pub fn purge_scheduler_blobs_from_db(&mut self, l1_batches: Vec) { + let l1_batches: Vec = l1_batches + .iter() + .map(|l1_batch| l1_batch.0 as i64) + .collect(); + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE scheduler_witness_jobs + SET final_node_aggregations='' + WHERE l1_batch_number = ANY($1); + "#, + &l1_batches[..] 
+ ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn get_basic_circuit_and_circuit_inputs_blob_urls_to_be_cleaned( + &mut self, + limit: u8, + ) -> Vec<(i64, (String, String))> { + async_std::task::block_on(async { + let job_ids = sqlx::query!( + r#" + SELECT l1_batch_number, basic_circuits_blob_url, basic_circuits_inputs_blob_url FROM leaf_aggregation_witness_jobs + WHERE status='successful' AND is_blob_cleaned=FALSE + AND basic_circuits_blob_url is NOT NULL + AND basic_circuits_inputs_blob_url is NOT NULL + AND updated_at < NOW() - INTERVAL '2 days' + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids + .into_iter() + .map(|row| { + ( + row.l1_batch_number, + ( + row.basic_circuits_blob_url.unwrap(), + row.basic_circuits_inputs_blob_url.unwrap(), + ), + ) + }) + .collect() + }) + } + + pub fn get_leaf_layer_subqueues_and_aggregation_outputs_blob_urls_to_be_cleaned( + &mut self, + limit: u8, + ) -> Vec<(i64, (String, String))> { + async_std::task::block_on(async { + let job_ids = sqlx::query!( + r#" + SELECT l1_batch_number, leaf_layer_subqueues_blob_url, aggregation_outputs_blob_url FROM node_aggregation_witness_jobs + WHERE status='successful' AND is_blob_cleaned=FALSE + AND leaf_layer_subqueues_blob_url is NOT NULL + AND aggregation_outputs_blob_url is NOT NULL + AND updated_at < NOW() - INTERVAL '2 days' + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids + .into_iter() + .map(|row| { + ( + row.l1_batch_number, + ( + row.leaf_layer_subqueues_blob_url.unwrap(), + row.aggregation_outputs_blob_url.unwrap(), + ), + ) + }) + .collect() + }) + } + + pub fn get_scheduler_witness_and_node_aggregations_blob_urls_to_be_cleaned( + &mut self, + limit: u8, + ) -> Vec<(i64, (String, String))> { + async_std::task::block_on(async { + let job_ids = sqlx::query!( + r#" + SELECT l1_batch_number, scheduler_witness_blob_url, final_node_aggregations_blob_url 
FROM scheduler_witness_jobs + WHERE status='successful' AND is_blob_cleaned=FALSE + AND updated_at < NOW() - INTERVAL '2 days' + AND scheduler_witness_blob_url is NOT NULL + AND final_node_aggregations_blob_url is NOT NULL + LIMIT $1; + "#, + limit as i32 + ) + .fetch_all(self.storage.conn()) + .await + .unwrap(); + job_ids + .into_iter() + .map(|row| { + ( + row.l1_batch_number, + ( + row.scheduler_witness_blob_url.unwrap(), + row.final_node_aggregations_blob_url.unwrap(), + ), + ) + }) + .collect() + }) + } + + pub fn mark_leaf_aggregation_gcs_blobs_as_cleaned(&mut self, l1_batch_numbers: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE leaf_aggregation_witness_jobs + SET is_blob_cleaned=TRUE + WHERE l1_batch_number = ANY($1); + "#, + &l1_batch_numbers[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn mark_node_aggregation_gcs_blobs_as_cleaned(&mut self, l1_batch_numbers: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE node_aggregation_witness_jobs + SET is_blob_cleaned=TRUE + WHERE l1_batch_number = ANY($1); + "#, + &l1_batch_numbers[..] + ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } + + pub fn mark_scheduler_witness_gcs_blobs_as_cleaned(&mut self, l1_batch_numbers: Vec) { + async_std::task::block_on(async { + sqlx::query!( + r#" + UPDATE scheduler_witness_jobs + SET is_blob_cleaned=TRUE + WHERE l1_batch_number = ANY($1); + "#, + &l1_batch_numbers[..] 
+ ) + .execute(self.storage.conn()) + .await + .unwrap(); + }) + } +} + +pub struct GetWitnessJobsParams { + pub blocks: Option>, +} diff --git a/core/lib/db_test_macro/Cargo.toml b/core/lib/db_test_macro/Cargo.toml new file mode 100644 index 000000000000..201990f7a2ca --- /dev/null +++ b/core/lib/db_test_macro/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "db_test_macro" +version = "0.1.0" +authors = ["The Matter Labs Team "] +edition = "2018" + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = "1.0.7" +quote = "1" +syn = { version = "1.0.3", features = ["full"] } diff --git a/core/lib/db_test_macro/src/lib.rs b/core/lib/db_test_macro/src/lib.rs new file mode 100644 index 000000000000..32f9dd64aaec --- /dev/null +++ b/core/lib/db_test_macro/src/lib.rs @@ -0,0 +1,134 @@ +use proc_macro::TokenStream; +use quote::quote; +use syn::{ + parse::{Parse, ParseStream}, + punctuated::Punctuated, + Ident, Token, +}; + +/// Argument that can be supplied to the `db_test` macro to be used in the `zksync_dal` crate. +const DAL_CRATE_MARKER_ARG: &str = "dal_crate"; +/// Name of the type that represents the connection pool in DAL. 
+const TYPE_NAME: &str = "ConnectionPool"; + +#[derive(Debug)] +struct Args { + vars: Vec, +} + +impl Parse for Args { + fn parse(input: ParseStream) -> syn::Result { + let vars = Punctuated::::parse_terminated(input)?; + Ok(Args { + vars: vars.into_iter().collect(), + }) + } +} + +fn parse_connection_pool_arg_name(arg: Option<&syn::FnArg>) -> Result { + if let Some(syn::FnArg::Typed(arg)) = arg { + if let syn::Pat::Ident(ident) = arg.pat.as_ref() { + if let syn::Type::Path(path_type) = arg.ty.as_ref() { + if path_type.path.is_ident(TYPE_NAME) { + return Ok(ident.clone()); + } + } + } + } + Err(()) +} + +fn parse_knobs(mut input: syn::ItemFn, inside_dal_crate: bool) -> Result { + let sig = &mut input.sig; + let body = &input.block; + let attrs = &input.attrs; + let vis = input.vis; + + if sig.asyncness.is_none() { + let msg = "the async keyword is missing from the function declaration"; + return Err(syn::Error::new_spanned(sig.fn_token, msg)); + } + + sig.asyncness = None; + + let argument_name = parse_connection_pool_arg_name(sig.inputs.first()); + if sig.inputs.len() != 1 || argument_name.is_err() { + let msg = format!( + "the DB test function must take a single argument of type {}", + TYPE_NAME + ); + return Err(syn::Error::new_spanned(&sig.inputs, msg)); + } + + // Remove argument, as the test function must not have one. + sig.inputs.pop(); + + // We've checked that argument is OK above. + let argument_name = argument_name.unwrap(); + + let rt = quote! { tokio::runtime::Builder::new_current_thread() }; + + let header = quote! { + #[::core::prelude::v1::test] + }; + + let dal_crate_id = if inside_dal_crate { + quote! { crate } + } else { + quote! { zksync_dal } + }; + + let result = quote! 
{ + #header + #(#attrs)* + #vis #sig { + use #dal_crate_id::connection::test_pool::Connection; + + #rt + .enable_all() + .build() + .unwrap() + .block_on(async { + let mut __connection = #dal_crate_id::connection::TestPool::connect_to_test_db().await; + let mut __transaction = __connection.begin().await.unwrap(); + let mut #argument_name = unsafe { + #dal_crate_id::ConnectionPool::Test(#dal_crate_id::connection::TestPool::new(&mut __transaction).await) + }; + + { + #body + } + }) + } + }; + + Ok(result.into()) +} + +#[proc_macro_attribute] +pub fn db_test(raw_args: TokenStream, item: TokenStream) -> TokenStream { + let input = syn::parse_macro_input!(item as syn::ItemFn); + let args = syn::parse_macro_input!(raw_args as Args); + + // There may be only one argument, and it should match the exact expected value. + if args.vars.len() > 1 || (args.vars.len() == 1 && args.vars[0] != DAL_CRATE_MARKER_ARG) { + let msg = format!("only '{DAL_CRATE_MARKER_ARG}' argument is supported"); + return syn::Error::new_spanned(&args.vars[0], msg) + .to_compile_error() + .into(); + } + let inside_dal_crate = args + .vars + .first() + .map(|arg| arg == DAL_CRATE_MARKER_ARG) + .unwrap_or(false); + + for attr in &input.attrs { + if attr.path.is_ident("test") { + let msg = "second test attribute is supplied"; + return syn::Error::new_spanned(attr, msg).to_compile_error().into(); + } + } + + parse_knobs(input, inside_dal_crate).unwrap_or_else(|e| e.to_compile_error().into()) +} diff --git a/core/lib/eth_client/Cargo.toml b/core/lib/eth_client/Cargo.toml new file mode 100644 index 000000000000..a01b7d38dffc --- /dev/null +++ b/core/lib/eth_client/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "zksync_eth_client" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] 
+zksync_types = { path = "../types", version = "1.0" } +zksync_eth_signer = { path = "../eth_signer", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } +zksync_contracts = { path = "../contracts", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +jsonrpc-core = "18" + +serde = "1.0.90" +parity-crypto = { version = "0.9", features = ["publickey"] } +hex = "0.4" + +anyhow = "1.0" +metrics = "0.20" +thiserror = "1" +tokio = { version = "1", features = ["full"] } +async-trait = "0.1" diff --git a/core/lib/eth_client/src/clients/http_client.rs b/core/lib/eth_client/src/clients/http_client.rs new file mode 100644 index 000000000000..4387c9317e53 --- /dev/null +++ b/core/lib/eth_client/src/clients/http_client.rs @@ -0,0 +1,649 @@ +// Built-in deps +use std::cmp::min; +use std::sync::Arc; +use std::{fmt, time::Instant}; + +use async_trait::async_trait; +use zksync_config::ZkSyncConfig; +use zksync_contracts::zksync_contract; +use zksync_eth_signer::PrivateKeySigner; +// External uses +use zksync_types::web3::{ + self, + contract::{ + tokens::{Detokenize, Tokenize}, + Contract, Options, + }, + ethabi, + transports::Http, + types::{ + Address, BlockId, BlockNumber, Bytes, Filter, Log, Transaction, TransactionId, + TransactionReceipt, H160, H256, U256, U64, + }, + Web3, +}; +use zksync_types::{L1ChainId, PackedEthSignature, EIP_1559_TX_TYPE}; + +// Workspace uses +use zksync_eth_signer::{raw_ethereum_tx::TransactionParameters, EthereumSigner}; + +pub type EthereumClient = ETHDirectClient; + +/// Gas limit value to be used in transaction if for some reason +/// gas limit was not set for it. +/// +/// This is an emergency value, which will not be used normally. 
+const FALLBACK_GAS_LIMIT: u64 = 3_000_000; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("Request to ethereum gateway failed: {0}")] + EthereumGateway(#[from] zksync_types::web3::Error), + #[error("Call to contract failed: {0}")] + Contract(#[from] zksync_types::web3::contract::Error), + #[error("Transaction signing failed: {0}")] + Signer(#[from] zksync_eth_signer::error::SignerError), + #[error("Decoding revert reason failed: {0}")] + Decode(#[from] ethabi::Error), + #[error("Max fee {0} less than priority fee {1}")] + WrongFeeProvided(U256, U256), +} + +#[derive(Debug, Clone, PartialEq)] +pub struct SignedCallResult { + pub raw_tx: Vec, + pub max_priority_fee_per_gas: U256, + pub max_fee_per_gas: U256, + pub nonce: U256, + pub hash: H256, +} + +/// State of the executed Ethereum transaction. +#[derive(Debug, Clone)] +pub struct ExecutedTxStatus { + /// The hash of the executed L1 transaction. + pub tx_hash: H256, + /// Whether transaction was executed successfully or failed. + pub success: bool, + /// Receipt for a transaction. + pub receipt: TransactionReceipt, +} + +/// Information about transaction failure. 
+#[derive(Debug, Clone)] +pub struct FailureInfo { + pub revert_code: i64, + pub revert_reason: String, + pub gas_used: Option, + pub gas_limit: U256, +} + +#[async_trait] +pub trait EthInterface { + async fn nonce_at(&self, block: BlockNumber, component: &'static str) -> Result; + async fn current_nonce(&self, component: &'static str) -> Result { + self.nonce_at(BlockNumber::Latest, component).await + } + async fn pending_nonce(&self, component: &'static str) -> Result { + self.nonce_at(BlockNumber::Pending, component).await + } + async fn base_fee_history( + &self, + from_block: usize, + block_count: usize, + component: &'static str, + ) -> Result, Error>; + async fn get_gas_price(&self, component: &'static str) -> Result; + async fn block_number(&self, component: &'static str) -> Result; + async fn send_raw_tx(&self, tx: Vec) -> Result; + async fn sign_prepared_tx_for_addr( + &self, + data: Vec, + contract_addr: H160, + options: Options, + component: &'static str, + ) -> Result; + async fn get_tx_status( + &self, + hash: H256, + component: &'static str, + ) -> Result, Error>; + async fn failure_reason(&self, tx_hash: H256) -> Result, Error>; +} + +struct ETHDirectClientInner { + eth_signer: S, + sender_account: Address, + contract_addr: H160, + contract: ethabi::Contract, + chain_id: L1ChainId, + default_priority_fee_per_gas: U256, + web3: Web3, +} + +#[derive(Clone)] +pub struct ETHDirectClient { + inner: Arc>, +} + +impl fmt::Debug for ETHDirectClient { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // We do not want to have a private key in the debug representation. 
+ + f.debug_struct("ETHDirectClient") + .field("sender_account", &self.inner.sender_account) + .field("contract_addr", &self.inner.contract_addr) + .field("chain_id", &self.inner.chain_id) + .finish() + } +} + +#[async_trait] +impl EthInterface for ETHDirectClient { + async fn nonce_at(&self, block: BlockNumber, component: &'static str) -> Result { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "nonce_at"); + let start = Instant::now(); + let nonce = self + .inner + .web3 + .eth() + .transaction_count(self.inner.sender_account, Some(block)) + .await?; + metrics::histogram!("eth_client.direct.current_nonce", start.elapsed()); + Ok(nonce) + } + + async fn block_number(&self, component: &'static str) -> Result { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "block_number"); + let start = Instant::now(); + let block_number = self.inner.web3.eth().block_number().await?; + metrics::histogram!("eth_client.direct.block_number", start.elapsed()); + Ok(block_number) + } + + async fn get_gas_price(&self, component: &'static str) -> Result { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "get_gas_price"); + let start = Instant::now(); + let network_gas_price = self.inner.web3.eth().gas_price().await?; + metrics::histogram!("eth_client.direct.get_gas_price", start.elapsed()); + Ok(network_gas_price) + } + + async fn sign_prepared_tx_for_addr( + &self, + data: Vec, + contract_addr: H160, + options: Options, + component: &'static str, + ) -> Result { + let start = Instant::now(); + + // fetch current max priority fee per gas + let max_priority_fee_per_gas = match options.max_priority_fee_per_gas { + Some(max_priority_fee_per_gas) => max_priority_fee_per_gas, + None => self.inner.default_priority_fee_per_gas, + }; + + // fetch current base fee and add max_priority_fee_per_gas + let max_fee_per_gas = match options.max_fee_per_gas { + 
Some(max_fee_per_gas) => max_fee_per_gas, + None => { + self.get_pending_block_base_fee_per_gas(component).await? + max_priority_fee_per_gas + } + }; + + if max_fee_per_gas < max_priority_fee_per_gas { + return Err(Error::WrongFeeProvided( + max_fee_per_gas, + max_priority_fee_per_gas, + )); + } + + let nonce = match options.nonce { + Some(nonce) => nonce, + None => self.pending_nonce(component).await?, + }; + + let gas = match options.gas { + Some(gas) => gas, + None => { + // Verbosity level is set to `error`, since we expect all the transactions to have + // a set limit, but don't want to crаsh the application if for some reason in some + // place limit was not set. + vlog::error!( + "No gas limit was set for transaction, using the default limit: {}", + FALLBACK_GAS_LIMIT + ); + + U256::from(FALLBACK_GAS_LIMIT) + } + }; + + let tx = TransactionParameters { + nonce, + to: Some(contract_addr), + gas, + value: options.value.unwrap_or_default(), + data, + chain_id: self.inner.chain_id.0 as u64, + max_priority_fee_per_gas, + gas_price: None, + transaction_type: Some(EIP_1559_TX_TYPE.into()), + access_list: None, + max_fee_per_gas, + }; + + let signed_tx = self.inner.eth_signer.sign_transaction(tx).await?; + let hash = self + .inner + .web3 + .web3() + .sha3(Bytes(signed_tx.clone())) + .await?; + + metrics::histogram!( + "eth_client.direct.sign_prepared_tx_for_addr", + start.elapsed() + ); + Ok(SignedCallResult { + raw_tx: signed_tx, + max_priority_fee_per_gas, + max_fee_per_gas, + nonce, + hash, + }) + } + + async fn send_raw_tx(&self, tx: Vec) -> Result { + let start = Instant::now(); + let tx = self + .inner + .web3 + .eth() + .send_raw_transaction(Bytes(tx)) + .await?; + metrics::histogram!("eth_client.direct.send_raw_tx", start.elapsed()); + Ok(tx) + } + + async fn base_fee_history( + &self, + upto_block: usize, + block_count: usize, + component: &'static str, + ) -> Result, Error> { + const MAX_REQUEST_CHUNK: usize = 1024; + + 
metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "base_fee_history"); + let start = Instant::now(); + + let mut history = Vec::with_capacity(block_count); + let from_block = upto_block.saturating_sub(block_count); + + // Here we are requesting fee_history from blocks + // (from_block; upto_block] in chunks of size MAX_REQUEST_CHUNK + // starting from the oldest block. + for chunk_start in (from_block..=upto_block).step_by(MAX_REQUEST_CHUNK) { + let chunk_end = (chunk_start + MAX_REQUEST_CHUNK).min(upto_block); + let chunk_size = chunk_end - chunk_start; + let chunk = self + .inner + .web3 + .eth() + .fee_history(chunk_size.into(), chunk_end.into(), None) + .await? + .base_fee_per_gas; + + history.extend(chunk); + } + + metrics::histogram!("eth_client.direct.base_fee", start.elapsed()); + Ok(history.into_iter().map(|fee| fee.as_u64()).collect()) + } + async fn get_tx_status( + &self, + hash: H256, + component: &'static str, + ) -> Result, Error> { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "get_tx_status"); + let start = Instant::now(); + + let receipt = self.tx_receipt(hash, component).await?; + let res = match receipt { + Some(receipt) => match (receipt.status, receipt.block_number) { + (Some(status), Some(_)) => { + let success = status.as_u64() == 1; + + Some(ExecutedTxStatus { + tx_hash: receipt.transaction_hash, + success, + receipt, + }) + } + _ => None, + }, + _ => None, + }; + metrics::histogram!("eth_client.direct.get_tx_status", start.elapsed()); + Ok(res) + } + + async fn failure_reason(&self, tx_hash: H256) -> Result, Error> { + let start = Instant::now(); + let transaction = self.inner.web3.eth().transaction(tx_hash.into()).await?; + let receipt = self.inner.web3.eth().transaction_receipt(tx_hash).await?; + + match (transaction, receipt) { + (Some(transaction), Some(receipt)) => { + let gas_limit = transaction.gas; + let gas_used = receipt.gas_used; + + let 
call_request = web3::types::CallRequest { + from: transaction.from, + to: transaction.to, + gas: Some(transaction.gas), + gas_price: transaction.gas_price, + max_fee_per_gas: None, + max_priority_fee_per_gas: None, + value: Some(transaction.value), + data: Some(transaction.input), + transaction_type: None, + access_list: None, + }; + + let call_error = self + .inner + .web3 + .eth() + .call(call_request, receipt.block_number.map(Into::into)) + .await + .err(); + + let failure_info = match call_error { + Some(web3::Error::Rpc(rpc_error)) => { + let revert_code = rpc_error.code.code(); + let message_len = + min("execution reverted: ".len(), rpc_error.message.len()); + let revert_reason = rpc_error.message[message_len..].to_string(); + + Ok(Some(FailureInfo { + revert_code, + revert_reason, + gas_used, + gas_limit, + })) + } + Some(err) => Err(err.into()), + None => Ok(None), + }; + + metrics::histogram!("eth_client.direct.failure_reason", start.elapsed()); + + failure_info + } + _ => Ok(None), + } + } +} + +impl ETHDirectClient { + pub fn new( + transport: Http, + contract: ethabi::Contract, + operator_eth_addr: H160, + eth_signer: S, + contract_eth_addr: H160, + default_priority_fee_per_gas: U256, + chain_id: L1ChainId, + ) -> Self { + Self { + inner: Arc::new(ETHDirectClientInner { + sender_account: operator_eth_addr, + eth_signer, + contract_addr: contract_eth_addr, + chain_id, + contract, + default_priority_fee_per_gas, + web3: Web3::new(transport), + }), + } + } + + pub async fn get_pending_block_base_fee_per_gas( + &self, + component: &'static str, + ) -> Result { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "get_pending_block_base_fee_per_gas"); + let start = Instant::now(); + let block = self + .inner + .web3 + .eth() + .block(BlockId::Number(BlockNumber::Pending)) + .await? 
+ .unwrap(); // Pending block should always exist + + metrics::histogram!("eth_client.direct.base_fee", start.elapsed()); + // base_fee_per_gas always exists after London fork + Ok(block.base_fee_per_gas.unwrap()) + } + + pub fn main_contract_with_address(&self, address: Address) -> Contract { + Contract::new(self.inner.web3.eth(), address, self.inner.contract.clone()) + } + + pub fn main_contract(&self) -> Contract { + self.main_contract_with_address(self.inner.contract_addr) + } + + pub fn create_contract(&self, address: Address, contract: ethabi::Contract) -> Contract { + Contract::new(self.inner.web3.eth(), address, contract) + } + + pub async fn block(&self, id: BlockId) -> Result>, Error> { + let start = Instant::now(); + let block = self.inner.web3.eth().block(id).await?; + metrics::histogram!("eth_client.direct.block", start.elapsed()); + Ok(block) + } + + pub async fn sign_prepared_tx( + &self, + data: Vec, + options: Options, + component: &'static str, + ) -> Result { + self.sign_prepared_tx_for_addr(data, self.inner.contract_addr, options, component) + .await + } + + pub async fn tx_receipt( + &self, + tx_hash: H256, + component: &'static str, + ) -> Result, Error> { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "tx_receipt"); + let start = Instant::now(); + let receipt = self.inner.web3.eth().transaction_receipt(tx_hash).await?; + metrics::histogram!("eth_client.direct.tx_receipt", start.elapsed()); + Ok(receipt) + } + + pub async fn eth_balance( + &self, + address: Address, + component: &'static str, + ) -> Result { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "eth_balance"); + let start = Instant::now(); + let balance = self.inner.web3.eth().balance(address, None).await?; + metrics::histogram!("eth_client.direct.eth_balance", start.elapsed()); + Ok(balance) + } + + pub async fn sender_eth_balance(&self, component: &'static str) -> Result { + 
metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "sender_eth_balance"); + self.eth_balance(self.inner.sender_account, component).await + } + + pub async fn allowance( + &self, + token_address: Address, + erc20_abi: ethabi::Contract, + ) -> Result { + self.allowance_on_contract(token_address, self.inner.contract_addr, erc20_abi) + .await + } + + pub async fn allowance_on_contract( + &self, + token_address: Address, + contract_address: Address, + erc20_abi: ethabi::Contract, + ) -> Result { + let start = Instant::now(); + let res = self + .call_contract_function( + "allowance", + (self.inner.sender_account, contract_address), + None, + Options::default(), + None, + token_address, + erc20_abi, + ) + .await?; + metrics::histogram!("eth_client.direct.allowance", start.elapsed()); + Ok(res) + } + + pub async fn call_main_contract_function( + &self, + func: &str, + params: P, + from: A, + options: Options, + block: B, + ) -> Result + where + R: Detokenize + Unpin, + A: Into>, + B: Into>, + P: Tokenize, + { + self.call_contract_function( + func, + params, + from, + options, + block, + self.inner.contract_addr, + self.inner.contract.clone(), + ) + .await + } + + #[allow(clippy::too_many_arguments)] + pub async fn call_contract_function( + &self, + func: &str, + params: P, + from: A, + options: Options, + block: B, + token_address: Address, + erc20_abi: ethabi::Contract, + ) -> Result + where + R: Detokenize + Unpin, + A: Into>, + B: Into>, + P: Tokenize, + { + let start = Instant::now(); + let contract = Contract::new(self.inner.web3.eth(), token_address, erc20_abi); + let res = contract.query(func, params, from, options, block).await?; + metrics::histogram!("eth_client.direct.call_contract_function", start.elapsed()); + Ok(res) + } + + pub async fn logs(&self, filter: Filter, component: &'static str) -> Result, Error> { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "logs"); + let start 
= Instant::now(); + let logs = self.inner.web3.eth().logs(filter).await?; + metrics::histogram!("eth_client.direct.logs", start.elapsed()); + Ok(logs) + } + + pub fn contract(&self) -> ðabi::Contract { + &self.inner.contract + } + + pub fn contract_addr(&self) -> H160 { + self.inner.contract_addr + } + + pub fn chain_id(&self) -> L1ChainId { + self.inner.chain_id + } + + pub fn sender_account(&self) -> Address { + self.inner.sender_account + } + + pub fn encode_tx_data(&self, func: &str, params: P) -> Vec { + let f = self + .contract() + .function(func) + .expect("failed to get function parameters"); + + f.encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters") + } + + pub fn get_web3_transport(&self) -> &Http { + self.inner.web3.transport() + } + + pub async fn get_tx( + &self, + hash: H256, + component: &'static str, + ) -> Result, Error> { + metrics::counter!("server.ethereum_gateway.call", 1, "component" => component, "method" => "get_tx"); + let tx = self + .inner + .web3 + .eth() + .transaction(TransactionId::Hash(hash)) + .await?; + Ok(tx) + } +} + +impl EthereumClient { + pub fn from_config(config: &ZkSyncConfig) -> Self { + let transport = web3::transports::Http::new(&config.eth_client.web3_url).unwrap(); + + let operator_address = PackedEthSignature::address_from_private_key( + &config.eth_sender.sender.operator_private_key, + ) + .expect("Failed to get address from private key"); + + vlog::info!("Operator address: {:?}", operator_address); + + ETHDirectClient::new( + transport, + zksync_contract(), + config.eth_sender.sender.operator_commit_eth_addr, + PrivateKeySigner::new(config.eth_sender.sender.operator_private_key), + config.contracts.diamond_proxy_addr, + config + .eth_sender + .gas_adjuster + .default_priority_fee_per_gas + .into(), + L1ChainId(config.eth_client.chain_id), + ) + } +} diff --git a/core/lib/eth_client/src/clients/mock.rs b/core/lib/eth_client/src/clients/mock.rs new file mode 100644 index 000000000000..154b4ab1bf28 
--- /dev/null +++ b/core/lib/eth_client/src/clients/mock.rs @@ -0,0 +1,322 @@ +use std::sync::atomic::{AtomicU64, Ordering}; + +use async_trait::async_trait; +use jsonrpc_core::types::error::Error as RpcError; +use std::collections::{BTreeMap, HashMap}; +use std::sync::RwLock; +use zksync_types::web3::{ + contract::tokens::Tokenize, + contract::Options, + ethabi, + types::{BlockNumber, U64}, + Error as Web3Error, +}; + +use zksync_types::{web3::types::TransactionReceipt, H160, H256, U256}; + +use super::http_client::{Error, EthInterface, ExecutedTxStatus, FailureInfo, SignedCallResult}; + +#[derive(Debug, Clone, Default, Copy)] +pub struct MockTx { + pub hash: H256, + pub nonce: u64, + pub base_fee: U256, +} + +impl From> for MockTx { + fn from(tx: Vec) -> Self { + use std::convert::TryFrom; + + let len = tx.len(); + let total_gas_price = U256::try_from(&tx[len - 96..len - 64]).unwrap(); + let priority_fee = U256::try_from(&tx[len - 64..len - 32]).unwrap(); + let base_fee = total_gas_price - priority_fee; + let nonce = U256::try_from(&tx[len - 32..]).unwrap().as_u64(); + let hash = { + let mut buffer: [u8; 32] = Default::default(); + buffer.copy_from_slice(&tx[..32]); + buffer.into() + }; + + Self { + nonce, + hash, + base_fee, + } + } +} + +/// Mock Ethereum client is capable of recording all the incoming requests for the further analysis. 
+#[derive(Debug)] +pub struct MockEthereum { + pub block_number: AtomicU64, + pub max_fee_per_gas: U256, + pub base_fee_history: RwLock>, + pub max_priority_fee_per_gas: U256, + pub tx_statuses: RwLock>, + pub sent_txs: RwLock>, + pub current_nonce: AtomicU64, + pub pending_nonce: AtomicU64, + pub nonces: RwLock>, +} + +impl Default for MockEthereum { + fn default() -> Self { + Self { + max_fee_per_gas: 100.into(), + max_priority_fee_per_gas: 10.into(), + block_number: Default::default(), + base_fee_history: Default::default(), + tx_statuses: Default::default(), + sent_txs: Default::default(), + current_nonce: Default::default(), + pending_nonce: Default::default(), + nonces: RwLock::new([(0, 0)].into()), + } + } +} + +impl MockEthereum { + /// A fake `sha256` hasher, which calculates an `std::hash` instead. + /// This is done for simplicity and it's also much faster. + pub fn fake_sha256(data: &[u8]) -> H256 { + use std::collections::hash_map::DefaultHasher; + use std::hash::Hasher; + + let mut hasher = DefaultHasher::new(); + hasher.write(data); + + let result = hasher.finish(); + + H256::from_low_u64_ne(result) + } + + /// Increments the blocks by a provided `confirmations` and marks the sent transaction + /// as a success. 
+ pub fn execute_tx( + &self, + tx_hash: H256, + success: bool, + confirmations: u64, + ) -> anyhow::Result<()> { + let block_number = self.block_number.fetch_add(confirmations, Ordering::SeqCst); + let nonce = self.current_nonce.fetch_add(1, Ordering::SeqCst); + let tx_nonce = self.sent_txs.read().unwrap()[&tx_hash].nonce; + + anyhow::ensure!(tx_nonce == nonce, "nonce mismatch"); + self.nonces.write().unwrap().insert(block_number, nonce + 1); + + let status = ExecutedTxStatus { + tx_hash, + success, + receipt: TransactionReceipt { + gas_used: Some(21000u32.into()), + block_number: Some(block_number.into()), + transaction_hash: tx_hash, + ..Default::default() + }, + }; + + self.tx_statuses.write().unwrap().insert(tx_hash, status); + + Ok(()) + } + + pub fn sign_prepared_tx( + &self, + mut raw_tx: Vec, + options: Options, + ) -> Result { + let max_fee_per_gas = options.max_fee_per_gas.unwrap_or(self.max_fee_per_gas); + let max_priority_fee_per_gas = options + .max_priority_fee_per_gas + .unwrap_or(self.max_priority_fee_per_gas); + let nonce = options.nonce.expect("Nonce must be set for every tx"); + + // Nonce and gas_price are appended to distinguish the same transactions + // with different gas by their hash in tests. + raw_tx.append(&mut ethabi::encode(&max_fee_per_gas.into_tokens())); + raw_tx.append(&mut ethabi::encode(&max_priority_fee_per_gas.into_tokens())); + raw_tx.append(&mut ethabi::encode(&nonce.into_tokens())); + let hash = Self::fake_sha256(&raw_tx); // Okay for test purposes. 
+ + // Concatenate raw_tx plus hash for test purposes + let mut new_raw_tx = hash.as_bytes().to_vec(); + new_raw_tx.extend(raw_tx); + Ok(SignedCallResult { + raw_tx: new_raw_tx, + max_priority_fee_per_gas, + max_fee_per_gas, + nonce, + hash, + }) + } + + pub fn advance_block_number(&self, val: u64) -> u64 { + self.block_number.fetch_add(val, Ordering::SeqCst) + val + } + + pub fn with_fee_history(self, history: Vec) -> Self { + Self { + base_fee_history: RwLock::new(history), + ..self + } + } +} + +#[async_trait] +impl EthInterface for MockEthereum { + async fn get_tx_status( + &self, + hash: H256, + _: &'static str, + ) -> Result, Error> { + Ok(self.tx_statuses.read().unwrap().get(&hash).cloned()) + } + + async fn block_number(&self, _: &'static str) -> Result { + Ok(self.block_number.load(Ordering::SeqCst).into()) + } + + async fn send_raw_tx(&self, tx: Vec) -> Result { + let mock_tx = MockTx::from(tx); + + if mock_tx.nonce < self.current_nonce.load(Ordering::SeqCst) { + return Err(Error::EthereumGateway(Web3Error::Rpc(RpcError { + message: "transaction with the same nonce already processed".to_string(), + code: 101.into(), + data: None, + }))); + } + + if mock_tx.nonce == self.pending_nonce.load(Ordering::SeqCst) { + self.pending_nonce.fetch_add(1, Ordering::SeqCst); + } + + self.sent_txs.write().unwrap().insert(mock_tx.hash, mock_tx); + + Ok(mock_tx.hash) + } + + async fn pending_nonce(&self, _: &'static str) -> Result { + Ok(self.pending_nonce.load(Ordering::SeqCst).into()) + } + + async fn current_nonce(&self, _: &'static str) -> Result { + Ok(self.current_nonce.load(Ordering::SeqCst).into()) + } + + async fn nonce_at(&self, block: BlockNumber, _: &'static str) -> Result { + if let BlockNumber::Number(block_number) = block { + Ok((*self + .nonces + .read() + .unwrap() + .range(..=block_number.as_u64()) + .next_back() + .unwrap() + .1) + .into()) + } else { + panic!("MockEthereum::nonce_at called with non-number block tag"); + } + } + + async fn 
sign_prepared_tx_for_addr( + &self, + data: Vec, + _contract_addr: H160, + options: Options, + _: &'static str, + ) -> Result { + self.sign_prepared_tx(data, options) + } + + async fn get_gas_price(&self, _: &'static str) -> Result { + Ok(self.max_fee_per_gas) + } + + async fn base_fee_history( + &self, + from_block: usize, + block_count: usize, + _component: &'static str, + ) -> Result, Error> { + Ok(self.base_fee_history.read().unwrap() + [from_block.saturating_sub(block_count - 1)..=from_block] + .to_vec()) + } + + async fn failure_reason(&self, tx_hash: H256) -> Result, Error> { + let tx_status = self.get_tx_status(tx_hash, "failure_reason").await.unwrap(); + + Ok(tx_status.map(|status| FailureInfo { + revert_code: status.success as i64, + revert_reason: "Unknown".into(), + gas_used: status.receipt.gas_used, + gas_limit: U256::zero(), + })) + } +} + +#[async_trait] +impl + Sync> EthInterface for T { + async fn current_nonce(&self, component: &'static str) -> Result { + self.as_ref().current_nonce(component).await + } + + async fn base_fee_history( + &self, + from_block: usize, + block_count: usize, + component: &'static str, + ) -> Result, Error> { + self.as_ref() + .base_fee_history(from_block, block_count, component) + .await + } + + async fn get_gas_price(&self, component: &'static str) -> Result { + self.as_ref().get_gas_price(component).await + } + + async fn pending_nonce(&self, component: &'static str) -> Result { + self.as_ref().pending_nonce(component).await + } + + async fn nonce_at(&self, block: BlockNumber, component: &'static str) -> Result { + self.as_ref().nonce_at(block, component).await + } + + async fn block_number(&self, component: &'static str) -> Result { + self.as_ref().block_number(component).await + } + + async fn send_raw_tx(&self, tx: Vec) -> Result { + self.as_ref().send_raw_tx(tx).await + } + + async fn sign_prepared_tx_for_addr( + &self, + data: Vec, + contract_addr: H160, + options: Options, + component: &'static str, + ) -> Result 
{ + self.as_ref() + .sign_prepared_tx_for_addr(data, contract_addr, options, component) + .await + } + + async fn failure_reason(&self, tx_hash: H256) -> Result, Error> { + self.as_ref().failure_reason(tx_hash).await + } + + async fn get_tx_status( + &self, + hash: H256, + component: &'static str, + ) -> Result, Error> { + self.as_ref().get_tx_status(hash, component).await + } +} diff --git a/core/lib/eth_client/src/clients/mod.rs b/core/lib/eth_client/src/clients/mod.rs new file mode 100644 index 000000000000..3a73d0fb7e33 --- /dev/null +++ b/core/lib/eth_client/src/clients/mod.rs @@ -0,0 +1,2 @@ +pub mod http_client; +pub mod mock; diff --git a/core/lib/eth_client/src/lib.rs b/core/lib/eth_client/src/lib.rs new file mode 100644 index 000000000000..5c802a66ff54 --- /dev/null +++ b/core/lib/eth_client/src/lib.rs @@ -0,0 +1,4 @@ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +pub mod clients; +pub use clients::http_client::{ETHDirectClient, EthInterface}; diff --git a/core/lib/eth_signer/Cargo.toml b/core/lib/eth_signer/Cargo.toml new file mode 100644 index 000000000000..66ede9580f5f --- /dev/null +++ b/core/lib/eth_signer/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "zksync_eth_signer" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_types = { path = "../types", version = "1.0" } + +serde = "1.0.90" +serde_derive = "1.0.90" +serde_json = "1.0.0" +hex = "0.4.2" +secp256k1 = "0.21.3" + +parity-crypto = { version = "0.9", features = ["publickey"] } +rlp = "0.5" + +reqwest = { version = "0.11", features = ["json", "blocking"] } +thiserror = "1.0" + +jsonrpc-core = "18.0.0" +async-trait = "0.1" + +[dev-dependencies] +actix-rt = "2" +tokio = { version = "1", features = ["full"] } +actix-web = "4.0.0-beta.8" 
+futures = "0.3" + diff --git a/core/lib/eth_signer/src/error.rs b/core/lib/eth_signer/src/error.rs new file mode 100644 index 000000000000..d4a989789e2a --- /dev/null +++ b/core/lib/eth_signer/src/error.rs @@ -0,0 +1,34 @@ +pub use jsonrpc_core::types::response::Failure as RpcFailure; +use thiserror::Error; + +#[derive(Debug, Error, PartialEq)] +pub enum RpcSignerError { + #[error("Unable to decode server response")] + MalformedResponse(String), + #[error("RPC error: {0:?}")] + RpcError(RpcFailure), + #[error("Network error: {0}")] + NetworkError(String), +} + +#[derive(Debug, Error, PartialEq, Eq)] +pub enum SignerError { + #[error("Ethereum private key required to perform an operation")] + MissingEthPrivateKey, + #[error("EthereumSigner required to perform an operation")] + MissingEthSigner, + #[error("Signing failed: {0}")] + SigningFailed(String), + #[error("Unlocking failed: {0}")] + UnlockingFailed(String), + #[error("Decode raw transaction failed: {0}")] + DecodeRawTxFailed(String), + #[error("Signing key is not set in account")] + NoSigningKey, + #[error("Address determination error")] + DefineAddress, + #[error("Recover address from signature failed: {0}")] + RecoverAddress(String), + #[error("{0}")] + CustomError(String), +} diff --git a/core/lib/eth_signer/src/json_rpc_signer.rs b/core/lib/eth_signer/src/json_rpc_signer.rs new file mode 100644 index 000000000000..95156b53cd35 --- /dev/null +++ b/core/lib/eth_signer/src/json_rpc_signer.rs @@ -0,0 +1,554 @@ +use crate::error::{RpcSignerError, SignerError}; +use crate::json_rpc_signer::messages::JsonRpcRequest; +use crate::raw_ethereum_tx::TransactionParameters; +use crate::EthereumSigner; + +use jsonrpc_core::types::response::Output; +use zksync_types::tx::primitives::PackedEthSignature; +use zksync_types::{Address, EIP712TypedStructure, Eip712Domain, H256}; + +use serde_json::Value; + +pub fn is_signature_from_address( + signature: &PackedEthSignature, + signed_bytes: &H256, + address: Address, +) -> 
Result { + let signature_is_correct = signature + .signature_recover_signer(signed_bytes) + .map_err(|err| SignerError::RecoverAddress(err.to_string()))? + == address; + Ok(signature_is_correct) +} + +#[derive(Debug, Clone)] +pub enum AddressOrIndex { + Address(Address), + Index(usize), +} + +/// Describes whether to add a prefix `\x19Ethereum Signed Message:\n` +/// when requesting a message signature. +#[derive(Debug, Clone)] +pub enum SignerType { + NotNeedPrefix, + NeedPrefix, +} + +#[derive(Debug, Clone)] +pub struct JsonRpcSigner { + rpc_addr: String, + client: reqwest::Client, + address: Option
, + signer_type: Option, +} + +#[async_trait::async_trait] +impl EthereumSigner for JsonRpcSigner { + /// The sign method calculates an Ethereum specific signature with: + /// checks if the server adds a prefix if not then adds + /// return sign(keccak256("\x19Ethereum Signed Message:\n" + len(message) + message))). + async fn sign_message(&self, msg: &[u8]) -> Result { + let signature: PackedEthSignature = { + let msg = match &self.signer_type { + Some(SignerType::NotNeedPrefix) => msg.to_vec(), + Some(SignerType::NeedPrefix) => { + let prefix = format!("\x19Ethereum Signed Message:\n{}", msg.len()); + let mut bytes = Vec::with_capacity(prefix.len() + msg.len()); + bytes.extend_from_slice(prefix.as_bytes()); + bytes.extend_from_slice(msg); + + bytes + } + None => { + return Err(SignerError::MissingEthSigner); + } + }; + + let message = JsonRpcRequest::sign_message(self.address()?, &msg); + let ret = self + .post(&message) + .await + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + serde_json::from_value(ret) + .map_err(|err| SignerError::SigningFailed(err.to_string()))? + }; + + let signed_bytes = PackedEthSignature::message_to_signed_bytes(msg); + // Checks the correctness of the message signature without a prefix + if is_signature_from_address(&signature, &signed_bytes, self.address()?)? { + Ok(signature) + } else { + Err(SignerError::SigningFailed( + "Invalid signature from JsonRpcSigner".to_string(), + )) + } + } + + /// Signs typed struct using ethereum private key by EIP-712 signature standard. + /// Result of this function is the equivalent of RPC calling `eth_signTypedData`. 
+ async fn sign_typed_data( + &self, + eip712_domain: &Eip712Domain, + typed_struct: &S, + ) -> Result { + let signature: PackedEthSignature = { + let message = + JsonRpcRequest::sign_typed_data(self.address()?, eip712_domain, typed_struct); + let ret = self + .post(&message) + .await + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + serde_json::from_value(ret) + .map_err(|err| SignerError::SigningFailed(err.to_string()))? + }; + + let signed_bytes = + PackedEthSignature::typed_data_to_signed_bytes(eip712_domain, typed_struct); + // Checks the correctness of the message signature without a prefix + if is_signature_from_address(&signature, &signed_bytes, self.address()?)? { + Ok(signature) + } else { + Err(SignerError::SigningFailed( + "Invalid signature from JsonRpcSigner".to_string(), + )) + } + } + + /// Signs and returns the RLP-encoded transaction. + async fn sign_transaction( + &self, + raw_tx: TransactionParameters, + ) -> Result, SignerError> { + let msg = JsonRpcRequest::sign_transaction(self.address()?, raw_tx); + + let ret = self + .post(&msg) + .await + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + + // get Json object and parse it to get raw Transaction + let json: Value = serde_json::from_value(ret) + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + + let raw_tx: Option<&str> = json + .get("raw") + .and_then(|value| value.as_str()) + .map(|value| &value["0x".len()..]); + + if let Some(raw_tx) = raw_tx { + hex::decode(raw_tx).map_err(|err| SignerError::DecodeRawTxFailed(err.to_string())) + } else { + Err(SignerError::DefineAddress) + } + } + + async fn get_address(&self) -> Result { + self.address() + } +} + +impl JsonRpcSigner { + pub async fn new( + rpc_addr: impl Into, + address_or_index: Option, + signer_type: Option, + password_to_unlock: Option, + ) -> Result { + let mut signer = Self { + rpc_addr: rpc_addr.into(), + client: reqwest::Client::new(), + address: None, + signer_type, + }; + + // If the 
user has not specified either the index or the address, + // then we will assume that by default the address will be the first one that the server will send + let address_or_index = match address_or_index { + Some(address_or_index) => address_or_index, + None => AddressOrIndex::Index(0), + }; + + // EthereumSigner can support many different addresses, + // we define only the one we need by the index + // of receiving from the server or by the address itself. + signer.detect_address(address_or_index).await?; + + if let Some(password) = password_to_unlock { + signer.unlock(&password).await?; + } + + // If it is not known whether it is necessary + // to add a prefix to messages, then we define this. + if signer.signer_type.is_none() { + signer.detect_signer_type().await?; + }; + + Ok(signer) + } + + /// Get Ethereum address. + pub fn address(&self) -> Result { + self.address.ok_or(SignerError::DefineAddress) + } + + /// Specifies the Ethreum address which sets the address for which all other requests will be processed. + /// If the address has already been set, then it will all the same change to a new one. + pub async fn detect_address( + &mut self, + address_or_index: AddressOrIndex, + ) -> Result { + self.address = match address_or_index { + AddressOrIndex::Address(address) => Some(address), + AddressOrIndex::Index(index) => { + let message = JsonRpcRequest::accounts(); + let ret = self + .post(&message) + .await + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + let accounts: Vec
= serde_json::from_value(ret) + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + accounts.get(index).copied() + } + }; + + self.address.ok_or(SignerError::DefineAddress) + } + + /// Server can either add the prefix `\x19Ethereum Signed Message:\n` to the message and not add. + /// Checks if a prefix should be added to the message. + pub async fn detect_signer_type(&mut self) -> Result<(), SignerError> { + // If the `sig_type` is set, then we do not need to detect it from the server. + if self.signer_type.is_some() { + return Ok(()); + } + + let msg = "JsonRpcSigner type was not specified. Sign this message to detect the signer type. It only has to be done once per session"; + let msg_with_prefix = format!("\x19Ethereum Signed Message:\n{}{}", msg.len(), msg); + + let signature: PackedEthSignature = { + let message = JsonRpcRequest::sign_message(self.address()?, msg.as_bytes()); + + let ret = self + .post(&message) + .await + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + serde_json::from_value(ret) + .map_err(|err| SignerError::SigningFailed(err.to_string()))? + }; + + let msg_signed_bytes = PackedEthSignature::message_to_signed_bytes(msg.as_bytes()); + if is_signature_from_address(&signature, &msg_signed_bytes, self.address()?)? { + self.signer_type = Some(SignerType::NotNeedPrefix); + } + + let msg_with_prefix_signed_bytes = + PackedEthSignature::message_to_signed_bytes(msg_with_prefix.as_bytes()); + if is_signature_from_address(&signature, &msg_with_prefix_signed_bytes, self.address()?)? { + self.signer_type = Some(SignerType::NeedPrefix); + } + + match self.signer_type.is_some() { + true => Ok(()), + false => Err(SignerError::SigningFailed( + "Failed to get the correct signature".to_string(), + )), + } + } + + /// Unlocks the current account, after that the server can sign messages and transactions. 
+ pub async fn unlock(&self, password: &str) -> Result<(), SignerError> { + let message = JsonRpcRequest::unlock_account(self.address()?, password); + let ret = self + .post(&message) + .await + .map_err(|err| SignerError::UnlockingFailed(err.to_string()))?; + + let res: bool = serde_json::from_value(ret) + .map_err(|err| SignerError::UnlockingFailed(err.to_string()))?; + + if res { + Ok(()) + } else { + Err(SignerError::UnlockingFailed( + "Server response: false".to_string(), + )) + } + } + + /// Performs a POST query to the JSON RPC endpoint, + /// and decodes the response, returning the decoded `serde_json::Value`. + /// `Ok` is returned only for successful calls, for any kind of error + /// the `Err` variant is returned (including the failed RPC method + /// execution response). + async fn post( + &self, + message: impl serde::Serialize, + ) -> Result { + let reply: Output = self.post_raw(message).await?; + + let ret = match reply { + Output::Success(success) => success.result, + Output::Failure(failure) => return Err(RpcSignerError::RpcError(failure)), + }; + + Ok(ret) + } + + /// Performs a POST query to the JSON RPC endpoint, + /// and decodes the response, returning the decoded `serde_json::Value`. + /// `Ok` is returned only for successful calls, for any kind of error + /// the `Err` variant is returned (including the failed RPC method + /// execution response). 
+ async fn post_raw(&self, message: impl serde::Serialize) -> Result { + let res = self + .client + .post(&self.rpc_addr) + .json(&message) + .send() + .await + .map_err(|err| RpcSignerError::NetworkError(err.to_string()))?; + if res.status() != reqwest::StatusCode::OK { + let error = format!( + "Post query responded with a non-OK response: {}", + res.status() + ); + return Err(RpcSignerError::NetworkError(error)); + } + let reply: Output = res + .json() + .await + .map_err(|err| RpcSignerError::MalformedResponse(err.to_string()))?; + + Ok(reply) + } +} + +mod messages { + use crate::raw_ethereum_tx::TransactionParameters; + use hex::encode; + use serde::{Deserialize, Serialize}; + use zksync_types::{ + eip712_signature::utils::get_eip712_json, Address, EIP712TypedStructure, Eip712Domain, + }; + + #[derive(Debug, Serialize, Deserialize)] + pub struct JsonRpcRequest { + pub id: String, + pub method: String, + pub jsonrpc: String, + pub params: Vec, + } + + impl JsonRpcRequest { + fn create(method: impl ToString, params: Vec) -> Self { + Self { + id: "1".to_owned(), + jsonrpc: "2.0".to_owned(), + method: method.to_string(), + params, + } + } + + /// Returns a list of addresses owned by client. + pub fn accounts() -> Self { + let params = Vec::new(); + Self::create("eth_accounts", params) + } + + // Unlocks the address, after that the server can sign messages and transactions. + pub fn unlock_account(address: Address, password: &str) -> Self { + let params = vec![ + serde_json::to_value(address).expect("serialization fail"), + serde_json::to_value(password).expect("serialization fail"), + ]; + Self::create("personal_unlockAccount", params) + } + + /// The sign method calculates an Ethereum specific signature with: + /// sign(keccak256("\x19Ethereum Signed Message:\n" + len(message) + message))). + /// The address to sign with must be unlocked. 
+ pub fn sign_message(address: Address, message: &[u8]) -> Self { + let params = vec![ + serde_json::to_value(address).expect("serialization fail"), + serde_json::to_value(format!("0x{}", encode(message))).expect("serialization fail"), + ]; + Self::create("eth_sign", params) + } + + /// Signs typed struct using ethereum private key by EIP-712 signature standard. + /// The address to sign with must be unlocked. + pub fn sign_typed_data( + address: Address, + eip712_domain: &Eip712Domain, + typed_struct: &S, + ) -> Self { + let params = vec![ + serde_json::to_value(address).expect("serialization fail"), + get_eip712_json(eip712_domain, typed_struct), + ]; + + Self::create("eth_signTypedData_v3", params) + } + + /// Signs a transaction that can be submitted to the network. + /// The address to sign with must be unlocked. + pub fn sign_transaction(from: Address, tx_data: TransactionParameters) -> Self { + let mut params = Vec::new(); + + // Parameter `To` is optional, so we add it only if it is not None + let tx = if let Some(to) = tx_data.to { + serde_json::json!({ + "from": serde_json::to_value(from).expect("serialization fail"), + "to": serde_json::to_value(to).expect("serialization fail"), + "gas": serde_json::to_value(tx_data.gas).expect("serialization fail"), + "gasPrice": serde_json::to_value(tx_data.gas_price).expect("serialization fail"), + "maxPriorityFeePerGas": serde_json::to_value(tx_data.max_priority_fee_per_gas).expect("serialization fail"), + "maxFeePerGas": serde_json::to_value(tx_data.max_fee_per_gas).expect("serialization fail"), + "value": serde_json::to_value(tx_data.value).expect("serialization fail"), + "data": serde_json::to_value(format!("0x{}", encode(tx_data.data))).expect("serialization fail"), + "nonce": serde_json::to_value(tx_data.nonce).expect("serialization fail"), + }) + } else { + serde_json::json!({ + "from": serde_json::to_value(from).expect("serialization fail"), + "gas": serde_json::to_value(tx_data.gas).expect("serialization 
fail"), + "gasPrice": serde_json::to_value(tx_data.gas_price).expect("serialization fail"), + "maxPriorityFeePerGas": serde_json::to_value(tx_data.max_priority_fee_per_gas).expect("serialization fail"), + "maxFeePerGas": serde_json::to_value(tx_data.max_fee_per_gas).expect("serialization fail"), + "value": serde_json::to_value(tx_data.value).expect("serialization fail"), + "data": serde_json::to_value(format!("0x{}", encode(tx_data.data))).expect("serialization fail"), + "nonce": serde_json::to_value(tx_data.nonce).expect("serialization fail"), + }) + }; + params.push(tx); + Self::create("eth_signTransaction", params) + } + } +} + +#[cfg(test)] +mod tests { + use crate::raw_ethereum_tx::TransactionParameters; + use actix_web::{ + post, + web::{self, Data}, + App, HttpResponse, HttpServer, Responder, + }; + use futures::future::{AbortHandle, Abortable}; + use jsonrpc_core::{Failure, Id, Output, Success, Version}; + use parity_crypto::publickey::{Generator, KeyPair, Random}; + use serde_json::json; + + use zksync_types::{tx::primitives::PackedEthSignature, Address}; + + use super::{is_signature_from_address, messages::JsonRpcRequest}; + use crate::{EthereumSigner, JsonRpcSigner}; + + #[post("/")] + async fn index(req: web::Json, state: web::Data) -> impl Responder { + let resp = match req.method.as_str() { + "eth_accounts" => { + let mut addresses = vec![]; + for pair in &state.key_pairs { + addresses.push(pair.address()) + } + + create_success(json!(addresses)) + } + "personal_unlockAccount" => create_success(json!(true)), + "eth_sign" => { + let _address: Address = serde_json::from_value(req.params[0].clone()).unwrap(); + let data: String = serde_json::from_value(req.params[1].clone()).unwrap(); + let data_bytes = hex::decode(&data[2..]).unwrap(); + let signature = + PackedEthSignature::sign(state.key_pairs[0].secret(), &data_bytes).unwrap(); + create_success(json!(signature)) + } + "eth_signTransaction" => { + let tx_value = 
json!(req.params[0].clone()).to_string(); + let tx = tx_value.as_bytes(); + let hex_data = hex::encode(tx); + create_success(json!({ "raw": hex_data })) + } + _ => create_fail(req.method.clone()), + }; + HttpResponse::Ok().json(json!(resp)) + } + + fn create_fail(method: String) -> Output { + Output::Failure(Failure { + jsonrpc: Some(Version::V2), + error: jsonrpc_core::Error { + code: jsonrpc_core::ErrorCode::ParseError, + message: method, + data: None, + }, + id: Id::Num(1), + }) + } + + fn create_success(result: serde_json::Value) -> Output { + Output::Success(Success { + jsonrpc: Some(Version::V2), + result, + id: Id::Num(1), + }) + } + #[derive(Clone)] + struct State { + key_pairs: Vec, + } + + fn run_server(state: State) -> (String, AbortHandle) { + let mut url = None; + let mut server = None; + for i in 9000..9999 { + let new_url = format!("127.0.0.1:{}", i); + // Try to bind to some port, hope that 999 variants will be enough + let tmp_state = state.clone(); + if let Ok(ser) = HttpServer::new(move || { + App::new() + .app_data(Data::new(tmp_state.clone())) + .service(index) + }) + .bind(new_url.clone()) + { + server = Some(ser); + url = Some(new_url); + break; + } + } + + let server = server.expect("Could not bind to port from 9000 to 9999"); + let (abort_handle, abort_registration) = AbortHandle::new_pair(); + let future = Abortable::new(server.run(), abort_registration); + tokio::spawn(future); + let address = format!("http://{}/", &url.unwrap()); + (address, abort_handle) + } + + #[actix_rt::test] + async fn run_client() { + let (address, abort_handle) = run_server(State { + key_pairs: vec![Random.generate()], + }); + // Get address is ok, unlock address is ok, recover address from signature is also ok + let client = JsonRpcSigner::new(address, None, None, None).await.unwrap(); + let msg = b"some_text_message"; + + let signature = client.sign_message(msg).await.unwrap(); + let signed_bytes = PackedEthSignature::message_to_signed_bytes(msg); + assert!( + 
is_signature_from_address(&signature, &signed_bytes, client.address().unwrap()) + .unwrap() + ); + + let transaction_signature = client + .sign_transaction(TransactionParameters::default()) + .await + .unwrap(); + assert_ne!(transaction_signature.len(), 0); + abort_handle.abort(); + } +} diff --git a/core/lib/eth_signer/src/lib.rs b/core/lib/eth_signer/src/lib.rs new file mode 100644 index 000000000000..1999e217a384 --- /dev/null +++ b/core/lib/eth_signer/src/lib.rs @@ -0,0 +1,26 @@ +use async_trait::async_trait; +use error::SignerError; +use zksync_types::tx::primitives::PackedEthSignature; +use zksync_types::{Address, EIP712TypedStructure, Eip712Domain}; + +use crate::raw_ethereum_tx::TransactionParameters; +pub use json_rpc_signer::JsonRpcSigner; +pub use pk_signer::PrivateKeySigner; + +pub mod error; +pub mod json_rpc_signer; +pub mod pk_signer; +pub mod raw_ethereum_tx; + +#[async_trait] +pub trait EthereumSigner: Send + Sync + Clone { + async fn sign_message(&self, message: &[u8]) -> Result; + async fn sign_typed_data( + &self, + domain: &Eip712Domain, + typed_struct: &S, + ) -> Result; + async fn sign_transaction(&self, raw_tx: TransactionParameters) + -> Result, SignerError>; + async fn get_address(&self) -> Result; +} diff --git a/core/lib/eth_signer/src/pk_signer.rs b/core/lib/eth_signer/src/pk_signer.rs new file mode 100644 index 000000000000..4a5bfb838def --- /dev/null +++ b/core/lib/eth_signer/src/pk_signer.rs @@ -0,0 +1,127 @@ +use secp256k1::SecretKey; + +use zksync_types::tx::primitives::PackedEthSignature; +use zksync_types::{Address, EIP712TypedStructure, Eip712Domain, H256}; + +use crate::{ + raw_ethereum_tx::{Transaction, TransactionParameters}, + {EthereumSigner, SignerError}, +}; + +#[derive(Clone)] +pub struct PrivateKeySigner { + private_key: H256, +} + +impl std::fmt::Debug for PrivateKeySigner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "PrivateKeySigner") + } +} + +impl PrivateKeySigner { + pub fn 
new(private_key: H256) -> Self { + Self { private_key } + } +} + +#[async_trait::async_trait] +impl EthereumSigner for PrivateKeySigner { + /// Get Ethereum address that matches the private key. + async fn get_address(&self) -> Result { + PackedEthSignature::address_from_private_key(&self.private_key) + .map_err(|_| SignerError::DefineAddress) + } + + /// The sign method calculates an Ethereum specific signature with: + /// sign(keccak256("\x19Ethereum Signed Message:\n" + len(message) + message))). + async fn sign_message(&self, message: &[u8]) -> Result { + let signature = PackedEthSignature::sign(&self.private_key, message) + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + Ok(signature) + } + + /// Signs typed struct using ethereum private key by EIP-712 signature standard. + /// Result of this function is the equivalent of RPC calling `eth_signTypedData`. + async fn sign_typed_data( + &self, + domain: &Eip712Domain, + typed_struct: &S, + ) -> Result { + let signature = + PackedEthSignature::sign_typed_data(&self.private_key, domain, typed_struct) + .map_err(|err| SignerError::SigningFailed(err.to_string()))?; + Ok(signature) + } + + /// Signs and returns the RLP-encoded transaction. 
+ async fn sign_transaction( + &self, + raw_tx: TransactionParameters, + ) -> Result, SignerError> { + let key = SecretKey::from_slice(self.private_key.as_bytes()).unwrap(); + + // According to the code in web3 + // We should use max_fee_per_gas as gas_price if we use EIP1559 + let gas_price = raw_tx.max_fee_per_gas; + + let max_priority_fee_per_gas = raw_tx.max_priority_fee_per_gas; + + let tx = Transaction { + to: raw_tx.to, + nonce: raw_tx.nonce, + gas: raw_tx.gas, + gas_price, + value: raw_tx.value, + data: raw_tx.data, + transaction_type: raw_tx.transaction_type, + access_list: raw_tx.access_list.unwrap_or_default(), + max_priority_fee_per_gas, + }; + + let signed = tx.sign(&key, raw_tx.chain_id); + Ok(signed.raw_transaction.0) + } +} + +#[cfg(test)] +mod test { + use super::PrivateKeySigner; + use crate::raw_ethereum_tx::TransactionParameters; + use crate::EthereumSigner; + use zksync_types::{H160, H256, U256, U64}; + + #[tokio::test] + async fn test_generating_signed_raw_transaction() { + let private_key = H256::from([5; 32]); + let signer = PrivateKeySigner::new(private_key); + let raw_transaction = TransactionParameters { + nonce: U256::from(1u32), + to: Some(H160::default()), + gas: Default::default(), + gas_price: Some(U256::from(2u32)), + max_fee_per_gas: U256::from(2u32), + max_priority_fee_per_gas: U256::from(1u32), + value: Default::default(), + data: vec![1, 2, 3], + chain_id: 270, + transaction_type: Some(U64::from(1u32)), + access_list: None, + }; + let raw_tx = signer + .sign_transaction(raw_transaction.clone()) + .await + .unwrap(); + assert_ne!(raw_tx.len(), 1); + // precalculated signature with right algorithm implementation + let precalculated_raw_tx: Vec = vec![ + 1, 248, 100, 130, 1, 14, 1, 2, 128, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 128, 131, 1, 2, 3, 192, 1, 160, 98, 201, 238, 158, 215, 98, 23, 231, + 221, 161, 170, 16, 54, 85, 187, 107, 12, 228, 218, 139, 103, 164, 17, 196, 178, 185, + 252, 243, 186, 175, 
93, 230, 160, 93, 204, 205, 5, 46, 187, 231, 211, 102, 133, 200, + 254, 119, 94, 206, 81, 8, 143, 204, 14, 138, 43, 183, 214, 209, 166, 16, 116, 176, 44, + 52, 133, + ]; + assert_eq!(raw_tx, precalculated_raw_tx); + } +} diff --git a/core/lib/eth_signer/src/raw_ethereum_tx.rs b/core/lib/eth_signer/src/raw_ethereum_tx.rs new file mode 100644 index 000000000000..fcee13494456 --- /dev/null +++ b/core/lib/eth_signer/src/raw_ethereum_tx.rs @@ -0,0 +1,220 @@ +//! This file is a copy-paste from https://github.com/tomusdrw/rust-web3/blob/master/src/api/accounts.rs#L39 +//! We want to use our own Signer, which is independent of Transaction Sender. +//! Unfortunately, it is impossible to use public interfaces from web3 library. +//! The only thing which has been changed is optional parameters, which are necessary for signing transactions. +//! In the library, they are filling using eth_node. +//! +//! I see no big difference between transaction and transaction parameters. +//! We can refactor this code and adapt it for our needs better, but I prefer to reuse as much code as we can. +//! In the case where it will be possible to use only the web3 library without copy-paste, the changes will be small and simple +//! Link to @Deniallugo's PR to web3: https://github.com/tomusdrw/rust-web3/pull/630 +use rlp::RlpStream; +use zksync_types::web3::{ + signing::{self, Signature}, + types::{AccessList, SignedTransaction}, +}; +use zksync_types::{ethabi::Address, U256, U64}; + +const LEGACY_TX_ID: u64 = 0; +const ACCESSLISTS_TX_ID: u64 = 1; +const EIP1559_TX_ID: u64 = 2; + +#[derive(Clone, Debug, PartialEq, Default)] +pub struct TransactionParameters { + /// Transaction nonce + pub nonce: U256, + /// To address + pub to: Option
<Address>,
+    /// Supplied gas
+    pub gas: U256,
+    /// Gas price (None for estimated gas price)
+    pub gas_price: Option<U256>,
+    /// Transferred value
+    pub value: U256,
+    /// Data
+    pub data: Vec<u8>,
+    /// The chain ID
+    pub chain_id: u64,
+    /// Transaction type, Some(1) for AccessList transaction, None for Legacy
+    pub transaction_type: Option<U64>,
+    /// Access list
+    pub access_list: Option<AccessList>,
+    /// Max fee per gas
+    pub max_fee_per_gas: U256,
+    /// miner bribe
+    pub max_priority_fee_per_gas: U256,
+}
+
+/// A transaction used for RLP encoding, hashing and signing.
+#[derive(Debug)]
+pub struct Transaction {
+    pub to: Option<Address>
, + pub nonce: U256, + pub gas: U256, + pub gas_price: U256, + pub value: U256, + pub data: Vec, + pub transaction_type: Option, + pub access_list: AccessList, + pub max_priority_fee_per_gas: U256, +} + +impl Transaction { + fn rlp_append_legacy(&self, stream: &mut RlpStream) { + stream.append(&self.nonce); + stream.append(&self.gas_price); + stream.append(&self.gas); + if let Some(to) = self.to { + stream.append(&to); + } else { + stream.append(&""); + } + stream.append(&self.value); + stream.append(&self.data); + } + + fn encode_legacy(&self, chain_id: u64, signature: Option<&Signature>) -> RlpStream { + let mut stream = RlpStream::new(); + stream.begin_list(9); + + self.rlp_append_legacy(&mut stream); + + if let Some(signature) = signature { + self.rlp_append_signature(&mut stream, signature); + } else { + stream.append(&chain_id); + stream.append(&0u8); + stream.append(&0u8); + } + + stream + } + + fn encode_access_list_payload( + &self, + chain_id: u64, + signature: Option<&Signature>, + ) -> RlpStream { + let mut stream = RlpStream::new(); + + let list_size = if signature.is_some() { 11 } else { 8 }; + stream.begin_list(list_size); + + // append chain_id. from EIP-2930: chainId is defined to be an integer of arbitrary size. + stream.append(&chain_id); + + self.rlp_append_legacy(&mut stream); + self.rlp_append_access_list(&mut stream); + + if let Some(signature) = signature { + self.rlp_append_signature(&mut stream, signature); + } + + stream + } + + fn encode_eip1559_payload(&self, chain_id: u64, signature: Option<&Signature>) -> RlpStream { + let mut stream = RlpStream::new(); + + let list_size = if signature.is_some() { 12 } else { 9 }; + stream.begin_list(list_size); + + // append chain_id. from EIP-2930: chainId is defined to be an integer of arbitrary size. 
+ stream.append(&chain_id); + + stream.append(&self.nonce); + stream.append(&self.max_priority_fee_per_gas); + stream.append(&self.gas_price); + stream.append(&self.gas); + if let Some(to) = self.to { + stream.append(&to); + } else { + stream.append(&""); + } + stream.append(&self.value); + stream.append(&self.data); + + self.rlp_append_access_list(&mut stream); + + if let Some(signature) = signature { + self.rlp_append_signature(&mut stream, signature); + } + + stream + } + + fn rlp_append_signature(&self, stream: &mut RlpStream, signature: &Signature) { + stream.append(&signature.v); + stream.append(&U256::from_big_endian(signature.r.as_bytes())); + stream.append(&U256::from_big_endian(signature.s.as_bytes())); + } + + fn rlp_append_access_list(&self, stream: &mut RlpStream) { + stream.begin_list(self.access_list.len()); + for access in self.access_list.iter() { + stream.begin_list(2); + stream.append(&access.address); + stream.begin_list(access.storage_keys.len()); + for storage_key in access.storage_keys.iter() { + stream.append(storage_key); + } + } + } + + fn encode(&self, chain_id: u64, signature: Option<&Signature>) -> Vec { + match self.transaction_type.map(|t| t.as_u64()) { + Some(LEGACY_TX_ID) | None => { + let stream = self.encode_legacy(chain_id, signature); + stream.out().to_vec() + } + + Some(ACCESSLISTS_TX_ID) => { + let tx_id: u8 = ACCESSLISTS_TX_ID as u8; + let stream = self.encode_access_list_payload(chain_id, signature); + [&[tx_id], stream.as_raw()].concat() + } + + Some(EIP1559_TX_ID) => { + let tx_id: u8 = EIP1559_TX_ID as u8; + let stream = self.encode_eip1559_payload(chain_id, signature); + [&[tx_id], stream.as_raw()].concat() + } + + _ => { + panic!("Unsupported transaction type"); + } + } + } + + /// Sign and return a raw signed transaction. 
+ pub fn sign(self, sign: impl signing::Key, chain_id: u64) -> SignedTransaction { + let adjust_v_value = matches!( + self.transaction_type.map(|t| t.as_u64()), + Some(LEGACY_TX_ID) | None + ); + + let encoded = self.encode(chain_id, None); + + let hash = signing::keccak256(encoded.as_ref()); + + let signature = if adjust_v_value { + sign.sign(&hash, Some(chain_id)) + .expect("hash is non-zero 32-bytes; qed") + } else { + sign.sign_message(&hash) + .expect("hash is non-zero 32-bytes; qed") + }; + + let signed = self.encode(chain_id, Some(&signature)); + let transaction_hash = signing::keccak256(signed.as_ref()).into(); + + SignedTransaction { + message_hash: hash.into(), + v: signature.v, + r: signature.r, + s: signature.s, + raw_transaction: signed.into(), + transaction_hash, + } + } +} diff --git a/core/lib/mempool/Cargo.toml b/core/lib/mempool/Cargo.toml new file mode 100644 index 000000000000..45259f85f162 --- /dev/null +++ b/core/lib/mempool/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "zksync_mempool" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_types = { path = "../types", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } +metrics = "0.20" diff --git a/core/lib/mempool/src/lib.rs b/core/lib/mempool/src/lib.rs new file mode 100644 index 000000000000..7e72fa537741 --- /dev/null +++ b/core/lib/mempool/src/lib.rs @@ -0,0 +1,6 @@ +mod mempool_store; +#[cfg(test)] +mod tests; +mod types; +pub use mempool_store::{MempoolInfo, MempoolStore}; +pub use types::L2TxFilter; diff --git a/core/lib/mempool/src/mempool_store.rs b/core/lib/mempool/src/mempool_store.rs new file mode 100644 index 000000000000..7b5899df1b8e --- /dev/null +++ b/core/lib/mempool/src/mempool_store.rs @@ -0,0 +1,239 @@ +use 
crate::types::{AccountTransactions, L2TxFilter, MempoolScore}; +use std::collections::hash_map::Entry; +use std::collections::{BTreeSet, HashMap, HashSet}; + +use zksync_types::{ + l1::L1Tx, l2::L2Tx, Address, ExecuteTransactionCommon, Nonce, PriorityOpId, Transaction, +}; + +#[derive(Debug, Default)] +pub struct MempoolStore { + /// Pending L1 transactions + l1_transactions: HashMap, + /// Pending L2 transactions grouped by initiator address + l2_transactions_per_account: HashMap, + /// Global priority queue for L2 transactions. Used for scoring + l2_priority_queue: BTreeSet, + /// Next priority operation + next_priority_id: PriorityOpId, + stashed_accounts: Vec
<Address>,
+    /// number of l2 transactions in the mempool
+    size: u64,
+    capacity: u64,
+}
+
+#[derive(Debug)]
+pub struct MempoolInfo {
+    pub stashed_accounts: Vec<Address>,
+    pub purged_accounts: Vec<Address>
, +} + +impl MempoolStore { + pub fn new(next_priority_id: PriorityOpId, capacity: u64) -> Self { + Self { + l1_transactions: HashMap::new(), + l2_transactions_per_account: HashMap::new(), + l2_priority_queue: BTreeSet::new(), + next_priority_id, + stashed_accounts: vec![], + size: 0, + capacity, + } + } + + /// Inserts batch of new transactions to mempool + /// `initial_nonces` provides current committed nonce information to mempool + /// variable is used only if account is not present in mempool yet and we have to bootstrap it + /// in other cases mempool relies on state keeper and its internal state to keep that info up to date + pub fn insert( + &mut self, + transactions: Vec, + initial_nonces: HashMap, + ) { + for transaction in transactions { + let Transaction { + common_data, + execute, + received_timestamp_ms, + } = transaction; + match common_data { + ExecuteTransactionCommon::L1(data) => { + vlog::trace!("inserting L1 transaction {}", data.serial_id); + self.l1_transactions.insert( + data.serial_id, + L1Tx { + execute, + common_data: data, + received_timestamp_ms, + }, + ); + } + ExecuteTransactionCommon::L2(data) => { + vlog::trace!("inserting L2 transaction {}", data.nonce); + self.insert_l2_transaction( + L2Tx { + execute, + common_data: data, + received_timestamp_ms, + }, + &initial_nonces, + ); + } + } + } + self.collect_stats(); + } + + fn insert_l2_transaction( + &mut self, + transaction: L2Tx, + initial_nonces: &HashMap, + ) { + let account = transaction.initiator_account(); + + let metadata = match self.l2_transactions_per_account.entry(account) { + Entry::Occupied(mut txs) => txs.get_mut().insert(transaction), + Entry::Vacant(entry) => { + let account_nonce = initial_nonces.get(&account).cloned().unwrap_or(Nonce(0)); + entry + .insert(AccountTransactions::new(account_nonce)) + .insert(transaction) + } + }; + if let Some(score) = metadata.previous_score { + self.l2_priority_queue.remove(&score); + } + if let Some(score) = metadata.new_score { + 
self.l2_priority_queue.insert(score); + } + if metadata.is_new { + self.size += 1; + } + } + + /// Returns `true` if there is a transaction in the mempool satisfying the filter. + pub fn has_next(&self, filter: &L2TxFilter) -> bool { + self.l1_transactions.get(&self.next_priority_id).is_some() + || self + .l2_priority_queue + .iter() + .rfind(|el| el.matches_filter(filter)) + .is_some() + } + + /// Returns next transaction for execution from mempool + pub fn next_transaction(&mut self, filter: &L2TxFilter) -> Option { + if let Some(transaction) = self.l1_transactions.remove(&self.next_priority_id) { + self.next_priority_id += 1; + return Some(transaction.into()); + } + + let mut removed = 0; + // We want to fetch the next transaction that would match the fee requirements. + let tx_pointer = self + .l2_priority_queue + .iter() + .rfind(|el| el.matches_filter(filter))? + .clone(); + + // Stash all observed transactions that don't meet criteria + for stashed_pointer in self + .l2_priority_queue + .split_off(&tx_pointer) + .into_iter() + .skip(1) + { + removed += self + .l2_transactions_per_account + .remove(&stashed_pointer.account) + .expect("mempool: dangling pointer in priority queue") + .len(); + + self.stashed_accounts.push(stashed_pointer.account); + } + // insert pointer to the next transaction if it exists + let (transaction, score) = self + .l2_transactions_per_account + .get_mut(&tx_pointer.account) + .expect("mempool: dangling pointer in priority queue") + .next(); + + if let Some(score) = score { + self.l2_priority_queue.insert(score); + } + self.size = self + .size + .checked_sub((removed + 1) as u64) + .expect("mempool size can't be negative"); + self.collect_stats(); + Some(transaction.into()) + } + + /// When a state_keeper starts the block over after a rejected transaction, + /// we have to rollback the nonces/ids in the mempool and + /// reinsert the transactions from the block back into mempool. 
+ pub fn rollback(&mut self, tx: &Transaction) { + // rolling back the nonces and priority ids + match &tx.common_data { + ExecuteTransactionCommon::L1(data) => { + // reset next priority id + self.next_priority_id = self.next_priority_id.min(data.serial_id); + } + ExecuteTransactionCommon::L2(_) => { + if let Some(score) = self + .l2_transactions_per_account + .get_mut(&tx.initiator_account()) + .expect("account is not available in mempool") + .reset(tx) + { + self.l2_priority_queue.remove(&score); + } + } + } + } + + pub fn get_mempool_info(&mut self) -> MempoolInfo { + MempoolInfo { + stashed_accounts: std::mem::take(&mut self.stashed_accounts), + purged_accounts: self.gc(), + } + } + + fn collect_stats(&self) { + metrics::gauge!( + "server.state_keeper.mempool_l1_size", + self.l1_transactions.len() as f64 + ); + metrics::gauge!("server.state_keeper.mempool_l2_size", self.size as f64); + metrics::gauge!( + "server.state_keeper.mempool_l2_priority_queue_size", + self.l2_priority_queue.len() as f64 + ); + } + + #[cfg(test)] + pub fn size(&self) -> u64 { + self.size + } + + fn gc(&mut self) -> Vec
{ + if self.size >= self.capacity { + let index: HashSet<_> = self + .l2_priority_queue + .iter() + .map(|pointer| pointer.account) + .collect(); + let transactions = std::mem::take(&mut self.l2_transactions_per_account); + let (kept, drained) = transactions + .into_iter() + .partition(|(address, _)| index.contains(address)); + self.l2_transactions_per_account = kept; + self.size = self + .l2_transactions_per_account + .iter() + .fold(0, |agg, (_, tnxs)| agg + tnxs.len() as u64); + return drained.into_keys().collect(); + } + vec![] + } +} diff --git a/core/lib/mempool/src/tests.rs b/core/lib/mempool/src/tests.rs new file mode 100644 index 000000000000..b8f174cbf450 --- /dev/null +++ b/core/lib/mempool/src/tests.rs @@ -0,0 +1,435 @@ +use crate::{mempool_store::MempoolStore, types::L2TxFilter}; +use std::collections::{HashMap, HashSet}; +use std::iter::FromIterator; +use zksync_types::fee::Fee; +use zksync_types::helpers::unix_timestamp_ms; +use zksync_types::l1::{OpProcessingType, PriorityQueueType}; +use zksync_types::l2::L2Tx; +use zksync_types::{Address, ExecuteTransactionCommon, L1TxCommonData, PriorityOpId, H256, U256}; +use zksync_types::{Execute, Nonce, Transaction}; + +#[test] +fn basic_flow() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account0 = Address::random(); + let account1 = Address::random(); + let transactions = vec![ + gen_l2_tx(account0, Nonce(0)), + gen_l2_tx(account0, Nonce(1)), + gen_l2_tx(account0, Nonce(2)), + gen_l2_tx(account0, Nonce(3)), + gen_l2_tx(account1, Nonce(1)), + ]; + assert_eq!(mempool.next_transaction(&L2TxFilter::default()), None); + mempool.insert(transactions, HashMap::new()); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account0, 0) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account0, 1) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account0, 2) + ); + assert_eq!( + 
view(mempool.next_transaction(&L2TxFilter::default())), + (account0, 3) + ); + assert_eq!(mempool.next_transaction(&L2TxFilter::default()), None); + // unclog second account and insert more txns + mempool.insert( + vec![gen_l2_tx(account1, Nonce(0)), gen_l2_tx(account0, Nonce(3))], + HashMap::new(), + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account1, 0) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account1, 1) + ); + assert_eq!(mempool.next_transaction(&L2TxFilter::default()), None); +} + +#[test] +fn missing_txns() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account = Address::random(); + let transactions = vec![ + gen_l2_tx(account, Nonce(6)), + gen_l2_tx(account, Nonce(7)), + gen_l2_tx(account, Nonce(9)), + ]; + let mut nonces = HashMap::new(); + nonces.insert(account, Nonce(5)); + mempool.insert(transactions, nonces); + assert_eq!(mempool.next_transaction(&L2TxFilter::default()), None); + // missing transaction unclogs mempool + mempool.insert(vec![gen_l2_tx(account, Nonce(5))], HashMap::new()); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 5) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 6) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 7) + ); + + // filling remaining gap + mempool.insert(vec![gen_l2_tx(account, Nonce(8))], HashMap::new()); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 8) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 9) + ); +} + +#[test] +fn prioritize_l1_txns() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account = Address::random(); + let transactions = vec![ + gen_l2_tx(account, Nonce(0)), + gen_l2_tx(account, Nonce(1)), + gen_l1_tx(PriorityOpId(0)), + ]; + mempool.insert(transactions, HashMap::new()); + 
assert!(mempool + .next_transaction(&L2TxFilter::default()) + .unwrap() + .is_l1()) +} + +#[test] +fn l1_txns_priority_id() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let transactions = vec![ + gen_l1_tx(PriorityOpId(1)), + gen_l1_tx(PriorityOpId(2)), + gen_l1_tx(PriorityOpId(3)), + ]; + mempool.insert(transactions, HashMap::new()); + assert!(mempool.next_transaction(&L2TxFilter::default()).is_none()); + mempool.insert(vec![gen_l1_tx(PriorityOpId(0))], HashMap::new()); + for idx in 0..4 { + let data = mempool + .next_transaction(&L2TxFilter::default()) + .unwrap() + .common_data; + match data { + ExecuteTransactionCommon::L1(data) => { + assert_eq!(data.serial_id, PriorityOpId(idx as u64)); + } + _ => unreachable!("expected L1 transaction"), + } + } +} + +#[test] +fn rejected_tx() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account = Address::random(); + let transactions = vec![ + gen_l2_tx(account, Nonce(0)), + gen_l2_tx(account, Nonce(1)), + gen_l2_tx(account, Nonce(2)), + gen_l2_tx(account, Nonce(3)), + gen_l2_tx(account, Nonce(5)), + ]; + mempool.insert(transactions, HashMap::new()); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 0) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 1) + ); + + mempool.rollback(&gen_l2_tx(account, Nonce(1))); + assert!(mempool.next_transaction(&L2TxFilter::default()).is_none()); + + // replace transaction and unblock account + mempool.insert(vec![gen_l2_tx(account, Nonce(1))], HashMap::new()); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 1) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 2) + ); + assert_eq!( + view(mempool.next_transaction(&L2TxFilter::default())), + (account, 3) + ); +} + +#[test] +fn replace_tx() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account = Address::random(); + 
mempool.insert(vec![gen_l2_tx(account, Nonce(0))], HashMap::new()); + // replace it + mempool.insert( + vec![gen_l2_tx_with_timestamp( + account, + Nonce(0), + unix_timestamp_ms() + 10, + )], + HashMap::new(), + ); + assert!(mempool.next_transaction(&L2TxFilter::default()).is_some()); + assert!(mempool.next_transaction(&L2TxFilter::default()).is_none()); +} + +#[test] +fn two_ready_txs() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account0 = Address::random(); + let account1 = Address::random(); + let transactions = vec![gen_l2_tx(account0, Nonce(0)), gen_l2_tx(account1, Nonce(0))]; + mempool.insert(transactions, HashMap::new()); + assert_eq!( + HashSet::<(_, _)>::from_iter(vec![ + view(mempool.next_transaction(&L2TxFilter::default())), + view(mempool.next_transaction(&L2TxFilter::default())) + ]), + HashSet::<(_, _)>::from_iter(vec![(account0, 0), (account1, 0)].into_iter()), + ); +} + +#[test] +fn mempool_size() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account0 = Address::random(); + let account1 = Address::random(); + let transactions = vec![ + gen_l2_tx(account0, Nonce(0)), + gen_l2_tx(account0, Nonce(1)), + gen_l2_tx(account0, Nonce(2)), + gen_l2_tx(account0, Nonce(3)), + gen_l2_tx(account1, Nonce(1)), + ]; + mempool.insert(transactions, HashMap::new()); + assert_eq!(mempool.size(), 5); + // replacement + mempool.insert(vec![gen_l2_tx(account0, Nonce(2))], HashMap::new()); + assert_eq!(mempool.size(), 5); + // load next + mempool.next_transaction(&L2TxFilter::default()); + mempool.next_transaction(&L2TxFilter::default()); + assert_eq!(mempool.size(), 3); +} + +/// Checks whether filtering transactions based on their fee works as expected. +#[test] +fn filtering() { + // Filter to find transactions with non-zero `gas_per_pubdata` values. + let filter_non_zero = L2TxFilter { + l1_gas_price: 0u64, + fee_per_gas: 0u64, + gas_per_pubdata: 1u32, + }; + // No-op filter that fetches any transaction. 
+ let filter_zero = L2TxFilter { + l1_gas_price: 0u64, + fee_per_gas: 0u64, + gas_per_pubdata: 0u32, + }; + + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account0 = Address::random(); + let account1 = Address::random(); + + // First account will have two transactions: one with too low pubdata price and one with the right value. + // Second account will have just one transaction with the right value. + mempool.insert( + gen_transactions_for_filtering(vec![ + (account0, Nonce(0), unix_timestamp_ms(), 0), + (account0, Nonce(1), unix_timestamp_ms(), 1), + (account1, Nonce(0), unix_timestamp_ms() - 10, 1), + ]), + HashMap::new(), + ); + + // First transaction from first account doesn't match the filter, so we should get the transaction + // from the second account. + assert_eq!( + view(mempool.next_transaction(&filter_non_zero)), + (account1, 0) + ); + // No more transactions can be executed with the non-zero filter. + assert_eq!(mempool.next_transaction(&filter_non_zero), None); + + // Now we apply zero filter and get the transaction as expected. + assert_eq!(view(mempool.next_transaction(&filter_zero)), (account0, 0)); + assert_eq!(view(mempool.next_transaction(&filter_zero)), (account0, 1)); + assert_eq!(mempool.next_transaction(&filter_zero), None); +} + +#[test] +fn stashed_accounts() { + let filter_non_zero = L2TxFilter { + l1_gas_price: 0u64, + fee_per_gas: 0u64, + gas_per_pubdata: 1u32, + }; + // No-op filter that fetches any transaction. 
+ let filter_zero = L2TxFilter { + l1_gas_price: 0u64, + fee_per_gas: 0u64, + gas_per_pubdata: 0u32, + }; + let mut mempool = MempoolStore::new(PriorityOpId(0), 100); + let account0 = Address::random(); + let account1 = Address::random(); + + mempool.insert( + gen_transactions_for_filtering(vec![ + (account0, Nonce(0), unix_timestamp_ms(), 0), + (account0, Nonce(1), unix_timestamp_ms(), 1), + (account1, Nonce(0), unix_timestamp_ms() + 10, 1), + ]), + HashMap::new(), + ); + assert!(mempool.get_mempool_info().stashed_accounts.is_empty()); + assert_eq!( + view(mempool.next_transaction(&filter_non_zero)), + (account1, 0) + ); + assert_eq!(mempool.get_mempool_info().stashed_accounts, vec![account0]); + assert!(mempool.next_transaction(&filter_zero).is_none()); +} + +#[test] +fn mempool_capacity() { + let mut mempool = MempoolStore::new(PriorityOpId(0), 5); + let account0 = Address::random(); + let account1 = Address::random(); + let account2 = Address::random(); + let transactions = vec![ + gen_l2_tx(account0, Nonce(0)), + gen_l2_tx(account0, Nonce(1)), + gen_l2_tx(account0, Nonce(2)), + gen_l2_tx(account1, Nonce(1)), + gen_l2_tx(account2, Nonce(1)), + ]; + mempool.insert(transactions, HashMap::new()); + // the mempool is full. 
Accounts with non-sequential nonces got stashed + assert_eq!( + HashSet::<_>::from_iter(mempool.get_mempool_info().purged_accounts), + HashSet::<_>::from_iter(vec![account1, account2]), + ); + // verify that existing good-to-go transactions and new ones got picked + mempool.insert( + vec![gen_l2_tx_with_timestamp( + account1, + Nonce(0), + unix_timestamp_ms() + 1, + )], + HashMap::new(), + ); + for _ in 0..3 { + assert_eq!( + mempool + .next_transaction(&L2TxFilter::default()) + .unwrap() + .initiator_account(), + account0 + ); + } + assert_eq!( + mempool + .next_transaction(&L2TxFilter::default()) + .unwrap() + .initiator_account(), + account1 + ); +} + +fn gen_l2_tx(address: Address, nonce: Nonce) -> Transaction { + gen_l2_tx_with_timestamp(address, nonce, unix_timestamp_ms()) +} + +fn gen_l2_tx_with_timestamp(address: Address, nonce: Nonce, received_at_ms: u64) -> Transaction { + let mut txn = L2Tx::new( + Address::default(), + Vec::new(), + nonce, + Fee::default(), + address, + U256::zero(), + None, + Default::default(), + ); + txn.received_timestamp_ms = received_at_ms; + txn.into() +} + +fn gen_l1_tx(priority_id: PriorityOpId) -> Transaction { + let execute = Execute { + contract_address: Address::repeat_byte(0x11), + calldata: vec![1, 2, 3], + factory_deps: None, + value: U256::zero(), + }; + let op_data = L1TxCommonData { + sender: Address::random(), + serial_id: priority_id, + deadline_block: 100000, + layer_2_tip_fee: U256::zero(), + full_fee: U256::zero(), + gas_limit: U256::zero(), + gas_per_pubdata_limit: U256::one(), + op_processing_type: OpProcessingType::Common, + priority_queue_type: PriorityQueueType::Deque, + eth_hash: H256::zero(), + eth_block: 1, + canonical_tx_hash: H256::zero(), + to_mint: U256::zero(), + refund_recipient: Address::random(), + }; + + Transaction { + common_data: ExecuteTransactionCommon::L1(op_data), + execute, + received_timestamp_ms: 0, + } +} + +fn view(transaction: Option) -> (Address, u32) { + let tx = 
transaction.unwrap(); + (tx.initiator_account(), tx.nonce().unwrap().0) +} + +fn gen_transactions_for_filtering(input: Vec<(Address, Nonce, u64, u32)>) -> Vec { + // Helper function to conveniently set `max_gas_per_pubdata_byte`. + fn set_max_gas_per_pubdata_byte(tx: &mut Transaction, value: u32) { + match &mut tx.common_data { + ExecuteTransactionCommon::L2(data) => { + data.fee.gas_per_pubdata_limit = U256::from(value) + } + _ => unreachable!(), + }; + } + input + .into_iter() + .map(|(account, nonce, tst, max_gas_per_pubdata)| { + let mut tx = gen_l2_tx_with_timestamp(account, nonce, tst); + set_max_gas_per_pubdata_byte(&mut tx, max_gas_per_pubdata); + tx + }) + .collect() +} diff --git a/core/lib/mempool/src/types.rs b/core/lib/mempool/src/types.rs new file mode 100644 index 000000000000..6140229ef90c --- /dev/null +++ b/core/lib/mempool/src/types.rs @@ -0,0 +1,201 @@ +use std::cmp::Ordering; +use std::collections::HashMap; +use zksync_types::fee::Fee; +use zksync_types::l2::L2Tx; +use zksync_types::{Address, Nonce, Transaction, U256}; + +/// Pending mempool transactions of account +#[derive(Debug)] +pub(crate) struct AccountTransactions { + /// transactions that belong to given account keyed by transaction nonce + transactions: HashMap, + /// account nonce in mempool + /// equals to committed nonce in db + number of transactions sent to state keeper + nonce: Nonce, +} + +impl AccountTransactions { + pub fn new(nonce: Nonce) -> Self { + Self { + transactions: HashMap::new(), + nonce, + } + } + + /// Inserts new transaction for given account. 
Returns insertion metadata + pub fn insert(&mut self, transaction: L2Tx) -> InsertionMetadata { + let mut metadata = InsertionMetadata::default(); + let nonce = transaction.common_data.nonce; + // skip insertion if transaction is old + if nonce < self.nonce { + return metadata; + } + let new_score = Self::score_for_transaction(&transaction); + let previous_score = self + .transactions + .insert(nonce, transaction) + .map(|tx| Self::score_for_transaction(&tx)); + metadata.is_new = previous_score.is_none(); + if nonce == self.nonce { + metadata.new_score = Some(new_score); + metadata.previous_score = previous_score; + } + metadata + } + + /// Returns next transaction to be included in block and optional score of its successor + /// Panics if no such transaction exists + pub fn next(&mut self) -> (L2Tx, Option) { + let transaction = self + .transactions + .remove(&self.nonce) + .expect("missing transaction in mempool"); + self.nonce += 1; + let score = self + .transactions + .get(&self.nonce) + .map(Self::score_for_transaction); + (transaction, score) + } + + /// Handles transaction rejection. Returns optional score of its successor + pub fn reset(&mut self, transaction: &Transaction) -> Option { + // current nonce for the group needs to be reset + let tx_nonce = transaction + .nonce() + .expect("nonce is not set for L2 transaction"); + self.nonce = self.nonce.min(tx_nonce); + self.transactions + .get(&(tx_nonce + 1)) + .map(Self::score_for_transaction) + } + + pub fn len(&self) -> usize { + self.transactions.len() + } + + fn score_for_transaction(transaction: &L2Tx) -> MempoolScore { + MempoolScore { + account: transaction.initiator_account(), + received_at_ms: transaction.received_timestamp_ms, + fee_data: transaction.common_data.fee.clone(), + } + } +} + +/// Mempool score of transaction. 
Used to prioritize L2 transactions in mempool +/// Currently trivial ordering is used based on received at timestamp +#[derive(Eq, PartialEq, Clone, Debug, Hash)] +pub struct MempoolScore { + pub account: Address, + pub received_at_ms: u64, + // Not used for actual scoring, but state keeper would request + // transactions that have acceptable fee values (so transactions + // with fee too low would be ignored until prices go down). + pub fee_data: Fee, +} + +impl MempoolScore { + /// Checks whether transaction matches requirements provided by state keeper. + pub fn matches_filter(&self, filter: &L2TxFilter) -> bool { + self.fee_data.max_fee_per_gas >= U256::from(filter.fee_per_gas) + && self.fee_data.gas_per_pubdata_limit >= U256::from(filter.gas_per_pubdata) + } +} + +impl Ord for MempoolScore { + fn cmp(&self, other: &MempoolScore) -> Ordering { + match self.received_at_ms.cmp(&other.received_at_ms).reverse() { + Ordering::Equal => {} + ordering => return ordering, + } + self.account.cmp(&other.account) + } +} + +impl PartialOrd for MempoolScore { + fn partial_cmp(&self, other: &MempoolScore) -> Option { + Some(self.cmp(other)) + } +} + +#[derive(Debug, Default)] +pub(crate) struct InsertionMetadata { + pub new_score: Option, + pub previous_score: Option, + pub is_new: bool, +} + +/// Structure that can be used by state keeper to describe +/// criteria for transaction it wants to fetch. +#[derive(Debug, Default)] +pub struct L2TxFilter { + /// L1 gas price. + pub l1_gas_price: u64, + /// Effective fee price for the transaction. The price of 1 gas in wei. + pub fee_per_gas: u64, + /// Effective pubdata price in gas for transaction. The number of gas per 1 pubdata byte. + pub gas_per_pubdata: u32, +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Checks the filter logic. 
+ #[test] + fn filter() { + fn filter(l1_gas_price: u64, fee_per_gas: u64, gas_per_pubdata: u32) -> L2TxFilter { + L2TxFilter { + l1_gas_price, + fee_per_gas, + gas_per_pubdata, + } + } + + const MAX_FEE_PER_GAS: u64 = 100u64; + const MAX_PRIORITY_FEE_PER_GAS: u32 = 100u32; + const GAS_PER_PUBDATA_LIMIT: u32 = 100u32; + + let score = MempoolScore { + account: Address::random(), + received_at_ms: Default::default(), // Not important + fee_data: Fee { + gas_limit: Default::default(), // Not important + max_fee_per_gas: U256::from(MAX_FEE_PER_GAS), + max_priority_fee_per_gas: U256::from(MAX_PRIORITY_FEE_PER_GAS), + gas_per_pubdata_limit: U256::from(GAS_PER_PUBDATA_LIMIT), + }, + }; + + let noop_filter = filter(0, 0, 0); + assert!( + score.matches_filter(&noop_filter), + "Noop filter should always match" + ); + + let max_gas_filter = filter(0, MAX_FEE_PER_GAS, 0); + assert!( + score.matches_filter(&max_gas_filter), + "Correct max gas should be accepted" + ); + + let pubdata_filter = filter(0, 0, GAS_PER_PUBDATA_LIMIT); + assert!( + score.matches_filter(&pubdata_filter), + "Correct pubdata price should be accepted" + ); + + let decline_gas_filter = filter(0, MAX_FEE_PER_GAS + 1, 0); + assert!( + !score.matches_filter(&decline_gas_filter), + "Incorrect max gas should be rejected" + ); + + let decline_pubdata_filter = filter(0, 0, GAS_PER_PUBDATA_LIMIT + 1); + assert!( + !score.matches_filter(&decline_pubdata_filter), + "Incorrect pubdata price should be rejected" + ); + } +} diff --git a/core/lib/merkle_tree/Cargo.toml b/core/lib/merkle_tree/Cargo.toml new file mode 100644 index 000000000000..f040e13c9503 --- /dev/null +++ b/core/lib/merkle_tree/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "zksync_merkle_tree" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + 
+[dependencies] +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_crypto = { path = "../../lib/crypto", version = "1.0" } +zksync_storage = { path = "../../lib/storage", version = "1.0", default-features = false } +zksync_utils = { path = "../../lib/utils", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } + +vlog = { path = "../../lib/vlog", version = "1.0" } + +itertools = "0.10" +rayon = "1.3.0" +anyhow = "1.0" +futures = "0.3" +once_cell = "1.7" +thiserror = "1.0" +bincode = "1" +fnv = "1.0.3" +serde = "1.0.90" +async-trait = "0.1" +metrics = "0.20" +byteorder = "1.3" +tokio = { version = "1", features = ["full"] } + +[dev-dependencies] +rand = "0.4" +serde_json = "1.0.0" +criterion = "0.3.0" +tempfile = "3.0.2" diff --git a/core/lib/merkle_tree/src/iter_ext/merge_join_with_max_predecessor.rs b/core/lib/merkle_tree/src/iter_ext/merge_join_with_max_predecessor.rs new file mode 100644 index 000000000000..7b0524d999c5 --- /dev/null +++ b/core/lib/merkle_tree/src/iter_ext/merge_join_with_max_predecessor.rs @@ -0,0 +1,351 @@ +use core::{cmp::Ordering, iter}; +use itertools::Either; + +/// Iterator produced by [.merge_join_with_max_predecessor()](`super::IteratorExt::merge_join_with_max_predecessor`). +/// Merges two iterators with the same `Self::Item` types, emitting ordered items from both of them +/// along with optional maximum predecessor for each item from another iterator. +pub struct MergeJoinWithMaxPredecessor +where + LI: Iterator, + RI: Iterator, + CmpF: Fn(&LI::Item, &LI::Item) -> Ordering, + MapF: Fn(&LI::Item) -> Pred, +{ + left_iter: iter::Peekable>, + right_iter: iter::Peekable>, + cmp_f: CmpF, + map_f: MapF, + last: Option>, + last_left: Option, + last_right: Option, +} + +impl MergeJoinWithMaxPredecessor +where + LI: Iterator, + RI: Iterator, + CmpF: Fn(&LI::Item, &LI::Item) -> Ordering, + MapF: Fn(&LI::Item) -> Pred, +{ + /// Instantiates `MergeJoinWithMaxPredecessor` with given params. 
+ /// + /// - `left_iter` - first iterator to be used in merge. Has a higher priority to pick the first element from if they're equal. + /// - `right_iter` - second iterator to be used in merge. + /// - `cmp_f` - compares iterator items. + /// - `map_f` - maps iterator item to the predecessor item. + pub fn new(left_iter: LI, right_iter: RI, cmp_f: CmpF, map_f: MapF) -> Self { + Self { + left_iter: left_iter.fuse().peekable(), + right_iter: right_iter.fuse().peekable(), + cmp_f, + map_f, + last_left: None, + last_right: None, + last: None, + } + } + + /// Picks next item along with some optional last items to be used by first and second iterators respectively. + #[allow(clippy::type_complexity)] + fn choose( + item: &LI::Item, + map_f: &MapF, + cmp_f: &CmpF, + first_iter: &mut iter::Peekable>, + second_iter: &mut iter::Peekable>, + ) -> ( + Option>, + Option, + Option, + ) { + match first_iter + .peek() + .zip(second_iter.peek()) + .map_or(Ordering::Less, |(first, second)| (cmp_f)(first, second)) + { + Ordering::Less => ( + first_iter.next().map(Either::Left), + Some((map_f)(item)), + None, + ), + Ordering::Equal => (first_iter.next().map(Either::Left), None, None), + Ordering::Greater => { + let (next_first, next_second) = second_iter + .peek() + .map(|last_item| { + ( + if (cmp_f)(item, last_item).is_eq() { + None + } else { + Some((map_f)(item)) + }, + if (cmp_f)(item, last_item).is_lt() { + Some((map_f)(last_item)) + } else { + None + }, + ) + }) + .unwrap_or_default(); + + ( + second_iter.next().map(Either::Right), + next_first, + next_second, + ) + } + } + } +} + +impl Iterator for MergeJoinWithMaxPredecessor +where + LI: Iterator, + RI: Iterator, + CmpF: Fn(&LI::Item, &LI::Item) -> Ordering, + MapF: Fn(&LI::Item) -> Pred, + Pred: Clone, +{ + type Item = (LI::Item, Option); + + fn size_hint(&self) -> (usize, Option) { + ( + self.left_iter.size_hint().0 + self.right_iter.size_hint().0, + self.left_iter + .size_hint() + .1 + .zip(self.right_iter.size_hint().1) 
+ .map(|(a, b)| a + b), + ) + } + + fn next(&mut self) -> Option { + let cmp_f = &self.cmp_f; + let map_f = &self.map_f; + let left_iter = &mut self.left_iter; + let right_iter = &mut self.right_iter; + + if self.last.is_none() { + let next_left = left_iter.peek(); + let next_right = right_iter.peek(); + + self.last = if next_left + .as_ref() + .zip(next_right.as_ref()) + .map_or(next_left.is_none(), |(l, r)| (cmp_f)(l, r).is_gt()) + { + right_iter.next().map(Either::Right) + } else { + left_iter.next().map(Either::Left) + } + } + + let next = match self.last.as_ref()? { + Either::Left(left) => { + let (item, left, right) = Self::choose(left, map_f, cmp_f, left_iter, right_iter); + + (item, left, right) + } + Either::Right(right) => { + let (item, right, left) = Self::choose(right, map_f, cmp_f, right_iter, left_iter); + + ( + item.map(|either| match either { + Either::Left(item) => Either::Right(item), + Either::Right(item) => Either::Left(item), + }), + left, + right, + ) + } + }; + + let item = self.last.take().map(|either| match either { + Either::Left(item) => (item, self.last_right.clone()), + Either::Right(item) => (item, self.last_left.clone()), + }); + + let (val, last_left, last_right) = next; + if let new_left @ Some(_) = last_left { + self.last_left = new_left; + } + if let new_right @ Some(_) = last_right { + self.last_right = new_right; + } + self.last = val; + + item + } +} + +#[cfg(test)] +mod tests { + use super::super::IteratorExt; + + #[test] + fn basic() { + assert_eq!( + vec![1, 1, 2, 2, 2, 3] + .into_iter() + .merge_join_with_max_predecessor( + vec![-1, 0, 2, 2, 2, 4, 6].into_iter(), + |a, b| a.cmp(b), + |v| *v + ) + .collect::>(), + vec![ + (-1, None), + (0, None), + (1, Some(0)), + (1, Some(0)), + (2, Some(0)), + (2, Some(0)), + (2, Some(0)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (3, Some(2)), + (4, Some(3)), + (6, Some(3)) + ] + ); + } + + #[test] + fn left_empty() { + assert_eq!( + vec![] + .into_iter() + 
.merge_join_with_max_predecessor( + vec![1, 2, 3, 3].into_iter(), + |a, b| a.cmp(b), + |v| *v + ) + .collect::>(), + vec![(1, None), (2, None), (3, None), (3, None)] + ); + } + + #[test] + fn right_empty() { + assert_eq!( + vec![1, 2, 3, 10] + .into_iter() + .into_iter() + .merge_join_with_max_predecessor(vec![].into_iter(), |a, b| a.cmp(b), |v| *v) + .collect::>(), + vec![(1, None), (2, None), (3, None), (10, None)] + ); + } + + #[test] + fn both_empty() { + assert_eq!( + vec![] + .into_iter() + .into_iter() + .merge_join_with_max_predecessor( + vec![].into_iter(), + |a: &u8, b: &u8| a.cmp(b), + |v| *v + ) + .collect::>(), + vec![] + ); + } + + #[test] + fn repetitions() { + assert_eq!( + vec![1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + .into_iter() + .into_iter() + .merge_join_with_max_predecessor( + vec![0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2].into_iter(), + |a, b| a.cmp(b), + |v| *v + ) + .collect::>(), + vec![ + (0, None), + (0, None), + (0, None), + (0, None), + (0, None), + (0, None), + (0, None), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (1, Some(0)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)), + (2, Some(1)) + ] + ); + } + + #[test] + fn unsorted() { + assert_eq!( + vec![3, 2, 1] + .into_iter() + .into_iter() + .merge_join_with_max_predecessor(vec![6, 5, 4].into_iter(), |a, b| a.cmp(b), |v| *v) + .collect::>(), + vec![ + (3, None), + (2, None), + (1, None), + (6, Some(1)), + (5, Some(1)), + (4, Some(1)) + ] + ); + assert_eq!( + vec![6, 5, 4] + .into_iter() + .into_iter() + .merge_join_with_max_predecessor(vec![3, 2, 1].into_iter(), |a, b| a.cmp(b), |v| *v) + .collect::>(), + vec![ + (3, None), + (2, None), + (1, None), + (6, Some(1)), + (5, Some(1)), + (4, Some(1)) + ] + ); + } + + #[test] + fn reverse_order() { + assert_eq!( + vec![1, 2, 3] + .into_iter() + .into_iter() + 
.merge_join_with_max_predecessor(vec![4, 5, 6].into_iter(), |a, b| b.cmp(a), |v| *v) + .collect::>(), + vec![ + (4, None), + (5, None), + (6, None), + (1, Some(6)), + (2, Some(6)), + (3, Some(6)) + ] + ); + } +} diff --git a/core/lib/merkle_tree/src/iter_ext/mod.rs b/core/lib/merkle_tree/src/iter_ext/mod.rs new file mode 100644 index 000000000000..99f9d4259fba --- /dev/null +++ b/core/lib/merkle_tree/src/iter_ext/mod.rs @@ -0,0 +1,31 @@ +use core::cmp::Ordering; + +pub mod merge_join_with_max_predecessor; +pub use merge_join_with_max_predecessor::*; + +/// Iterator extension which provides additional methods to be used by iterators. +pub trait IteratorExt: Iterator { + /// Merges two iterators with the same `Self::Item` types, emitting ordered items from both of them + /// along with optional maximum predecessor for each item from another iterator. + /// + /// - `left_iter` - first iterator to be used in merge. Has a higher priority to pick the first element from if they're equal. + /// - `right_iter` - second iterator to be used in merge. + /// - `cmp_f` - compares iterator items. + /// - `map_f` - maps iterator item to the predecessor item. + fn merge_join_with_max_predecessor( + self, + right_iter: I, + cmp_f: CmpF, + map_f: MapF, + ) -> MergeJoinWithMaxPredecessor + where + I: Iterator, + CmpF: Fn(&I::Item, &I::Item) -> Ordering, + MapF: Fn(&I::Item) -> Pred, + Self: Sized, + { + MergeJoinWithMaxPredecessor::new(self, right_iter, cmp_f, map_f) + } +} + +impl IteratorExt for T where T: Iterator {} diff --git a/core/lib/merkle_tree/src/lib.rs b/core/lib/merkle_tree/src/lib.rs new file mode 100644 index 000000000000..f1f9ca3c4dbe --- /dev/null +++ b/core/lib/merkle_tree/src/lib.rs @@ -0,0 +1,46 @@ +//! In-database Merkle Tree implementation. 
+ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +use thiserror::Error; +use zksync_crypto::hasher::Hasher; +use zksync_types::U256; + +mod iter_ext; +mod patch; +mod storage; +#[cfg(test)] +mod tests; +mod tree_config; +mod types; +mod utils; +mod zksync_tree; + +use types::Bytes; +pub use types::{InitialStorageWrite, RepeatedStorageWrite, TreeMetadata}; +pub use zksync_tree::{TreeMode, ZkSyncTree}; + +/// All kinds of Merkle Tree errors. +#[derive(Error, Clone, Debug)] +pub enum TreeError { + #[error("Branch entry with given level and hash was not found: {0:?} {1:?}")] + MissingBranch(u16, Vec), + #[error("Leaf entry with given hash was not found: {0:?}")] + MissingLeaf(Vec), + #[error("Key shouldn't be greater than {0:?}, received {1:?}")] + InvalidKey(U256, U256), + #[error("Failed to convert {0:?} to `U256`")] + KeyConversionFailed(String), + #[error("Invalid depth for {0:?}: {1:?} != {2:?}")] + InvalidDepth(String, u16, u16), + #[error("Attempt to create read-only Merkle tree for the absent root")] + EmptyRoot, + #[error("Invalid root: {0:?}")] + InvalidRoot(Vec), + #[error("Trees have different roots: {0:?} and {1:?} respectively")] + TreeRootsDiffer(Vec, Vec), + #[error("storage access error")] + StorageIoError(#[from] zksync_storage::rocksdb::Error), + #[error("empty patch")] + EmptyPatch, +} diff --git a/core/lib/merkle_tree/src/patch.rs b/core/lib/merkle_tree/src/patch.rs new file mode 100644 index 000000000000..d69ce41e752a --- /dev/null +++ b/core/lib/merkle_tree/src/patch.rs @@ -0,0 +1,151 @@ +use crate::iter_ext::IteratorExt; +use crate::types::{NodeEntry, TreeKey}; +use crate::{Bytes, TreeError}; +use core::iter; +use itertools::Itertools; +use rayon::prelude::{IntoParallelIterator, ParallelIterator}; +use std::collections::HashMap; +use zksync_config::constants::ROOT_TREE_DEPTH; +use zksync_crypto::hasher::Hasher; + +/// Represents set of prepared updates to be applied to the given tree in batch. 
+/// To calculate actual patch, use [crate::UpdatesMap::calculate]. +pub struct UpdatesBatch { + updates: HashMap< + // key affected by given storage update on respective tree level + TreeKey, + Vec, + >, +} + +/// Set of patches combined into one. +/// Each element represents changes from a single slot update. +pub type TreePatch = Vec>; + +#[derive(Clone, Debug)] +pub struct Update { + // operation index in a batch + index: usize, + // hashes of neighbour nodes on the path from root to the leaf + uncles: Vec>, + // all branch nodes that changed due to given update + // empty initially; populated level-by-level during path calculate phasE + changes: Vec<(TreeKey, NodeEntry)>, +} + +impl Update { + pub fn new(index: usize, uncles: Vec>, key: TreeKey) -> Self { + let mut update = Self { + index, + uncles, + changes: Vec::with_capacity(ROOT_TREE_DEPTH + 1), + }; + update.changes.push(( + key, + NodeEntry::Leaf { + hash: update.uncles.pop().unwrap(), + }, + )); + update + } +} + +impl UpdatesBatch { + /// Instantiates new set of batch updates. + pub(crate) fn new(updates: HashMap>) -> Self { + Self { updates } + } + + /// Calculates new set of Merkle Trees produced by applying map of updates to the current tree. + /// This calculation is parallelized over operations - all trees will be calculated in parallel. 
+ /// + /// Memory and time: O(M * log2(N)), where + /// - N - count of all leaf nodes (basically 2 in power of depth) + /// - M - count of updates being applied + pub fn calculate(self, hasher: H) -> Result + where + H: Hasher + Send + Sync, + { + let res_map = (0..ROOT_TREE_DEPTH).fold(self.updates, |cur_lvl_updates_map, _| { + // Calculate next level map based on current in parallel + cur_lvl_updates_map + .into_iter() + .into_grouping_map_by(|(key, _)| key >> 1) + .fold((None, None), |acc, _, item| { + if item.0 % 2 == 1.into() { + (acc.0, Some(item.1)) + } else { + (Some(item.1), acc.1) + } + }) + // Parallel by vertex key family + .into_par_iter() + .map(|(next_idx, (left_updates, right_updates))| { + let left_updates = left_updates + .into_iter() + .flat_map(|items| iter::repeat(false).zip(items)); + + let right_updates = right_updates + .into_iter() + .flat_map(|items| iter::repeat(true).zip(items)); + + let merged_ops: Vec<_> = left_updates + .merge_join_with_max_predecessor( + right_updates, + |(_, left), (_, right)| left.index.cmp(&right.index), + |(_, update)| update.changes.last().map(|(_, node)| node).cloned(), + ) + .collect(); + + let ops_iter = merged_ops + // Parallel by operation index + .into_par_iter() + .map(|((odd, mut update), nei)| { + let Update { + uncles, changes, .. 
+ } = &mut update; + + let current_hash = changes + .last() + .map(|(_, node)| node.hash().to_vec()) + .unwrap(); + + let sibling_hash = uncles.pop().unwrap(); + let nei_hash = nei + .flatten() + .map(NodeEntry::into_hash) + .unwrap_or(sibling_hash); + + // Hash current node with its neighbor + let (left_hash, right_hash) = if odd { + (nei_hash, current_hash) + } else { + (current_hash, nei_hash) + }; + + let branch = NodeEntry::Branch { + hash: hasher.compress(&left_hash, &right_hash), + left_hash, + right_hash, + }; + + changes.push((next_idx, branch)); + + update + }); + + (next_idx, ops_iter.collect()) + }) + .collect() + }); + + // Transforms map of leaf keys into an iterator of Merkle paths which produces + // items sorted by operation index in increasing order. + let patch = res_map + .into_iter() + .flat_map(|(_, updates)| updates.into_iter().map(|update| update.changes)) + .collect(); + + Ok(patch) + } +} diff --git a/core/lib/merkle_tree/src/storage.rs b/core/lib/merkle_tree/src/storage.rs new file mode 100644 index 000000000000..264bb4a86d56 --- /dev/null +++ b/core/lib/merkle_tree/src/storage.rs @@ -0,0 +1,210 @@ +use crate::types::{ + InitialStorageWrite, LeafIndices, LevelIndex, RepeatedStorageWrite, TreeKey, TreeOperation, + ZkHash, +}; +use crate::TreeError; +use byteorder::{BigEndian, ByteOrder, ReadBytesExt}; +use itertools::Itertools; +use std::collections::HashMap; +use std::fmt::{Debug, Formatter}; +use zksync_storage::db::MerkleTreeColumnFamily; +use zksync_storage::rocksdb::WriteBatch; +use zksync_storage::util::{deserialize_block_number, serialize_block_number, serialize_tree_leaf}; +use zksync_storage::RocksDB; + +const BLOCK_NUMBER_KEY: &[u8; 12] = b"block_number"; +const LEAF_INDEX_KEY: &[u8; 10] = b"leaf_index"; + +// Represents pending update that is yet to be flushed in RocksDB. 
+#[derive(Default)] +struct PendingPatch(WriteBatch); + +impl Debug for PendingPatch { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "PendingPatch {{ ... }}") + } +} + +/// Storage wrapper around RocksDB. +/// Stores hashes of branch nodes in merkle tree and current block number +#[derive(Debug)] +pub struct Storage { + db: RocksDB, + pending_patch: PendingPatch, +} + +impl Storage { + pub fn new(db: RocksDB) -> Self { + Self { + db, + pending_patch: PendingPatch(WriteBatch::default()), + } + } + + /// Fetches hashes of merkle tree branches from db + pub fn hashes<'a, I: 'a>(&'a self, keys: I) -> Vec>> + where + I: IntoIterator, + { + self.db + .multi_get(keys.into_iter().map(LevelIndex::bin_key)) + .into_iter() + .collect::>() + .unwrap() + } + + /// Prepares db update + pub fn pre_save(&mut self, branches: HashMap>) { + for (level_index, value) in branches { + self.pending_patch.0.put(level_index.bin_key(), value); + } + } + + /// Saves current state to db + pub fn save(&mut self, block_number: u32) -> Result<(), TreeError> { + if self.pending_patch.0.is_empty() { + return Err(TreeError::EmptyPatch); + } + let mut write_batch = + std::mem::replace(&mut self.pending_patch, PendingPatch(WriteBatch::default())).0; + write_batch.put(BLOCK_NUMBER_KEY, serialize_block_number(block_number)); + + // Sync write is not used here intentionally. It somewhat improves write performance. 
+ // Overall flow is designed in such way that data is committed to state keeper first + // and, in case of process crash, tree state is recoverable + self.db + .write(write_batch) + .map_err(TreeError::StorageIoError) + } + + /// Updates mapping between leaf index and its historical first occurrence and returns it + /// + /// note: for simplicity this column family update is done separately from the main one + /// so column families can become out of sync in the case of intermediate process crash + /// but after restart state is fully recoverable + pub fn process_leaf_indices( + &mut self, + storage_logs: &[(usize, (TreeKey, TreeOperation))], + ) -> Result, TreeError> { + let cf = self + .db + .cf_merkle_tree_handle(MerkleTreeColumnFamily::LeafIndices); + let mut current_index = self + .db + .get_cf(cf, LEAF_INDEX_KEY) + .expect("failed to fetch current leaf index") + .map(|bytes| deserialize_leaf_index(&bytes)) + .unwrap_or(1); + + let mut write_batch = std::mem::take(&mut self.pending_patch).0; + let mut new_writes = HashMap::new(); + + let result = self + .db + .multi_get_cf( + storage_logs + .iter() + .map(|(_, (key, _))| (cf, serialize_tree_leaf(*key))), + ) + .into_iter() + .zip(storage_logs) + .group_by(|(_, &(block, _))| block) + .into_iter() + .map(|(_block, group)| { + let mut repeated_writes = Vec::new(); + let mut initial_writes = Vec::new(); + let previous_index = current_index; + + let leaf_indices = group + .map(|(raw_data, &(_, (leaf, tree_operation)))| { + let leaf_index = match ( + raw_data.expect("failed to fetch leaf index"), + tree_operation, + ) { + // revert of first occurrence + (_, TreeOperation::Delete) => { + write_batch.delete_cf(cf, serialize_tree_leaf(leaf)); + current_index -= 1; + 0 + } + // existing leaf + (Some(bytes), TreeOperation::Write { value, .. 
}) => { + let index = deserialize_leaf_index(&bytes); + repeated_writes.push(RepeatedStorageWrite { index, value }); + index + } + (Some(bytes), TreeOperation::Read(_)) => deserialize_leaf_index(&bytes), + // first occurrence read (noop) + (None, TreeOperation::Read(_)) => *new_writes.get(&leaf).unwrap_or(&0), + // first occurrence write + (None, TreeOperation::Write { value, .. }) => { + // Since there can't be 2 logs for the same slot in one block, + // we can safely assume that if we have a new write, it was done in a + // previous block and thus the new index is valid. + if let Some(&index) = new_writes.get(&leaf) { + repeated_writes.push(RepeatedStorageWrite { index, value }); + index + } else { + let index = current_index; + write_batch.put_cf( + cf, + serialize_tree_leaf(leaf), + serialize_leaf_index(index), + ); + initial_writes.push(InitialStorageWrite { key: leaf, value }); + new_writes.insert(leaf, index); + current_index += 1; + index + } + } + }; + (leaf, leaf_index) + }) + .collect(); + + LeafIndices { + leaf_indices, + previous_index, + initial_writes, + repeated_writes, + last_index: current_index, + } + }) + .collect(); + + write_batch.put_cf(cf, LEAF_INDEX_KEY, serialize_leaf_index(current_index)); + self.pending_patch = PendingPatch(write_batch); + + Ok(result) + } + + /// Fetches high-level metadata about merkle tree state + pub fn fetch_metadata(&self) -> StoredTreeMetadata { + // Fetch root hash. It is represented by level index (0, 0). 
+ let root_hash = self.hashes(vec![&(0, 0.into()).into()])[0].clone(); + + let block_number = self + .db + .get(BLOCK_NUMBER_KEY) + .expect("failed to fetch tree metadata") + .map(|bytes| deserialize_block_number(&bytes)) + .unwrap_or(0); + (root_hash, block_number) + } +} + +/// High level merkle tree metadata +/// Includes root hash and current block number +pub(crate) type StoredTreeMetadata = (Option, u32); + +pub(crate) fn serialize_leaf_index(leaf_index: u64) -> Vec { + let mut bytes = vec![0; 8]; + BigEndian::write_u64(&mut bytes, leaf_index); + bytes +} + +fn deserialize_leaf_index(mut bytes: &[u8]) -> u64 { + bytes + .read_u64::() + .expect("failed to deserialize leaf index") +} diff --git a/core/lib/merkle_tree/src/tests.rs b/core/lib/merkle_tree/src/tests.rs new file mode 100644 index 000000000000..6fda64372a5f --- /dev/null +++ b/core/lib/merkle_tree/src/tests.rs @@ -0,0 +1,473 @@ +use crate::tree_config::TreeConfig; +use crate::types::{TreeKey, ZkHash, ZkHasher}; +use crate::ZkSyncTree; +use std::str::FromStr; +use tempfile::TempDir; +use zksync_config::constants::ACCOUNT_CODE_STORAGE_ADDRESS; +use zksync_storage::db::Database; +use zksync_storage::RocksDB; +use zksync_types::proofs::StorageLogMetadata; +use zksync_types::{ + AccountTreeId, Address, L1BatchNumber, StorageKey, StorageLog, WitnessStorageLog, H256, +}; +use zksync_utils::u32_to_h256; + +/// Checks main operations of the tree. 
+#[test] +fn basic_workflow() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let logs = gen_storage_logs(); + + let expected_root_hash = { + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let _ = tree.process_block(logs); + tree.save().unwrap(); + tree.root_hash() + }; + + assert_eq!( + expected_root_hash, + [ + 125, 25, 107, 171, 182, 155, 32, 70, 138, 108, 238, 150, 140, 205, 193, 39, 90, 92, + 122, 233, 118, 238, 248, 201, 160, 55, 58, 206, 244, 216, 188, 10 + ], + ); + + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let tree = ZkSyncTree::new(db); + assert_eq!(tree.root_hash(), expected_root_hash); + assert_eq!(tree.block_number(), 1); +} + +/// Checks main operations of the tree on multiple blocks. +#[test] +fn basic_workflow_multiblock() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let logs = gen_storage_logs(); + let blocks: Vec<_> = logs.chunks(9).collect(); + + let expected_root_hash = { + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let _ = tree.process_blocks(blocks); + tree.save().unwrap(); + tree.root_hash() + }; + + assert_eq!( + expected_root_hash, + [ + 125, 25, 107, 171, 182, 155, 32, 70, 138, 108, 238, 150, 140, 205, 193, 39, 90, 92, + 122, 233, 118, 238, 248, 201, 160, 55, 58, 206, 244, 216, 188, 10 + ], + ); + + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let tree = ZkSyncTree::new(db); + assert_eq!(tree.root_hash(), expected_root_hash); + assert_eq!(tree.block_number(), 12); +} + +/// Checks main operations of the tree. 
+#[test] +fn multiple_single_block_workflow() { + let logs = gen_storage_logs(); + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + { + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + + logs.chunks(5).into_iter().for_each(|chunk| { + let metadata = tree.process_block(chunk); + assert_eq!(tree.root_hash(), metadata.root_hash); + tree.save().unwrap(); + }); + } + // verify consistency of final result + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mirror_tree = ZkSyncTree::new(db); + mirror_tree.verify_consistency(); +} + +#[test] +fn revert_blocks() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + + // generate logs and save them to db + // produce 4 blocks with distinct values and 1 block with modified values from first block + let block_size: usize = 25; + let address = Address::from_str("4b3af74f66ab1f0da3f2e4ec7a3cb99baf1af7b2").unwrap(); + let proof_keys: Vec<_> = (0..100) + .map(move |i| StorageKey::new(AccountTreeId::new(address), u32_to_h256(i))) + .collect(); + let proof_values: Vec<_> = (0..100).map(u32_to_h256).collect(); + + // add couple of blocks of distinct keys/values + let mut logs: Vec<_> = convert_logs( + proof_keys + .iter() + .zip(proof_values.iter()) + .map(|(proof_key, &proof_value)| StorageLog::new_write_log(*proof_key, proof_value)) + .collect(), + ); + // add block with repeated keys + let mut extra_logs = convert_logs( + (0..block_size) + .map(move |i| { + StorageLog::new_write_log( + StorageKey::new(AccountTreeId::new(address), u32_to_h256(i as u32)), + u32_to_h256((i + 1) as u32), + ) + }) + .collect(), + ); + logs.append(&mut extra_logs); + + let mirror_logs = logs.clone(); + let tree_metadata = { + let mut tree = ZkSyncTree::new(storage); + let mut tree_metadata = vec![]; + for chunk in 
logs.chunks(block_size) { + tree_metadata.push(tree.process_block(chunk)); + tree.save().unwrap(); + } + assert_eq!(tree.block_number(), 5); + tree_metadata + }; + + let witness_tree = TestWitnessTree::deserialize(tree_metadata[3].witness_input.clone()); + assert!(witness_tree.get_leaf((4 * block_size - 1) as u64).is_some()); + + // revert last block + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + { + let mut tree = ZkSyncTree::new(storage); + assert_eq!(tree.root_hash(), tree_metadata.last().unwrap().root_hash); + let logs_to_revert: Vec<_> = mirror_logs + .iter() + .take(block_size) + .map(|log| { + ( + log.storage_log.key.hashed_key_u256(), + Some(log.storage_log.value), + ) + }) + .collect(); + tree.revert_logs(L1BatchNumber(3), logs_to_revert); + + assert_eq!(tree.root_hash(), tree_metadata[3].root_hash); + tree.save().unwrap(); + } + + // revert two more blocks + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let logs_to_revert = mirror_logs + .iter() + .skip(2 * block_size) + .take(2 * block_size) + .map(|witness_log| (witness_log.storage_log.key.hashed_key_u256(), None)) + .collect(); + { + let mut tree = ZkSyncTree::new(storage); + tree.revert_logs(L1BatchNumber(1), logs_to_revert); + assert_eq!(tree.root_hash(), tree_metadata[1].root_hash); + tree.save().unwrap(); + } + + // reapply one of the reverted logs and verify that indexing is correct + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + { + let storage_log = mirror_logs.get(3 * block_size).unwrap(); + let mut tree = ZkSyncTree::new(storage); + let metadata = tree.process_block(vec![storage_log]); + + let witness_tree = TestWitnessTree::deserialize(metadata.witness_input); + assert!(witness_tree.get_leaf((2 * block_size + 1) as u64).is_some()); + tree.save().unwrap(); + } + + // check saved block number + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let tree = 
ZkSyncTree::new(storage); + assert_eq!(tree.block_number(), 3); +} + +#[test] +fn reset_tree() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let storage = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let logs = gen_storage_logs(); + let mut tree = ZkSyncTree::new(storage); + let config = TreeConfig::new(ZkHasher::default()); + + logs.chunks(5) + .into_iter() + .fold(config.default_root_hash(), |hash, chunk| { + let _ = tree.process_block(chunk); + tree.reset(); + assert_eq!(tree.root_hash(), hash); + let _ = tree.process_block(chunk); + tree.save().unwrap(); + tree.root_hash() + }); +} + +fn convert_logs(logs: Vec) -> Vec { + logs.into_iter() + .map(|storage_log| WitnessStorageLog { + storage_log, + previous_value: H256::zero(), + }) + .collect() +} + +fn gen_storage_logs() -> Vec { + let addrs = vec![ + "4b3af74f66ab1f0da3f2e4ec7a3cb99baf1af7b2", + "ef4bb7b21c5fe7432a7d63876cc59ecc23b46636", + "89b8988a018f5348f52eeac77155a793adf03ecc", + "782806db027c08d36b2bed376b4271d1237626b3", + "b2b57b76717ee02ae1327cc3cf1f40e76f692311", + ] + .into_iter() + .map(|s| Address::from_str(s).unwrap()); + + let proof_keys: Vec<_> = addrs + .flat_map(|addr| { + (0..20).map(move |i| StorageKey::new(AccountTreeId::new(addr), u32_to_h256(i))) + }) + .collect(); + let proof_values: Vec<_> = (0..100).map(u32_to_h256).collect(); + + proof_keys + .iter() + .zip(proof_values.iter()) + .map(|(proof_key, &proof_value)| { + let storage_log = StorageLog::new_write_log(*proof_key, proof_value); + WitnessStorageLog { + storage_log, + previous_value: H256::zero(), + } + }) + .collect() +} + +/// This one is used only for basic test verification +pub struct TestWitnessTree { + storage_logs: Vec, +} + +impl TestWitnessTree { + pub fn deserialize(bytes: Vec) -> Self { + let storage_logs = bincode::deserialize(&bytes).expect("failed to deserialize witness"); + Self { storage_logs } + } + + pub fn root(&self) -> ZkHash { + 
self.storage_logs.last().unwrap().root_hash.clone() + } + + pub fn get_leaf(&self, index: u64) -> Option { + for log in &self.storage_logs { + if log.leaf_enumeration_index == index { + return Some(log.leaf_hashed_key); + } + } + None + } +} + +#[test] +fn basic_witness_workflow() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let logs = gen_storage_logs(); + let (first_chunk, second_chunk) = logs.split_at(logs.len() / 2); + + { + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let metadata = tree.process_block(first_chunk); + let witness_tree = TestWitnessTree::deserialize(metadata.witness_input); + + assert_eq!( + witness_tree.get_leaf(1), + Some(logs[0].storage_log.key.hashed_key_u256()) + ); + assert_eq!( + witness_tree.get_leaf(2), + Some(logs[1].storage_log.key.hashed_key_u256()) + ); + + tree.save().unwrap(); + } + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let metadata = tree.process_block(second_chunk); + let witness_tree = TestWitnessTree::deserialize(metadata.witness_input); + assert_eq!( + witness_tree.root(), + [ + 125, 25, 107, 171, 182, 155, 32, 70, 138, 108, 238, 150, 140, 205, 193, 39, 90, 92, + 122, 233, 118, 238, 248, 201, 160, 55, 58, 206, 244, 216, 188, 10 + ], + ); + + assert_eq!( + witness_tree.get_leaf((logs.len() / 2 + 1) as u64), + Some(logs[logs.len() / 2].storage_log.key.hashed_key_u256()) + ); +} + +#[test] +fn read_logs() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let logs: Vec<_> = gen_storage_logs().into_iter().take(5).collect(); + + let write_metadata = { + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let metadata = tree.process_block(logs.clone()); + tree.save().unwrap(); + metadata + }; + + let db = RocksDB::new(Database::MerkleTree, 
temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + let read_logs: Vec<_> = logs + .into_iter() + .take(5) + .map(|log| StorageLog::new_read_log(log.storage_log.key, log.storage_log.value)) + .collect(); + let read_metadata = tree.process_block(convert_logs(read_logs)); + + assert_eq!(read_metadata.root_hash, write_metadata.root_hash); + let witness_tree = TestWitnessTree::deserialize(read_metadata.witness_input); + assert!(witness_tree.get_leaf(1).is_some()); + assert!(witness_tree.get_leaf(2).is_some()); +} + +#[test] +fn root_hash_compatibility() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(db); + assert_eq!( + tree.root_hash(), + [ + 152, 164, 142, 78, 209, 115, 97, 136, 56, 74, 232, 167, 157, 210, 28, 77, 102, 135, + 229, 253, 34, 202, 24, 20, 137, 6, 215, 135, 54, 192, 216, 106 + ], + ); + let storage_logs = vec![ + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(ACCOUNT_CODE_STORAGE_ADDRESS), + H256::zero(), + ), + [1u8; 32].into(), + ), + previous_value: H256::zero(), + }, + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_low_u64_be(9223372036854775808)), + H256::from(&[254; 32]), + ), + [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, + ] + .into(), + ), + previous_value: H256::zero(), + }, + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_low_u64_be(9223372036854775809)), + H256::from(&[253; 32]), + ), + [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 253, + ] + .into(), + ), + 
previous_value: H256::zero(), + }, + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_low_u64_be(9223372036854775810)), + H256::from(&[252; 32]), + ), + [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 252, + ] + .into(), + ), + previous_value: H256::zero(), + }, + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_low_u64_be(9223372036854775811)), + H256::from(&[251; 32]), + ), + [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 251, + ] + .into(), + ), + previous_value: H256::zero(), + }, + WitnessStorageLog { + storage_log: StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::from_low_u64_be(9223372036854775812)), + H256::from(&[250; 32]), + ), + [ + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 250, + ] + .into(), + ), + previous_value: H256::zero(), + }, + ]; + let metadata = tree.process_block(storage_logs); + assert_eq!( + metadata.root_hash, + [ + 35, 191, 235, 50, 17, 223, 143, 160, 240, 38, 139, 111, 221, 156, 42, 29, 72, 90, 196, + 198, 72, 13, 219, 88, 59, 250, 94, 112, 221, 3, 44, 171 + ] + ); +} + +#[test] +fn process_block_idempotency_check() { + let temp_dir = TempDir::new().expect("failed to get temporary directory for RocksDB"); + let rocks_db = RocksDB::new(Database::MerkleTree, temp_dir.as_ref(), false); + let mut tree = ZkSyncTree::new(rocks_db); + let tree_metadata = tree.process_block(gen_storage_logs()); + + // simulate server restart by calling process_block again on the same tree. 
+ + let tree_metadata_second = tree.process_block(gen_storage_logs()); + assert_eq!( + tree_metadata.initial_writes, tree_metadata_second.initial_writes, + "initial writes must be same on multiple calls to process_block to ensure idempotency" + ); + assert_eq!( + tree_metadata.repeated_writes, tree_metadata_second.repeated_writes, + "repeated writes must be same on multiple calls to process_block to ensure idempotency" + ); +} diff --git a/core/lib/merkle_tree/src/tree_config.rs b/core/lib/merkle_tree/src/tree_config.rs new file mode 100644 index 000000000000..e9219d1ffbfa --- /dev/null +++ b/core/lib/merkle_tree/src/tree_config.rs @@ -0,0 +1,92 @@ +//! Merkle Tree shared configuration. + +use core::fmt::Debug; +use core::iter::once; +use std::sync::Arc; +use zksync_config::constants::ROOT_TREE_DEPTH; + +use crate::{types::*, Bytes, Hasher}; + +#[derive(Debug)] +struct TreeConfigInner { + /// Hash generator used to hash entries. + pub(crate) hasher: H, + /// Precalculated empty leaf tree hashes. Start from leaf. + pub(crate) empty_tree: Vec, +} + +/// Shared configuration for Sparse Merkle Tree. +#[derive(Clone, Debug)] +pub struct TreeConfig { + inner: Arc>, +} + +impl TreeConfig +where + H: Hasher, +{ + /// Creates new shared config with supplied params. + pub fn new(hasher: H) -> Self { + let empty_hashes = Self::calc_default_hashes(ROOT_TREE_DEPTH, &hasher); + + Self { + inner: Arc::new(TreeConfigInner { + empty_tree: Self::calc_empty_tree(&empty_hashes), + hasher, + }), + } + } + + /// Produces tree with all leaves having default value (empty). 
+ fn calc_empty_tree(hashes: &[Vec]) -> Vec { + let mut empty_tree: Vec<_> = hashes + .iter() + .rev() + .zip(once(None).chain(hashes.iter().rev().map(Some))) + .map(|(hash, prev_hash)| match prev_hash { + None => NodeEntry::Leaf { hash: hash.clone() }, + Some(prev_hash) => NodeEntry::Branch { + hash: hash.clone(), + left_hash: prev_hash.clone(), + right_hash: prev_hash.clone(), + }, + }) + .collect(); + empty_tree.reverse(); + + empty_tree + } + + /// Returns reference to precalculated empty Merkle Tree hashes starting from leaf. + pub fn empty_tree(&self) -> &[NodeEntry] { + &self.inner.empty_tree + } + + pub fn empty_leaf(hasher: &H) -> ZkHash { + hasher.hash_bytes([0; 40]) + } + + pub fn default_root_hash(&self) -> ZkHash { + self.empty_tree().first().cloned().unwrap().into_hash() + } + + /// Returns current hasher. + pub fn hasher(&self) -> &H { + &self.inner.hasher + } + + /// Calculates default empty leaf hashes for given types. + fn calc_default_hashes(depth: usize, hasher: &H) -> Vec> { + let mut def_hashes = Vec::with_capacity(depth + 1); + def_hashes.push(Self::empty_leaf(hasher)); + for _ in 0..depth { + let last_hash = def_hashes.last().unwrap(); + let hash = hasher.compress(last_hash, last_hash); + + def_hashes.push(hash); + } + def_hashes.reverse(); + + def_hashes + } +} diff --git a/core/lib/merkle_tree/src/types.rs b/core/lib/merkle_tree/src/types.rs new file mode 100644 index 000000000000..4490dd31fc64 --- /dev/null +++ b/core/lib/merkle_tree/src/types.rs @@ -0,0 +1,90 @@ +//! Definitions of types used in Merkle Tree implementation. 
+ +use crate::U256; +use serde::Serialize; +use std::collections::HashMap; +use zksync_crypto::hasher::blake2::Blake2Hasher; +pub use zksync_types::writes::{InitialStorageWrite, RepeatedStorageWrite}; +use zksync_types::H256; +use zksync_utils::impl_from_wrapper; + +#[derive(PartialEq, Eq, Hash, Clone, Debug, Serialize)] +pub struct LevelIndex(pub (u16, U256)); + +impl_from_wrapper!(LevelIndex, (u16, U256)); + +impl LevelIndex { + pub fn bin_key(&self) -> Vec { + bincode::serialize(&self).expect("Serialization failed") + } +} + +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum TreeOperation { + Write { + value: TreeValue, + previous_value: TreeValue, + }, + Read(TreeValue), + Delete, +} + +#[derive(Clone, Debug)] +pub enum NodeEntry { + Branch { + hash: Vec, + left_hash: Vec, + right_hash: Vec, + }, + Leaf { + hash: Vec, + }, +} + +impl NodeEntry { + pub fn hash(&self) -> &[u8] { + match self { + NodeEntry::Branch { hash, .. } => hash, + NodeEntry::Leaf { hash } => hash, + } + } + + pub fn into_hash(self) -> Vec { + match self { + NodeEntry::Branch { hash, .. } => hash, + NodeEntry::Leaf { hash } => hash, + } + } +} + +/// Convenience aliases to make code a bit more readable. +pub type TreeKey = U256; +pub type TreeValue = H256; +pub type Bytes = Vec; + +/// Definition of the main hashing scheme to be used throughout the module. +/// We use an alias instead of direct type definition for the case if we'd decide to switch the hashing scheme +pub type ZkHasher = Blake2Hasher; + +/// Definition of the hash type derived from the hasher. 
+pub type ZkHash = Bytes; + +/// Represents metadata of current tree state +/// Includes root hash, current tree location and serialized merkle paths for each storage log +#[derive(Debug, Clone, Default)] +pub struct TreeMetadata { + pub root_hash: ZkHash, + pub rollup_last_leaf_index: u64, + pub witness_input: Vec, + pub initial_writes: Vec, + pub repeated_writes: Vec, +} + +#[derive(Debug, Clone, Default)] +pub struct LeafIndices { + pub leaf_indices: HashMap, + pub last_index: u64, + pub previous_index: u64, + pub initial_writes: Vec, + pub repeated_writes: Vec, +} diff --git a/core/lib/merkle_tree/src/utils.rs b/core/lib/merkle_tree/src/utils.rs new file mode 100644 index 000000000000..36a68d854c57 --- /dev/null +++ b/core/lib/merkle_tree/src/utils.rs @@ -0,0 +1,28 @@ +use crate::types::LevelIndex; +use zksync_config::constants::ROOT_TREE_DEPTH; +use zksync_types::U256; + +/// Calculates neighbor index for given index to have complete pair. +fn neighbor_idx(idx: U256) -> U256 { + idx ^ 1.into() +} + +/// Produces a full merkle path of neighbors for given leaf (including given leaf itself) +/// Used to calculate hash changes for branch nodes caused by leaf update +pub fn idx_to_merkle_path(idx: U256) -> impl DoubleEndedIterator + Clone { + (1..=ROOT_TREE_DEPTH) + .map(move |cur_depth| { + ( + cur_depth as u16, + neighbor_idx(idx >> (ROOT_TREE_DEPTH - cur_depth)), + ) + }) + .map(Into::into) +} + +pub(crate) fn children_idxs(level_idx: &LevelIndex) -> (LevelIndex, LevelIndex) { + ( + (level_idx.0 .0 + 1, level_idx.0 .1 << 1).into(), + (level_idx.0 .0 + 1, (level_idx.0 .1 << 1) + 1).into(), + ) +} diff --git a/core/lib/merkle_tree/src/zksync_tree.rs b/core/lib/merkle_tree/src/zksync_tree.rs new file mode 100644 index 000000000000..2cc4a80f95d3 --- /dev/null +++ b/core/lib/merkle_tree/src/zksync_tree.rs @@ -0,0 +1,527 @@ +use crate::patch::{TreePatch, Update, UpdatesBatch}; +use crate::storage::{serialize_leaf_index, Storage}; +use crate::tree_config::TreeConfig; 
+use crate::types::{ + LeafIndices, LevelIndex, NodeEntry, TreeKey, TreeMetadata, TreeOperation, TreeValue, ZkHash, + ZkHasher, +}; +use crate::utils::children_idxs; +use crate::{utils, TreeError}; +use itertools::Itertools; +use rayon::prelude::{IntoParallelIterator, ParallelIterator}; +use std::borrow::Borrow; +use std::collections::{hash_map::Entry, HashMap, HashSet}; +use std::iter::once; +use std::sync::Arc; +use tokio::time::Instant; +use zksync_config::constants::ROOT_TREE_DEPTH; +use zksync_crypto::hasher::Hasher; +use zksync_storage::RocksDB; +use zksync_types::proofs::StorageLogMetadata; +use zksync_types::{L1BatchNumber, StorageLogKind, WitnessStorageLog, H256}; + +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum TreeMode { + Full, + Lightweight, +} + +#[derive(Debug)] +pub struct ZkSyncTree { + storage: Storage, + config: TreeConfig, + root_hash: ZkHash, + block_number: u32, + mode: TreeMode, +} + +impl ZkSyncTree { + /// Creates new ZkSyncTree instance + pub fn new_with_mode(db: RocksDB, mode: TreeMode) -> Self { + let storage = Storage::new(db); + let config = TreeConfig::new(ZkHasher::default()); + let (root_hash, block_number) = storage.fetch_metadata(); + let root_hash = root_hash.unwrap_or_else(|| config.default_root_hash()); + + Self { + storage, + config, + root_hash, + block_number, + mode, + } + } + + pub fn new(db: RocksDB) -> Self { + Self::new_with_mode(db, TreeMode::Full) + } + + pub fn new_lightweight(db: RocksDB) -> Self { + Self::new_with_mode(db, TreeMode::Lightweight) + } + + pub fn root_hash(&self) -> ZkHash { + self.root_hash.clone() + } + + pub fn is_empty(&self) -> bool { + self.root_hash == self.config.default_root_hash() + } + + pub fn block_number(&self) -> u32 { + self.block_number + } + + /// Returns current hasher. + fn hasher(&self) -> &ZkHasher { + self.config.hasher() + } + + /// Processes an iterator of block logs, interpreting each nested iterator as a block. 
+ /// Before going to the next block, the current block will be sealed. + /// Returns tree metadata for the corresponding blocks. + /// + /// - `storage_logs` - an iterator of storage logs for a given block + pub fn process_block(&mut self, storage_logs: I) -> TreeMetadata + where + I: IntoIterator, + I::Item: Borrow, + { + self.process_blocks(once(storage_logs)).pop().unwrap() + } + + pub fn process_blocks(&mut self, blocks: I) -> Vec + where + I: IntoIterator, + I::Item: IntoIterator, + ::Item: Borrow, + { + // Filter out reading logs and convert writing to the key-value pairs + let tree_operations: Vec<_> = blocks + .into_iter() + .enumerate() + .map(|(i, logs)| { + let tree_operations: Vec<_> = logs + .into_iter() + .map(|log| { + let operation = match log.borrow().storage_log.kind { + StorageLogKind::Write => TreeOperation::Write { + value: log.borrow().storage_log.value, + previous_value: log.borrow().previous_value, + }, + StorageLogKind::Read => { + TreeOperation::Read(log.borrow().storage_log.value) + } + }; + (log.borrow().storage_log.key.hashed_key_u256(), operation) + }) + .collect(); + + vlog::info!( + "Tree {:?}, processing block {}, with {} logs", + self.mode, + self.block_number + i as u32, + tree_operations.len(), + ); + + tree_operations + }) + .collect(); + + assert!( + self.mode == TreeMode::Full || tree_operations.len() == 1, + "Tried to process multiple blocks in lightweight mode" + ); + + // Apply all tree operations + self.apply_updates_batch(tree_operations) + .expect("Failed to apply logs") + } + + fn apply_updates_batch( + &mut self, + updates_batch: Vec>, + ) -> Result, TreeError> { + let start = Instant::now(); + let total_blocks = updates_batch.len(); + + let storage_logs_with_blocks: Vec<_> = updates_batch + .into_iter() + .enumerate() + .flat_map(|(i, logs)| logs.into_iter().map(move |log| (i, log))) + .collect(); + + let mut leaf_indices = self + .storage + .process_leaf_indices(&storage_logs_with_blocks)?; + + let 
storage_logs_with_indices: Vec<_> = storage_logs_with_blocks + .iter() + .map(|&(block, (key, operation))| { + let leaf_index = leaf_indices[block].leaf_indices[&key]; + (key, operation, leaf_index) + }) + .collect(); + + metrics::histogram!("merkle_tree.leaf_index_update", start.elapsed()); + let start = Instant::now(); + + let prepared_updates = self.prepare_batch_update(storage_logs_with_indices)?; + + metrics::histogram!("merkle_tree.prepare_update", start.elapsed()); + + let start = Instant::now(); + let updates = prepared_updates.calculate(self.hasher().clone())?; + + metrics::histogram!("merkle_tree.root_calculation", start.elapsed()); + + let start = Instant::now(); + + let tree_metadata = match self.mode { + TreeMode::Full => { + let patch_metadata = + self.apply_patch(updates, &storage_logs_with_blocks, &leaf_indices); + + self.root_hash = patch_metadata + .last() + .map(|metadata| metadata.root_hash.clone()) + .unwrap_or_else(|| self.root_hash.clone()); + + patch_metadata + .into_iter() + .zip(storage_logs_with_blocks) + .group_by(|(_, (block, _))| *block) + .into_iter() + .map(|(block, group)| { + let LeafIndices { + last_index, + initial_writes, + repeated_writes, + previous_index, + .. + } = std::mem::take(&mut leaf_indices[block]); + + let metadata: Vec<_> = + group.into_iter().map(|(metadata, _)| metadata).collect(); + let root_hash = metadata.last().unwrap().root_hash.clone(); + let witness_input = bincode::serialize(&(metadata, previous_index)) + .expect("witness serialization failed"); + + TreeMetadata { + root_hash, + rollup_last_leaf_index: last_index, + witness_input, + initial_writes, + repeated_writes, + } + }) + .collect() + } + TreeMode::Lightweight => { + self.root_hash = self.apply_patch_without_metadata_calculation(updates); + + let LeafIndices { + last_index, + initial_writes, + repeated_writes, + .. 
+ } = std::mem::take(&mut leaf_indices[0]); + + vec![TreeMetadata { + root_hash: self.root_hash.clone(), + rollup_last_leaf_index: last_index, + witness_input: Vec::new(), + initial_writes, + repeated_writes, + }] + } + }; + + metrics::histogram!("merkle_tree.patch_application", start.elapsed()); + + self.block_number += total_blocks as u32; + Ok(tree_metadata) + } + + /// Prepares all the data which will be needed to calculate new Merkle Trees without storage access. + /// This method doesn't perform any hashing operations. + fn prepare_batch_update(&self, storage_logs: I) -> Result + where + I: IntoIterator, + { + let (op_idxs, updates): (Vec<_>, Vec<_>) = storage_logs + .into_iter() + .enumerate() + .map(|(op_idx, (key, op, index))| ((op_idx, key), (key, op, index))) + .unzip(); + + let map = self + .hash_paths_to_leaves(updates.into_iter()) + .zip(op_idxs.into_iter()) + .map(|(parent_nodes, (op_idx, key))| (key, Update::new(op_idx, parent_nodes, key))) + .fold(HashMap::new(), |mut map: HashMap<_, Vec<_>>, (key, op)| { + match map.entry(key) { + Entry::Occupied(mut entry) => entry.get_mut().push(op), + Entry::Vacant(entry) => { + entry.insert(vec![op]); + } + } + map + }); + + Ok(UpdatesBatch::new(map)) + } + + /// Accepts updated key-value pair and resolves to an iterator which produces + /// new tree path containing leaf with branch nodes and full path to the top. + /// This iterator will lazily emit leaf with needed path to the top node. + /// At the moment of calling given function won't perform any hashing operation. + /// Note: This method is public so that it can be used by the data availability repo. 
+ pub fn hash_paths_to_leaves<'a, 'b: 'a, I>( + &'a self, + storage_logs: I, + ) -> impl Iterator>> + 'a + where + I: Iterator + Clone + 'b, + { + let hasher = self.hasher().clone(); + let default_leaf = TreeConfig::empty_leaf(&hasher); + + self.get_leaves_paths(storage_logs.clone().map(|(key, _, _)| key)) + .zip(storage_logs) + .map(move |(current_path, (_key, operation, leaf_index))| { + let hash = match operation { + TreeOperation::Write { value, .. } => hasher.compress( + &serialize_leaf_index(leaf_index), + &value.to_fixed_bytes().to_vec(), + ), + TreeOperation::Delete => default_leaf.clone(), + TreeOperation::Read(value) => hasher.compress( + &serialize_leaf_index(leaf_index), + &value.to_fixed_bytes().to_vec(), + ), + }; + current_path + .map(|(_, hash)| hash) + .chain(once(hash)) + .collect() + }) + } + + /// Retrieves leaf with a given key along with full tree path to it. + /// Note: This method is public so that it can be used by the data availability repo. + pub fn get_leaves_paths<'a, 'b: 'a, I>( + &'a self, + ids_iter: I, + ) -> impl Iterator)> + Clone + 'b> + 'a + where + I: Iterator + Clone + 'a, + { + let empty_tree = Arc::new(self.config.empty_tree().to_vec()); + + let idxs: HashSet<_> = ids_iter + .clone() + .flat_map(utils::idx_to_merkle_path) + .collect(); + + let branch_map: Arc> = Arc::new( + idxs.iter() + .cloned() + .zip(self.storage.hashes(idxs.iter()).into_iter()) + .collect(), + ); + + let hash_by_lvl_idx = move |lvl_idx| { + let value = branch_map + .get(&lvl_idx) + .and_then(|x| x.clone()) + .unwrap_or_else(|| empty_tree[lvl_idx.0 .0 as usize].hash().to_vec()); + + (lvl_idx.0 .1, value) + }; + + ids_iter + .into_iter() + .map(move |idx| utils::idx_to_merkle_path(idx).map(hash_by_lvl_idx.clone())) + } + + fn make_node(level: usize, key: TreeKey, node: NodeEntry) -> (LevelIndex, Vec) { + ( + ((ROOT_TREE_DEPTH - level) as u16, key).into(), + node.into_hash(), + ) + } + + fn apply_patch_without_metadata_calculation(&mut self, patch: 
TreePatch) -> Vec { + let branches = patch + .into_iter() + .fold(HashMap::new(), |mut branches, entries| { + branches.extend( + entries + .into_iter() + .enumerate() + .map(|(level, (key, tree_value))| Self::make_node(level, key, tree_value)), + ); + branches + }); + + let root_hash = branches[&(0, TreeKey::zero()).into()].clone(); + + // Prepare database changes + self.storage.pre_save(branches); + root_hash + } + + /// Applies each change from the given patch to the tree. + fn apply_patch( + &mut self, + patch: TreePatch, + storage_logs: &[(usize, (TreeKey, TreeOperation))], + leaf_indices: &[LeafIndices], + ) -> Vec { + let (branches, metadata) = patch.into_iter().zip(storage_logs).fold( + (HashMap::new(), Vec::new()), + |(mut branches, mut metadata), (entries, &(block, (_, storage_log)))| { + let leaf_hashed_key = entries.first().unwrap().0; + let leaf_index = leaf_indices[block].leaf_indices[&leaf_hashed_key]; + let mut merkle_paths = Vec::with_capacity(ROOT_TREE_DEPTH); + + branches.extend(entries.into_iter().enumerate().map(|(level, (key, node))| { + if let NodeEntry::Branch { + right_hash, + left_hash, + .. + } = &node + { + let witness_hash = if (leaf_hashed_key >> (level - 1)) % 2 == 0.into() { + right_hash + } else { + left_hash + }; + merkle_paths.push(witness_hash.clone()); + } + Self::make_node(level, key, node) + })); + + let root_hash = branches.get(&(0, TreeKey::zero()).into()).unwrap().clone(); + let is_write = !matches!(storage_log, TreeOperation::Read(_)); + let first_write = is_write && leaf_index >= leaf_indices[block].previous_index; + let value_written = match storage_log { + TreeOperation::Write { value, .. } => value, + _ => H256::zero(), + }; + let value_read = match storage_log { + TreeOperation::Write { previous_value, .. 
} => previous_value, + TreeOperation::Read(value) => value, + TreeOperation::Delete => H256::zero(), + }; + let metadata_log = StorageLogMetadata { + root_hash, + is_write, + first_write, + merkle_paths, + leaf_hashed_key, + leaf_enumeration_index: leaf_index, + value_written: value_written.to_fixed_bytes(), + value_read: value_read.to_fixed_bytes(), + }; + metadata.push(metadata_log); + + (branches, metadata) + }, + ); + + // Prepare database changes + self.storage.pre_save(branches); + metadata + } + + pub fn save(&mut self) -> Result<(), TreeError> { + self.storage.save(self.block_number) + } + + pub fn verify_consistency(&self) { + let empty_tree = self.config.empty_tree().to_vec(); + let hasher = self.hasher().clone(); + + let mut current_level = + vec![(self.root_hash(), (1, 0.into()).into(), (1, 1.into()).into())]; + + for node in empty_tree.iter().take(ROOT_TREE_DEPTH + 1).skip(1) { + let default_hash = node.hash().to_vec(); + + // fetch hashes for current level from rocksdb + let hashes = { + let nodes_iter = current_level + .iter() + .flat_map(|(_, left, right)| vec![left, right]); + + self.storage + .hashes(nodes_iter.clone()) + .into_iter() + .map(|value| value.unwrap_or_else(|| default_hash.clone())) + .zip(nodes_iter) + .map(|(k, v)| (v.clone(), k)) + .collect::>() + }; + + // verify in parallel that hashes do match with previous level + // and create new data for next level + current_level = current_level + .into_par_iter() + .map(|(parent_hash, left, right)| { + let mut children_checks = vec![]; + let left_hash = hashes[&left].clone(); + let right_hash = hashes[&right].clone(); + + assert_eq!(parent_hash, hasher.compress(&left_hash, &right_hash)); + if left_hash != default_hash { + let (left_child, right_child) = children_idxs(&left); + children_checks.push((left_hash, left_child, right_child)); + } + if right_hash != default_hash { + let (left_child, right_child) = children_idxs(&right); + children_checks.push((right_hash, left_child, 
right_child)); + } + children_checks + }) + .flatten() + .collect(); + } + } + + // while this function is used by the block reverter to revert to a previous state, + // it actually applies "arbitrary logs" (without any context). + // that is, it gets a list of (key,value) logs and applies them to the state + pub fn revert_logs( + &mut self, + block_number: L1BatchNumber, + logs: Vec<(TreeKey, Option)>, + ) { + let tree_operations = logs + .into_iter() + .map(|(key, value)| { + let operation = match value { + Some(value) => TreeOperation::Write { + value, + previous_value: H256::zero(), + }, + None => TreeOperation::Delete, + }; + (key, operation) + }) + .collect(); + self.apply_updates_batch(vec![tree_operations]) + .expect("Failed to revert logs"); + self.block_number = block_number.0 + 1; + } + + /// Resets state of the tree to the latest state in db + pub fn reset(&mut self) { + let (root_hash, block_number) = self.storage.fetch_metadata(); + self.root_hash = + root_hash.unwrap_or_else(|| TreeConfig::new(ZkHasher::default()).default_root_hash()); + self.block_number = block_number; + self.storage.pre_save(HashMap::new()); + } +} diff --git a/core/lib/mini_merkle_tree/Cargo.toml b/core/lib/mini_merkle_tree/Cargo.toml new file mode 100644 index 000000000000..ccb7b9b78f6b --- /dev/null +++ b/core/lib/mini_merkle_tree/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "zksync_mini_merkle_tree" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_crypto = { path = "../../lib/crypto", version = "1.0" } +zksync_basic_types = { path = "../basic_types", version = "1.0" } + +rayon = "1.3.0" +once_cell = "1.7" + diff --git a/core/lib/mini_merkle_tree/src/lib.rs b/core/lib/mini_merkle_tree/src/lib.rs new file mode 100644 index 000000000000..4f83da68dd5a 
--- /dev/null +++ b/core/lib/mini_merkle_tree/src/lib.rs @@ -0,0 +1,208 @@ +//! Module used to calculate root hashes for small in-memory merkle trees +//! +//! Picks height for the tree and builds merkle tree on a fly from vector of values. +//! Latter will be used as tree leaves. +//! Resulted tree is left-leaning, meaning that all meaningful non-trivial paths will be stored in the left subtree. +use once_cell::sync::OnceCell; +use rayon::prelude::{IntoParallelIterator, ParallelIterator, ParallelSlice}; +use std::cmp::max; +use std::collections::HashMap; +use zksync_basic_types::H256; +use zksync_crypto::hasher::keccak::KeccakHasher; +use zksync_crypto::hasher::Hasher; + +const MINI_TREE_MIN_DEPTH: usize = 5; +const MINI_TREE_MAX_DEPTH: usize = 10; +const MAX_NUMBER_OF_LEAVES: u32 = 2_u32.pow(MINI_TREE_MAX_DEPTH as u32); +type ZkHasher = KeccakHasher; +type EmptyTree = Vec>; + +/// Computes root hash of merkle tree by given list of leaf values. The leaves could have different sizes, normally +/// it's 32 bytes. But it's not always an option, e.g. leaf for L2L1Message is 88 bytes. 
+pub fn mini_merkle_tree_root_hash(values: Vec, leaf_size: usize, tree_size: usize) -> H256 +where + V: IntoIterator + Send, +{ + if values.is_empty() { + return H256::zero(); + } + H256::from_slice( + mini_merkle_tree_proof(values, 0, leaf_size, tree_size) + .pop() + .unwrap() + .as_slice(), + ) +} + +/// Recalculates hashes in merkle tree and returns root hash and merkle proof for specified leaf +pub fn mini_merkle_tree_proof( + values: Vec, + mut idx: usize, + leaf_size: usize, + tree_size: usize, +) -> Vec> +where + V: IntoIterator + Send, +{ + assert!(idx < values.len(), "invalid tree leaf index"); + assert!( + values.len() as u32 <= MAX_NUMBER_OF_LEAVES, + "number of leaves exceeds merkle tree capacity" + ); + + // pick merkle tree depth + let depth = max(tree_size.trailing_zeros() as usize, MINI_TREE_MIN_DEPTH); + let empty_tree = empty_tree(depth, leaf_size); + + // compute leaf hashes + let hasher = ZkHasher::default(); + let mut current_level: Vec<_> = values + .into_par_iter() + .map(|value| hasher.hash_bytes(value)) + .collect(); + + // iterate tree level by level bottom-up, group neighbour nodes and emit their cumulative hash + let mut proof = Vec::with_capacity(depth + 1); + for level_idx in 1..=depth { + let default_value = empty_tree[level_idx - 1].clone(); + let neighbour_idx = idx ^ 1; + let neighbour_hash = current_level.get(neighbour_idx).unwrap_or(&default_value); + proof.push(neighbour_hash.clone()); + + current_level = current_level + .par_chunks(2) + .map(|chunk| { + let right = chunk.get(1).unwrap_or(&default_value); + hasher.compress(&chunk[0], right) + }) + .collect(); + idx /= 2; + } + proof.push(current_level[0].clone()); + proof +} + +/// Empty tree hashes for mini merkle tree of specified depth. +/// Uses `once_cell` internally, thus hashes must be precalculated only once. 
+fn empty_tree(depth: usize, leaf_size: usize) -> &'static EmptyTree { + assert!( + (MINI_TREE_MIN_DEPTH..=MINI_TREE_MAX_DEPTH).contains(&depth), + "merkle tree depth is out of range" + ); + static CONFIGS: OnceCell> = OnceCell::new(); + &CONFIGS.get_or_init(|| { + let hasher = ZkHasher::default(); + (MINI_TREE_MIN_DEPTH..=MINI_TREE_MAX_DEPTH) + .map(|depth| { + let mut hashes = Vec::with_capacity(depth + 1); + hashes.push(hasher.hash_bytes(vec![0; leaf_size])); + + for _ in 0..depth { + let last_hash = hashes.last().unwrap(); + let hash = hasher.compress(last_hash, last_hash); + hashes.push(hash); + } + (depth, hashes) + }) + .collect() + })[&depth] +} + +#[cfg(test)] +mod tests { + use super::*; + + // #[test] + // fn calculate_root_hash() { + // // trivial tree hash matches default config value + // let default_root_hash = empty_tree(MINI_TREE_MIN_DEPTH, 32).last().unwrap().clone(); + // let values = vec![[0; 32]]; + // let hash = mini_merkle_tree_root_hash(values, 32).as_bytes().to_vec(); + // assert_eq!(hash, default_root_hash); + // + // // array of integers + // let hash = mini_merkle_tree_root_hash(gen_test_data(1, 8), 32); + // assert_eq!( + // hash.as_bytes(), + // &[ + // 124, 121, 200, 65, 252, 60, 136, 184, 140, 160, 31, 140, 95, 161, 133, 79, 66, 216, + // 126, 228, 153, 182, 82, 213, 39, 38, 70, 28, 193, 7, 206, 64 + // ], + // ); + // + // // large array + // let hash = mini_merkle_tree_root_hash(gen_test_data(0, 1000), 32); + // assert_eq!( + // hash.as_bytes(), + // &[ + // 143, 68, 29, 29, 247, 41, 93, 101, 30, 223, 112, 103, 30, 157, 200, 128, 183, 193, + // 178, 186, 163, 228, 60, 196, 42, 44, 97, 174, 75, 82, 224, 187 + // ], + // ); + // } + // + // #[test] + // fn calculate_merkle_proof() { + // let hasher = ZkHasher::default(); + // let values = gen_test_data(0, 6); + // let proof = mini_merkle_tree_proof(values.clone(), 3, 32); + // assert_eq!(proof.len(), MINI_TREE_MIN_DEPTH + 1); + // assert_eq!( + // proof, + // vec![ + // 
hasher.hash_bytes(vec![2, 0, 0, 0]), // neighbour hash + // hasher.compress( + // &hasher.hash_bytes(vec![0; 4]), + // &hasher.hash_bytes(vec![1, 0, 0, 0]) + // ), // neighbour branch + // [ + // 138, 204, 157, 41, 179, 91, 233, 147, 240, 27, 150, 54, 120, 138, 71, 109, 120, + // 53, 187, 156, 232, 131, 65, 96, 227, 224, 157, 108, 15, 150, 30, 123 + // ] + // .to_vec(), + // [ + // 91, 130, 182, 149, 167, 172, 38, 104, 225, 136, 183, 95, 125, 79, 167, 159, + // 170, 80, 65, 23, 209, 253, 252, 190, 138, 70, 145, 92, 26, 138, 81, 145 + // ] + // .to_vec(), + // mini_merkle_tree_root_hash(values, 32).as_bytes().to_vec(), // root hash + // ] + // ); + // + // // verify merkle tree heights + // let proof = mini_merkle_tree_proof(gen_test_data(0, 1000), 0, 32); + // assert_eq!(proof.len(), MINI_TREE_MAX_DEPTH + 1); + // let proof = mini_merkle_tree_proof(gen_test_data(0, 100), 0, 32); + // assert_eq!(proof.len(), 8); + // let proof = mini_merkle_tree_proof(gen_test_data(0, 256), 0, 32); + // assert_eq!(proof.len(), 9); + // } + + #[test] + #[should_panic] + fn empty_tree_proof() { + mini_merkle_tree_proof(gen_test_data(0, 0), 0, 32, MINI_TREE_MIN_DEPTH); + } + + #[test] + #[should_panic] + fn invalid_index_fails() { + mini_merkle_tree_proof(gen_test_data(1, 4), 5, 32, MINI_TREE_MIN_DEPTH); + } + + #[test] + #[should_panic] + fn check_capacity() { + mini_merkle_tree_root_hash( + gen_test_data(0, 2 * MAX_NUMBER_OF_LEAVES as usize), + 32, + MINI_TREE_MIN_DEPTH, + ); + } + + fn gen_test_data(left: usize, right: usize) -> Vec> { + (left..right) + .map(|x| vec![(x % 256) as u8, (x / 256) as u8, 0, 0]) + .collect() + } +} diff --git a/core/lib/object_store/Cargo.toml b/core/lib/object_store/Cargo.toml new file mode 100644 index 000000000000..ef540dbde1d7 --- /dev/null +++ b/core/lib/object_store/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zksync_object_store" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" 
+repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +cloud-storage = "0.11.1" +vlog = { path = "../vlog", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_types = { path = "../types", version = "1.0" } +metrics = "0.20" +tokio = { version = "1.21.2", features = ["full"] } + +[dev-dependencies] +expanduser = "1.2.2" +tempdir = "0.3.7" diff --git a/core/lib/object_store/src/file_backed_object_store.rs b/core/lib/object_store/src/file_backed_object_store.rs new file mode 100644 index 000000000000..06918a8e3492 --- /dev/null +++ b/core/lib/object_store/src/file_backed_object_store.rs @@ -0,0 +1,121 @@ +use std::error::Error; +use std::fmt::Debug; +use std::fs; +use std::fs::File; +use std::io::{Read, Write}; + +use crate::object_store::{ + ObjectStore, LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, PROVER_JOBS_BUCKET_PATH, + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, WITNESS_INPUT_BUCKET_PATH, +}; + +#[derive(Debug)] +pub struct FileBackedObjectStore { + base_dir: String, +} + +impl FileBackedObjectStore { + pub fn new(base_dir: String) -> Self { + for bucket in &[ + PROVER_JOBS_BUCKET_PATH, + WITNESS_INPUT_BUCKET_PATH, + LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH, + SCHEDULER_WITNESS_JOBS_BUCKET_PATH, + ] { + fs::create_dir_all(format!("{}/{}", base_dir, bucket)).expect("failed creating bucket"); + } + FileBackedObjectStore { base_dir } + } + + fn filename(&self, bucket: &'static str, key: String) -> String { + format!("{}/{}/{}", self.base_dir, bucket, key) + } +} + +impl ObjectStore for FileBackedObjectStore { + type Bucket = &'static str; + type Key = String; + type Value = Vec; + type Error = Box; + + fn get_store_type(&self) -> &'static str { + "FileBackedStore" + } + + fn get(&self, bucket: Self::Bucket, key: Self::Key) -> Result { + 
let filename = self.filename(bucket, key); + let mut file = File::open(filename)?; + let mut buffer = Vec::::new(); + file.read_to_end(&mut buffer)?; + Ok(buffer) + } + + fn put( + &mut self, + bucket: Self::Bucket, + key: Self::Key, + value: Self::Value, + ) -> Result<(), Self::Error> { + let filename = self.filename(bucket, key); + let mut file = File::create(filename)?; + file.write_all(&value)?; + Ok(()) + } + + fn remove(&mut self, bucket: Self::Bucket, key: Self::Key) -> Result<(), Self::Error> { + let filename = self.filename(bucket, key); + fs::remove_file(filename)?; + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use tempdir::TempDir; + + #[test] + fn test_get() { + let dir = TempDir::new("test-data").unwrap(); + let path = dir.into_path().into_os_string().into_string().unwrap(); + let mut object_store = FileBackedObjectStore::new(path); + let expected = vec![9, 0, 8, 9, 0, 7]; + let result = object_store.put( + PROVER_JOBS_BUCKET_PATH, + "test-key.bin".to_string(), + expected.clone(), + ); + assert!(result.is_ok(), "result must be OK"); + let bytes = object_store + .get(PROVER_JOBS_BUCKET_PATH, "test-key.bin".to_string()) + .unwrap(); + assert_eq!(expected, bytes, "expected didn't match"); + } + + #[test] + fn test_put() { + let dir = TempDir::new("test-data").unwrap(); + let path = dir.into_path().into_os_string().into_string().unwrap(); + let mut object_store = FileBackedObjectStore::new(path); + let bytes = vec![9, 0, 8, 9, 0, 7]; + let result = object_store.put(PROVER_JOBS_BUCKET_PATH, "test-key.bin".to_string(), bytes); + assert!(result.is_ok(), "result must be OK"); + } + + #[test] + fn test_remove() { + let dir = TempDir::new("test-data").unwrap(); + let path = dir.into_path().into_os_string().into_string().unwrap(); + let mut object_store = FileBackedObjectStore::new(path); + let result = object_store.put( + PROVER_JOBS_BUCKET_PATH, + "test-key.bin".to_string(), + vec![0, 1], + ); + assert!(result.is_ok(), "result must be OK"); + 
let result = object_store.remove(PROVER_JOBS_BUCKET_PATH, "test-key.bin".to_string()); + assert!(result.is_ok(), "result must be OK"); + } +} diff --git a/core/lib/object_store/src/gcs_object_store.rs b/core/lib/object_store/src/gcs_object_store.rs new file mode 100644 index 000000000000..1464d581ba13 --- /dev/null +++ b/core/lib/object_store/src/gcs_object_store.rs @@ -0,0 +1,169 @@ +pub use cloud_storage; +use cloud_storage::Client; +use std::env; +use std::error::Error; +use std::sync::mpsc::channel; +use std::time::Instant; +use tokio; + +use zksync_config::ObjectStoreConfig; + +use crate::object_store::ObjectStore; + +#[derive(Debug)] +pub struct GoogleCloudStorage { + client: Client, + bucket_prefix: String, +} +pub const GOOGLE_CLOUD_STORAGE_OBJECT_STORE_TYPE: &str = "GoogleCloudStorage"; + +impl GoogleCloudStorage { + pub fn new() -> Self { + let config = ObjectStoreConfig::from_env(); + env::set_var("SERVICE_ACCOUNT", config.service_account_path); + GoogleCloudStorage { + client: Client::new(), + bucket_prefix: ObjectStoreConfig::from_env().bucket_base_url, + } + } + + fn filename(&self, bucket: &str, filename: &str) -> String { + format!("{}/{}", bucket, filename) + } + + async fn get_async( + self, + bucket: &'static str, + key: String, + ) -> Result, cloud_storage::Error> { + let started_at = Instant::now(); + vlog::info!( + "Fetching data from GCS for key {} from bucket {}", + &self.filename(bucket, &key), + self.bucket_prefix + ); + let blob = self + .client + .object() + .download(&self.bucket_prefix, &self.filename(bucket, &key)) + .await; + vlog::info!( + "Fetched data from GCS for key {} from bucket {} and it took: {:?}", + key, + bucket, + started_at.elapsed() + ); + metrics::histogram!( + "server.object_store.fetching_time", + started_at.elapsed(), + "bucket" => bucket + ); + blob + } + + async fn put_async( + self, + bucket: &'static str, + key: String, + value: Vec, + ) -> Result<(), cloud_storage::Error> { + let started_at = Instant::now(); + 
vlog::info!( + "Storing data to GCS for key {} from bucket {}", + &self.filename(bucket, &key), + self.bucket_prefix + ); + let object = self + .client + .object() + .create( + &self.bucket_prefix, + value, + &self.filename(bucket, &key), + "binary/blob", + ) + .await; + vlog::info!( + "Stored data to GCS for key {} from bucket {} and it took: {:?}", + key, + bucket, + started_at.elapsed() + ); + metrics::histogram!( + "server.object_store.storing_time", + started_at.elapsed(), + "bucket" => bucket + ); + object.map(drop) + } + + async fn remove_async( + self, + bucket: &'static str, + key: String, + ) -> Result<(), cloud_storage::Error> { + vlog::info!( + "Removing data from GCS for key {} from bucket {}", + &self.filename(bucket, &key), + self.bucket_prefix + ); + self.client + .object() + .delete(&self.bucket_prefix, &self.filename(bucket, &key)) + .await + } +} + +impl Default for GoogleCloudStorage { + fn default() -> Self { + Self::new() + } +} + +fn gcs_query(query: F) -> OUT +where + OUT: Send + 'static, + FUT: std::future::Future, + F: FnOnce(GoogleCloudStorage) -> FUT + Send + 'static, +{ + let (tx, rx) = channel(); + std::thread::spawn(move || { + let runtime = tokio::runtime::Builder::new_current_thread() + .enable_io() + .enable_time() + .build() + .unwrap(); + let gcs = GoogleCloudStorage::new(); + let result = runtime.block_on(Box::pin(query(gcs))); + tx.send(result).unwrap(); + }); + rx.recv().unwrap() +} + +impl ObjectStore for GoogleCloudStorage { + type Bucket = &'static str; + type Key = String; + type Value = Vec; + type Error = Box; + + fn get_store_type(&self) -> &'static str { + GOOGLE_CLOUD_STORAGE_OBJECT_STORE_TYPE + } + + fn get(&self, bucket: Self::Bucket, key: Self::Key) -> Result { + gcs_query(move |gcs| gcs.get_async(bucket, key)).map_err(|e| e.into()) + } + + fn put( + &mut self, + bucket: Self::Bucket, + key: Self::Key, + value: Self::Value, + ) -> Result<(), Self::Error> { + gcs_query(move |gcs| gcs.put_async(bucket, key, 
value)).map_err(|e| e.into()) + } + + fn remove(&mut self, bucket: Self::Bucket, key: Self::Key) -> Result<(), Self::Error> { + gcs_query(move |gcs| gcs.remove_async(bucket, key)).map_err(|e| e.into()) + } +} diff --git a/core/lib/object_store/src/gcs_utils.rs b/core/lib/object_store/src/gcs_utils.rs new file mode 100644 index 000000000000..eb7de48a3f77 --- /dev/null +++ b/core/lib/object_store/src/gcs_utils.rs @@ -0,0 +1,42 @@ +use zksync_types::proofs::AggregationRound; +use zksync_types::L1BatchNumber; + +pub fn prover_circuit_input_blob_url( + block_number: L1BatchNumber, + sequence_number: usize, + circuit_type: String, + aggregation_round: AggregationRound, +) -> String { + format!( + "{}_{}_{}_{:?}.bin", + block_number, sequence_number, circuit_type, aggregation_round + ) +} + +pub fn merkle_tree_paths_blob_url(block_number: L1BatchNumber) -> String { + format!("merkel_tree_paths_{}.bin", block_number) +} + +pub fn basic_circuits_blob_url(block_number: L1BatchNumber) -> String { + format!("basic_circuits_{}.bin", block_number) +} + +pub fn basic_circuits_inputs_blob_url(block_number: L1BatchNumber) -> String { + format!("basic_circuits_inputs_{}.bin", block_number) +} + +pub fn leaf_layer_subqueues_blob_url(block_number: L1BatchNumber) -> String { + format!("leaf_layer_subqueues_{}.bin", block_number) +} + +pub fn aggregation_outputs_blob_url(block_number: L1BatchNumber) -> String { + format!("aggregation_outputs_{}.bin", block_number) +} + +pub fn scheduler_witness_blob_url(block_number: L1BatchNumber) -> String { + format!("scheduler_witness_{}.bin", block_number) +} + +pub fn final_node_aggregations_blob_url(block_number: L1BatchNumber) -> String { + format!("final_node_aggregations_{}.bin", block_number) +} diff --git a/core/lib/object_store/src/lib.rs b/core/lib/object_store/src/lib.rs new file mode 100644 index 000000000000..d31ef4bbc383 --- /dev/null +++ b/core/lib/object_store/src/lib.rs @@ -0,0 +1,10 @@ +extern crate core; + +pub mod 
file_backed_object_store; +pub mod gcs_object_store; +pub mod object_store; +pub use cloud_storage; + +pub mod gcs_utils; +#[cfg(test)] +mod tests; diff --git a/core/lib/object_store/src/object_store.rs b/core/lib/object_store/src/object_store.rs new file mode 100644 index 000000000000..fdce540fc363 --- /dev/null +++ b/core/lib/object_store/src/object_store.rs @@ -0,0 +1,88 @@ +use std::error; +use std::fmt::Debug; +use std::str::FromStr; + +pub use cloud_storage::Error; +use zksync_config::ObjectStoreConfig; + +use crate::file_backed_object_store::FileBackedObjectStore; +use crate::gcs_object_store::GoogleCloudStorage; + +pub const PROVER_JOBS_BUCKET_PATH: &str = "prover_jobs"; +pub const WITNESS_INPUT_BUCKET_PATH: &str = "witness_inputs"; +pub const LEAF_AGGREGATION_WITNESS_JOBS_BUCKET_PATH: &str = "leaf_aggregation_witness_jobs"; +pub const NODE_AGGREGATION_WITNESS_JOBS_BUCKET_PATH: &str = "node_aggregation_witness_jobs"; +pub const SCHEDULER_WITNESS_JOBS_BUCKET_PATH: &str = "scheduler_witness_jobs"; + +/// Trait to fetch and store BLOB's from an object store(S3, Google Cloud Storage, Azure Blobstore etc). +pub trait ObjectStore: Debug + Send + Sync { + type Bucket: Debug; + type Key: Debug; + type Value; + type Error; + + fn get_store_type(&self) -> &'static str; + + /// Fetches the value for the given key from the given bucket if it exists otherwise returns Error. + fn get(&self, bucket: Self::Bucket, key: Self::Key) -> Result; + + /// Stores the value associating it with the key into the given bucket, if the key already exist then the value is replaced. + fn put( + &mut self, + bucket: Self::Bucket, + key: Self::Key, + value: Self::Value, + ) -> Result<(), Self::Error>; + + /// Removes the value associated with the key from the given bucket if it exist. 
+ fn remove(&mut self, bucket: Self::Bucket, key: Self::Key) -> Result<(), Self::Error>; +} + +pub type DynamicObjectStore = Box< + dyn ObjectStore< + Bucket = &'static str, + Error = Box, + Key = String, + Value = Vec, + >, +>; + +#[derive(Debug, Eq, PartialEq)] +pub enum ObjectStoreMode { + GCS, + FileBacked, +} + +impl FromStr for ObjectStoreMode { + type Err = String; + + fn from_str(input: &str) -> Result { + match input { + "GCS" => Ok(ObjectStoreMode::GCS), + "FileBacked" => Ok(ObjectStoreMode::FileBacked), + _ => Err(format!("Unknown ObjectStoreMode type: {}", input)), + } + } +} + +pub fn create_object_store( + mode: ObjectStoreMode, + file_backed_base_path: String, +) -> DynamicObjectStore { + match mode { + ObjectStoreMode::GCS => { + vlog::trace!("Initialized GoogleCloudStorage Object store"); + Box::new(GoogleCloudStorage::new()) + } + ObjectStoreMode::FileBacked => { + vlog::trace!("Initialized FileBacked Object store"); + Box::new(FileBackedObjectStore::new(file_backed_base_path)) + } + } +} + +pub fn create_object_store_from_env() -> DynamicObjectStore { + let config = ObjectStoreConfig::from_env(); + let mode = ObjectStoreMode::from_str(&config.mode).unwrap(); + create_object_store(mode, config.file_backed_base_path) +} diff --git a/core/lib/object_store/src/tests.rs b/core/lib/object_store/src/tests.rs new file mode 100644 index 000000000000..86faff232af6 --- /dev/null +++ b/core/lib/object_store/src/tests.rs @@ -0,0 +1,24 @@ +use crate::object_store::{create_object_store, ObjectStoreMode}; +use expanduser::expanduser; +use std::env; + +#[test] +fn test_object_store_in_memory_creation() { + let object_store = create_object_store(ObjectStoreMode::FileBacked, "artifacts".to_string()); + assert_eq!("FileBackedStore", object_store.get_store_type()); +} + +#[test] +fn test_object_store_gcs_creation() { + set_object_store_environment_variable(); + let object_store = create_object_store(ObjectStoreMode::GCS, "".to_string()); + 
assert_eq!("GoogleCloudStorage", object_store.get_store_type()); +} + +fn set_object_store_environment_variable() { + let path = expanduser("~/gcloud/service_account.json").unwrap(); + env::set_var("OBJECT_STORE_SERVICE_ACCOUNT_PATH", path); + env::set_var("OBJECT_STORE_BUCKET_BASE_URL", "/base/url"); + env::set_var("OBJECT_STORE_MODE", "GCS"); + env::set_var("OBJECT_STORE_FILE_BACKED_BASE_PATH", "/base/url"); +} diff --git a/core/lib/prometheus_exporter/Cargo.toml b/core/lib/prometheus_exporter/Cargo.toml new file mode 100644 index 000000000000..03a5042ddcea --- /dev/null +++ b/core/lib/prometheus_exporter/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "prometheus_exporter" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +tokio = "1" +metrics = "0.20" +metrics-exporter-prometheus = "0.11" +zksync_config = { path = "../config", version = "1.0" } +vlog = { path = "../vlog", version = "1.0" } diff --git a/core/lib/prometheus_exporter/src/lib.rs b/core/lib/prometheus_exporter/src/lib.rs new file mode 100644 index 000000000000..a936450b6955 --- /dev/null +++ b/core/lib/prometheus_exporter/src/lib.rs @@ -0,0 +1,69 @@ +use metrics_exporter_prometheus::{Matcher, PrometheusBuilder}; +use tokio::task::JoinHandle; +use zksync_config::configs::utils::Prometheus as PrometheusConfig; + +pub fn run_prometheus_exporter(config: PrometheusConfig, use_pushgateway: bool) -> JoinHandle<()> { + // in seconds + let default_latency_buckets = [0.001, 0.005, 0.025, 0.1, 0.25, 1.0, 5.0, 30.0, 120.0]; + let slow_latency_buckets = [ + 0.33, 1.0, 2.0, 5.0, 10.0, 30.0, 60.0, 180.0, 600.0, 1800.0, 3600.0, + ]; + let prover_buckets = [ + 1.0, 10.0, 20.0, 40.0, 60.0, 120.0, 240.0, 360.0, 600.0, 1800.0, 3600.0, + ]; + + let storage_interactions_per_call_buckets = [ + 10.0, 
100.0, 1000.0, 10000.0, 100000.0, 1000000.0, 10000000.0, + ]; + + let builder = if use_pushgateway { + let job_id = "zksync-pushgateway"; + let namespace = std::env::var("POD_NAMESPACE").unwrap_or_else(|_| { + vlog::warn!("Missing POD_NAMESPACE env"); + "UNKNOWN_NAMESPACE".to_string() + }); + let pod = std::env::var("POD_NAME").unwrap_or_else(|_| { + vlog::warn!("Missing POD_NAME env"); + "UNKNOWN_POD".to_string() + }); + let endpoint = format!( + "{}/metrics/job/{}/namespace/{}/pod/{}", + config.pushgateway_url, job_id, namespace, pod + ); + PrometheusBuilder::new() + .with_push_gateway(endpoint.as_str(), config.push_interval()) + .unwrap() + } else { + let addr = ([0, 0, 0, 0], config.listener_port); + PrometheusBuilder::new().with_http_listener(addr) + }; + + let (recorder, exporter) = builder + .set_buckets(&default_latency_buckets) + .unwrap() + .set_buckets_for_metric( + Matcher::Full("runtime_context.storage_interaction".to_owned()), + &storage_interactions_per_call_buckets, + ) + .unwrap() + .set_buckets_for_metric(Matcher::Prefix("server.prover".to_owned()), &prover_buckets) + .unwrap() + .set_buckets_for_metric( + Matcher::Prefix("server.witness_generator".to_owned()), + &slow_latency_buckets, + ) + .unwrap() + .build() + .expect("failed to install Prometheus recorder"); + + metrics::set_boxed_recorder(Box::new(recorder)).expect("failed to set metrics recorder"); + + tokio::spawn(async move { + tokio::pin!(exporter); + loop { + tokio::select! 
{ + _ = &mut exporter => {} + } + } + }) +} diff --git a/core/lib/prover_utils/Cargo.toml b/core/lib/prover_utils/Cargo.toml new file mode 100644 index 000000000000..5d06f81208ac --- /dev/null +++ b/core/lib/prover_utils/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "zksync_prover_utils" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +vlog = { path = "../../lib/vlog", version = "1.0" } + +metrics = "0.20" +reqwest = { version = "0.11", features = ["blocking"] } diff --git a/core/lib/prover_utils/src/lib.rs b/core/lib/prover_utils/src/lib.rs new file mode 100644 index 000000000000..97d1b00cc4e8 --- /dev/null +++ b/core/lib/prover_utils/src/lib.rs @@ -0,0 +1,89 @@ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +use std::fs::create_dir_all; +use std::io::Cursor; +use std::path::Path; +use std::time::Duration; +use std::time::Instant; + +fn download_bytes(key_download_url: &str) -> reqwest::Result> { + vlog::info!("Downloading initial setup from {:?}", key_download_url); + + const DOWNLOAD_TIMEOUT: Duration = Duration::from_secs(120); + let client = reqwest::blocking::Client::builder() + .timeout(DOWNLOAD_TIMEOUT) + .build() + .unwrap(); + + const DOWNLOAD_RETRIES: usize = 5; + let mut retry_count = 0; + + while retry_count < DOWNLOAD_RETRIES { + let bytes = client + .get(key_download_url) + .send() + .and_then(|response| response.bytes().map(|bytes| bytes.to_vec())); + match bytes { + Ok(bytes) => return Ok(bytes), + Err(_) => retry_count += 1, + } + + vlog::warn!("Failed to download keys. 
Backing off for 5 second"); + std::thread::sleep(Duration::from_secs(5)); + } + + client + .get(key_download_url) + .send() + .and_then(|response| response.bytes().map(|bytes| bytes.to_vec())) +} + +pub fn ensure_initial_setup_keys_present(initial_setup_key_path: &str, key_download_url: &str) { + if Path::new(initial_setup_key_path).exists() { + vlog::info!( + "Initial setup already present at {:?}", + initial_setup_key_path + ); + return; + } + let started_at = Instant::now(); + + let bytes = download_bytes(key_download_url).expect("Failed downloading initial setup"); + let initial_setup_key_dir = Path::new(initial_setup_key_path).parent().unwrap(); + create_dir_all(initial_setup_key_dir).unwrap_or_else(|_| { + panic!( + "Failed creating dirs recursively: {:?}", + initial_setup_key_dir + ) + }); + let mut file = std::fs::File::create(initial_setup_key_path) + .expect("Cannot create file for the initial setup"); + let mut content = Cursor::new(bytes); + std::io::copy(&mut content, &mut file).expect("Cannot write the downloaded key to the file"); + metrics::histogram!("server.prover.download_time", started_at.elapsed()); +} + +pub fn numeric_index_to_circuit_name(circuit_numeric_index: u8) -> Option<&'static str> { + match circuit_numeric_index { + 0 => Some("Scheduler"), + 1 => Some("Node aggregation"), + 2 => Some("Leaf aggregation"), + 3 => Some("Main VM"), + 4 => Some("Decommitts sorter"), + 5 => Some("Code decommitter"), + 6 => Some("Log demuxer"), + 7 => Some("Keccak"), + 8 => Some("SHA256"), + 9 => Some("ECRecover"), + 10 => Some("RAM permutation"), + 11 => Some("Storage sorter"), + 12 => Some("Storage application"), + 13 => Some("Initial writes pubdata rehasher"), + 14 => Some("Repeated writes pubdata rehasher"), + 15 => Some("Events sorter"), + 16 => Some("L1 messages sorter"), + 17 => Some("L1 messages rehasher"), + 18 => Some("L1 messages merklizer"), + _ => None, + } +} diff --git a/core/lib/queued_job_processor/Cargo.toml 
b/core/lib/queued_job_processor/Cargo.toml new file mode 100644 index 000000000000..c31dc9976f30 --- /dev/null +++ b/core/lib/queued_job_processor/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "zksync_queued_job_processor" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + + +[dependencies] +async-trait = "0.1" +tokio = { version = "1", features = ["time"] } + +zksync_dal = {path = "../../lib/dal", version = "1.0" } +zksync_utils = {path = "../../lib/utils", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } diff --git a/core/lib/queued_job_processor/src/lib.rs b/core/lib/queued_job_processor/src/lib.rs new file mode 100644 index 000000000000..1867d6607223 --- /dev/null +++ b/core/lib/queued_job_processor/src/lib.rs @@ -0,0 +1,140 @@ +use std::fmt::Debug; +use std::time::{Duration, Instant}; +use tokio::sync::watch; +use tokio::task::JoinHandle; +use tokio::time::sleep; +use zksync_dal::ConnectionPool; +use zksync_utils::panic_extractor::try_extract_panic_message; + +pub use async_trait::async_trait; + +#[async_trait] +pub trait JobProcessor: Sync + Send { + type Job: Send + 'static; + type JobId: Send + Debug + 'static; + type JobArtifacts: Send + 'static; + + const POLLING_INTERVAL_MS: u64 = 250; + const SERVICE_NAME: &'static str; + + /// Returns None when there is no pending job + /// Otherwise, returns Some(job_id, job) + /// Note: must be concurrency-safe - that is, one job must not be returned in two parallel processes + async fn get_next_job( + &self, + connection_pool: ConnectionPool, + ) -> Option<(Self::JobId, Self::Job)>; + + /// Invoked when `process_job` panics + /// Should mark the job as failed + async fn save_failure( + connection_pool: ConnectionPool, + job_id: Self::JobId, + started_at: Instant, + error: String, + ) -> (); 
+ + /// Function that processes a job + async fn process_job( + connection_pool: ConnectionPool, + job: Self::Job, + started_at: Instant, + ) -> JoinHandle; + + /// `iterations_left`: + /// To run indefinitely, pass `None`, + /// To process one job, pass `Some(1)`, + /// To process a batch, pass `Some(batch_size)`. + async fn run( + self, + connection_pool: ConnectionPool, + stop_receiver: watch::Receiver, + mut iterations_left: Option, + ) where + Self: Sized, + { + while iterations_left.map_or(true, |i| i > 0) { + if *stop_receiver.borrow() { + vlog::warn!( + "Stop signal received, shutting down {} component while waiting for a new job", + Self::SERVICE_NAME + ); + return; + } + if let Some((job_id, job)) = Self::get_next_job(&self, connection_pool.clone()).await { + let started_at = Instant::now(); + iterations_left = iterations_left.map(|i| i - 1); + + let connection_pool_for_task = connection_pool.clone(); + vlog::debug!( + "Spawning thread processing {:?} job with id {:?}", + Self::SERVICE_NAME, + job_id + ); + let task = Self::process_job(connection_pool_for_task, job, started_at).await; + + Self::wait_for_task(connection_pool.clone(), job_id, started_at, task).await + } else if iterations_left.is_some() { + vlog::info!("No more jobs to process. Server can stop now."); + return; + } else { + sleep(Duration::from_millis(Self::POLLING_INTERVAL_MS)).await; + } + } + vlog::info!("Requested number of jobs is processed. Server can stop now.") + } + + async fn wait_for_task( + connection_pool: ConnectionPool, + job_id: Self::JobId, + started_at: Instant, + task: JoinHandle, + ) { + loop { + vlog::trace!( + "Polling {} task with id {:?}. 
Is finished: {}", + Self::SERVICE_NAME, + job_id, + task.is_finished() + ); + if task.is_finished() { + let result = task.await; + match result { + Ok(data) => { + vlog::debug!( + "{} Job {:?} finished successfully", + Self::SERVICE_NAME, + job_id + ); + Self::save_result(connection_pool.clone(), job_id, started_at, data).await; + } + Err(error) => { + let error_message = try_extract_panic_message(error); + vlog::error!( + "Error occurred while processing {} job {:?}: {:?}", + Self::SERVICE_NAME, + job_id, + error_message + ); + Self::save_failure( + connection_pool.clone(), + job_id, + started_at, + error_message, + ) + .await; + } + } + break; + } + sleep(Duration::from_millis(Self::POLLING_INTERVAL_MS)).await; + } + } + + async fn save_result( + connection_pool: ConnectionPool, + job_id: Self::JobId, + started_at: Instant, + artifacts: Self::JobArtifacts, + ); +} diff --git a/core/lib/state/Cargo.toml b/core/lib/state/Cargo.toml new file mode 100644 index 000000000000..1ec7c480a05e --- /dev/null +++ b/core/lib/state/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "zksync_state" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_utils = { path = "../utils", version = "1.0" } +zksync_types = { path = "../types", version = "1.0" } +zksync_storage = { path = "../storage", version = "1.0" } + +vlog = { path = "../../lib/vlog", version = "1.0" } + +[dev-dependencies] +tempfile = "3.0.2" diff --git a/core/lib/state/src/lib.rs b/core/lib/state/src/lib.rs new file mode 100644 index 000000000000..f7c0d81fb834 --- /dev/null +++ b/core/lib/state/src/lib.rs @@ -0,0 +1,4 @@ +//! 
Execution of transaction in zkSync Era + +pub mod secondary_storage; +pub mod storage_view; diff --git a/core/lib/state/src/secondary_storage.rs b/core/lib/state/src/secondary_storage.rs new file mode 100644 index 000000000000..3982e035eadf --- /dev/null +++ b/core/lib/state/src/secondary_storage.rs @@ -0,0 +1,206 @@ +use std::collections::HashMap; +use std::ops::Deref; +use zksync_storage::db::StateKeeperColumnFamily; +use zksync_storage::rocksdb::WriteBatch; +use zksync_storage::util::{deserialize_block_number, serialize_block_number}; +use zksync_storage::RocksDB; +use zksync_types::{ + Address, L1BatchNumber, StorageKey, StorageLog, StorageLogKind, StorageValue, + ZkSyncReadStorage, H256, +}; + +const BLOCK_NUMBER_KEY: &[u8; 12] = b"block_number"; + +#[derive(Debug)] +pub struct SecondaryStateStorage { + db: RocksDB, + // currently not used + pending_patch: PendingPatch, +} + +#[derive(Default, Debug)] +struct PendingPatch { + state: HashMap, + contracts: HashMap>, + factory_deps: HashMap>, +} + +impl ZkSyncReadStorage for &SecondaryStateStorage { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + self.read_value_inner(key).unwrap_or_else(H256::zero) + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + self.read_value_inner(key).is_none() + } + + fn load_contract(&mut self, address: Address) -> Option> { + if let Some(value) = self.pending_patch.contracts.get(&address) { + return Some(value.clone()); + } + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::Contracts); + self.db + .get_cf(cf, address.to_fixed_bytes()) + .expect("failed to read rocksdb state value") + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.load_factory_dependency(hash) + } +} + +impl SecondaryStateStorage { + pub fn new(db: RocksDB) -> Self { + Self { + db, + pending_patch: PendingPatch::default(), + } + } + + fn read_value_inner(&self, key: &StorageKey) -> Option { + let cf = self + .db + 
.cf_state_keeper_handle(StateKeeperColumnFamily::State); + self.db + .get_cf(cf, SecondaryStateStorage::serialize_state_key(key)) + .expect("failed to read rocksdb state value") + .map(|value| H256::from_slice(&value)) + } + + pub fn load_factory_dependency(&self, hash: H256) -> Option> { + if let Some(value) = self.pending_patch.factory_deps.get(&hash) { + return Some(value.clone()); + } + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::FactoryDeps); + self.db + .get_cf(cf, hash.to_fixed_bytes()) + .expect("failed to read rocksdb state value") + } + + pub fn process_transaction_logs(&mut self, logs: &[StorageLog]) { + let mut updates = HashMap::new(); + for log in logs { + if log.kind == StorageLogKind::Write { + updates.insert(log.key, log.value.to_fixed_bytes()); + } + } + for (key, value) in updates { + if value != [0u8; 32] || self.deref().read_value_inner(&key).is_some() { + self.pending_patch.state.insert(key, value); + } + } + } + + pub fn store_contract(&mut self, address: Address, bytecode: Vec) { + self.pending_patch.contracts.insert(address, bytecode); + } + + pub fn store_factory_dep(&mut self, hash: H256, bytecode: Vec) { + self.pending_patch.factory_deps.insert(hash, bytecode); + } + + pub fn rollback( + &mut self, + logs: Vec<(H256, Option)>, + contracts: Vec
, + factory_deps: Vec, + l1_batch_number: L1BatchNumber, + ) { + let mut batch = WriteBatch::default(); + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::State); + for (key, value) in logs { + match value { + Some(value) => batch.put_cf(cf, key.0, value.to_fixed_bytes()), + None => batch.delete_cf(cf, key.0), + } + } + batch.put_cf( + cf, + BLOCK_NUMBER_KEY, + serialize_block_number(l1_batch_number.0 + 1), + ); + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::Contracts); + for contract_address in contracts { + batch.delete_cf(cf, contract_address.to_fixed_bytes()); + } + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::FactoryDeps); + for factory_dep_hash in factory_deps { + batch.delete_cf(cf, factory_dep_hash.to_fixed_bytes()); + } + + self.db + .write(batch) + .expect("failed to save state data into rocksdb"); + } + + pub fn save(&mut self, l1_batch_number: L1BatchNumber) { + let mut batch = WriteBatch::default(); + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::State); + batch.put_cf( + cf, + BLOCK_NUMBER_KEY, + serialize_block_number(l1_batch_number.0), + ); + for (key, value) in self.pending_patch.state.iter() { + batch.put_cf(cf, Self::serialize_state_key(key), value); + } + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::Contracts); + for (address, value) in self.pending_patch.contracts.iter() { + batch.put_cf(cf, address.to_fixed_bytes(), value); + } + + let cf = self + .db + .cf_state_keeper_handle(StateKeeperColumnFamily::FactoryDeps); + for (hash, value) in self.pending_patch.factory_deps.iter() { + batch.put_cf(cf, hash.to_fixed_bytes(), value); + } + + self.db + .write(batch) + .expect("failed to save state data into rocksdb"); + self.pending_patch = PendingPatch::default(); + } + + /// Returns the last processed l1 batch number + 1 + pub fn get_l1_batch_number(&self) -> L1BatchNumber { + let cf = self + .db + 
.cf_state_keeper_handle(StateKeeperColumnFamily::State); + let block_number = self + .db + .get_cf(cf, BLOCK_NUMBER_KEY) + .expect("failed to fetch block number") + .map(|bytes| deserialize_block_number(&bytes)) + .unwrap_or(0); + L1BatchNumber(block_number) + } + + fn serialize_state_key(key: &StorageKey) -> Vec { + key.hashed_key().to_fixed_bytes().into() + } + + pub fn get_estimated_map_size(&self) -> u64 { + self.db + .get_estimated_number_of_entries(StateKeeperColumnFamily::State) + } +} diff --git a/core/lib/state/src/storage_view.rs b/core/lib/state/src/storage_view.rs new file mode 100644 index 000000000000..034de9ffe75e --- /dev/null +++ b/core/lib/state/src/storage_view.rs @@ -0,0 +1,167 @@ +use std::collections::HashMap; + +use std::fmt::Debug; +use zksync_types::{tokens::TokenInfo, Address, StorageKey, StorageValue, ZkSyncReadStorage, H256}; + +/// `StorageView` is buffer for `StorageLog`s between storage and transaction execution code. +/// In order to commit transactions logs should be submitted +/// to `ZkSyncStorage` after transaction is executed. +/// Note, you must not use one `StorageView` object for multiple L1 batches, +/// otherwise `is_write_initial` will return incorrect values because of the caching. +#[derive(Debug)] +pub struct StorageView { + storage_handle: S, + // Used for caching and to get the list/count of modified keys + modified_storage_keys: HashMap, + // Used purely for caching + read_storage_keys: HashMap, + // Cache for initial/repeated writes. It's only valid within one L1 batch execution. 
+ read_initial_writes: HashMap, + deployed_contracts: HashMap>, + added_tokens: Vec, + new_factory_deps: HashMap>, + + pub storage_invocations: usize, + pub new_storage_invocations: usize, + pub get_value_storage_invocations: usize, + pub set_value_storage_invocations: usize, + pub contract_load_invocations: usize, +} + +impl StorageView { + pub fn new(storage_handle: S) -> Self { + Self { + storage_handle, + modified_storage_keys: HashMap::new(), + read_storage_keys: HashMap::new(), + read_initial_writes: HashMap::new(), + deployed_contracts: HashMap::new(), + new_factory_deps: HashMap::new(), + added_tokens: vec![], + storage_invocations: 0, + get_value_storage_invocations: 0, + contract_load_invocations: 0, + set_value_storage_invocations: 0, + new_storage_invocations: 0, + } + } + + pub fn get_value(&mut self, key: &StorageKey) -> StorageValue { + self.get_value_storage_invocations += 1; + let value = self.get_value_no_log(key); + + vlog::trace!( + "read value {:?} {:?} ({:?}/{:?})", + key.hashed_key().0, + value.0, + key.address(), + key.key() + ); + + value + } + + // returns the value before write. Doesn't generate read logs. 
+ // `None` for value is only possible for rolling back the transaction + pub fn set_value(&mut self, key: &StorageKey, value: StorageValue) -> StorageValue { + self.set_value_storage_invocations += 1; + let original = self.get_value_no_log(key); + + vlog::trace!( + "write value {:?} value: {:?} original value: {:?} ({:?}/{:?})", + key.hashed_key().0, + value, + original, + key.address(), + key.key() + ); + self.modified_storage_keys.insert(*key, value); + + original + } + + fn get_value_no_log(&mut self, key: &StorageKey) -> StorageValue { + self.storage_invocations += 1; + if let Some(value) = self.modified_storage_keys.get(key) { + *value + } else if let Some(value) = self.read_storage_keys.get(key) { + *value + } else { + self.new_storage_invocations += 1; + let value = self.storage_handle.read_value(key); + self.read_storage_keys.insert(*key, value); + value + } + } + + pub fn is_write_initial(&mut self, key: &StorageKey) -> bool { + if let Some(is_initial) = self.read_initial_writes.get(key) { + *is_initial + } else { + let is_initial = self.storage_handle.is_write_initial(key); + self.read_initial_writes.insert(*key, is_initial); + is_initial + } + } + + pub fn get_modified_storage_keys(&self) -> &HashMap { + &self.modified_storage_keys + } + + pub fn save_token(&mut self, token: TokenInfo) { + self.added_tokens.push(token); + } + + pub fn save_contract(&mut self, address: Address, bytecode: Vec) { + self.deployed_contracts.insert(address, bytecode); + } + + pub fn load_contract(&mut self, address: Address) -> Option> { + self.contract_load_invocations += 1; + self.storage_handle + .load_contract(address) + .or_else(|| self.deployed_contracts.get(&address).cloned()) + } + + pub fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.storage_handle + .load_factory_dep(hash) + .or_else(|| self.new_factory_deps.get(&hash).cloned()) + } + + pub fn save_factory_dep(&mut self, hash: H256, bytecode: Vec) { + self.new_factory_deps.insert(hash, bytecode); + } 
+} + +#[cfg(test)] +mod test { + use super::*; + use crate::secondary_storage::SecondaryStateStorage; + use tempfile::TempDir; + use zksync_storage::db::Database; + use zksync_storage::RocksDB; + use zksync_types::{AccountTreeId, H256}; + use zksync_utils::u32_to_h256; + + #[test] + fn test_storage_accessor() { + let account: AccountTreeId = AccountTreeId::new(Address::from([0xfe; 20])); + let key = u32_to_h256(61); + let value = u32_to_h256(73); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let raw_storage = SecondaryStateStorage::new(db); + + let mut storage_accessor = StorageView::new(&raw_storage); + + let default_value = storage_accessor.get_value(&StorageKey::new(account, key)); + assert_eq!(default_value, H256::default()); + + storage_accessor.set_value(&StorageKey::new(account, key), value); + + let new_value = storage_accessor.get_value(&StorageKey::new(account, key)); + assert_eq!(new_value, value); + } +} diff --git a/core/lib/storage/Cargo.toml b/core/lib/storage/Cargo.toml new file mode 100644 index 000000000000..446e21fa8447 --- /dev/null +++ b/core/lib/storage/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zksync_storage" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_types = { path = "../types", version = "1.0" } +zksync_utils = { path = "../utils", version = "1.0" } + +byteorder = "1.3" +vlog = { path = "../../lib/vlog", version = "1.0" } +serde = { version = "1.0", features = ["derive"] } +bincode = "1.3" +once_cell = "1.7" +rocksdb = { version = "0.18.0", default-features = false, features = ["snappy"] } +num_cpus = "1.13" diff --git a/core/lib/storage/src/db.rs b/core/lib/storage/src/db.rs new file 
mode 100644 index 000000000000..5e908ecea89d --- /dev/null +++ b/core/lib/storage/src/db.rs @@ -0,0 +1,221 @@ +use once_cell::sync::Lazy; +use rocksdb::{ + AsColumnFamilyRef, BlockBasedOptions, ColumnFamily, ColumnFamilyDescriptor, Options, + WriteBatch, DB, +}; +use std::path::Path; +use std::sync::{Condvar, Mutex}; + +/// Number of active RocksDB instances +/// Used to determine if it's safe to exit current process +/// Not properly dropped rocksdb instances can lead to db corruption +#[allow(clippy::mutex_atomic)] +pub(crate) static ROCKSDB_INSTANCE_COUNTER: Lazy<(Mutex, Condvar)> = + Lazy::new(|| (Mutex::new(0), Condvar::new())); + +/// Thin wrapper around RocksDB +#[derive(Debug)] +pub struct RocksDB { + db: DB, + _registry_entry: RegistryEntry, +} + +#[derive(Debug)] +pub enum Database { + MerkleTree, + StateKeeper, +} + +#[derive(Debug)] +pub enum MerkleTreeColumnFamily { + Tree, + LeafIndices, +} + +#[derive(Debug)] +pub enum StateKeeperColumnFamily { + State, + Contracts, + FactoryDeps, +} + +impl MerkleTreeColumnFamily { + fn all() -> &'static [Self] { + &[Self::Tree, Self::LeafIndices] + } +} + +impl StateKeeperColumnFamily { + fn all() -> &'static [Self] { + &[Self::State, Self::Contracts, Self::FactoryDeps] + } +} + +impl std::fmt::Display for MerkleTreeColumnFamily { + fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + let value = match self { + MerkleTreeColumnFamily::Tree => "default", + MerkleTreeColumnFamily::LeafIndices => "leaf_indices", + }; + write!(formatter, "{}", value) + } +} + +impl std::fmt::Display for StateKeeperColumnFamily { + fn fmt(&self, formatter: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> { + let value = match self { + StateKeeperColumnFamily::State => "state", + StateKeeperColumnFamily::Contracts => "contracts", + StateKeeperColumnFamily::FactoryDeps => "factory_deps", + }; + write!(formatter, "{}", value) + } +} + +impl RocksDB { + pub fn new>(database: Database, path: P, 
tune_options: bool) -> Self { + let options = Self::rocksdb_options(tune_options); + let db = match database { + Database::MerkleTree => { + let cfs = MerkleTreeColumnFamily::all().iter().map(|cf| { + ColumnFamilyDescriptor::new(cf.to_string(), Self::rocksdb_options(tune_options)) + }); + DB::open_cf_descriptors(&options, path, cfs).expect("failed to init rocksdb") + } + Database::StateKeeper => { + let cfs = StateKeeperColumnFamily::all().iter().map(|cf| { + ColumnFamilyDescriptor::new(cf.to_string(), Self::rocksdb_options(tune_options)) + }); + DB::open_cf_descriptors(&options, path, cfs).expect("failed to init rocksdb") + } + }; + + Self { + db, + _registry_entry: RegistryEntry::new(), + } + } + + fn rocksdb_options(tune_options: bool) -> Options { + let mut options = Options::default(); + options.create_missing_column_families(true); + options.create_if_missing(true); + if tune_options { + options.increase_parallelism(num_cpus::get() as i32); + let mut block_based_options = BlockBasedOptions::default(); + block_based_options.set_bloom_filter(10.0, false); + options.set_block_based_table_factory(&block_based_options); + } + options + } + + pub fn get_estimated_number_of_entries(&self, cf: StateKeeperColumnFamily) -> u64 { + let error_msg = "failed to get estimated number of entries"; + let cf = self.db.cf_handle(&cf.to_string()).unwrap(); + self.db + .property_int_value_cf(cf, "rocksdb.estimate-num-keys") + .expect(error_msg) + .expect(error_msg) + } + + pub fn multi_get(&self, keys: I) -> Vec>, rocksdb::Error>> + where + K: AsRef<[u8]>, + I: IntoIterator, + { + self.db.multi_get(keys) + } + + pub fn multi_get_cf<'a, 'b: 'a, K, I, W: 'b>( + &'a self, + keys: I, + ) -> Vec>, rocksdb::Error>> + where + K: AsRef<[u8]>, + I: IntoIterator, + W: AsColumnFamilyRef, + { + self.db.multi_get_cf(keys) + } + + pub fn write(&self, batch: WriteBatch) -> Result<(), rocksdb::Error> { + self.db.write(batch) + } + + pub fn put(&self, key: K, value: V) -> Result<(), 
rocksdb::Error> + where + K: AsRef<[u8]>, + V: AsRef<[u8]>, + { + self.db.put(key, value) + } + + pub fn get>(&self, key: K) -> Result>, rocksdb::Error> { + self.db.get(key) + } + + /// Returns column family handle for State Keeper database + pub fn cf_state_keeper_handle(&self, cf: StateKeeperColumnFamily) -> &ColumnFamily { + self.db + .cf_handle(&cf.to_string()) + .unwrap_or_else(|| panic!("Column family '{}' doesn't exist", cf)) + } + + /// Returns column family handle for Merkle Tree database + pub fn cf_merkle_tree_handle(&self, cf: MerkleTreeColumnFamily) -> &ColumnFamily { + self.db + .cf_handle(&cf.to_string()) + .unwrap_or_else(|| panic!("Column family '{}' doesn't exist", cf)) + } + + pub fn get_cf>( + &self, + cf: &impl AsColumnFamilyRef, + key: K, + ) -> Result>, rocksdb::Error> { + self.db.get_cf(cf, key) + } + + /// awaits termination of all running rocksdb instances + pub fn await_rocksdb_termination() { + let (lock, cvar) = &*ROCKSDB_INSTANCE_COUNTER; + let mut num_instances = lock.lock().unwrap(); + while *num_instances != 0 { + vlog::info!( + "Waiting for all the RocksDB instances to be dropped, {} remaining", + *num_instances + ); + num_instances = cvar.wait(num_instances).unwrap(); + } + vlog::info!("All the RocksDB instances are dropped"); + } +} + +impl Drop for RocksDB { + fn drop(&mut self) { + self.db.cancel_all_background_work(true); + } +} + +/// Empty struct used to register rocksdb instance +#[derive(Debug)] +struct RegistryEntry; + +impl RegistryEntry { + fn new() -> Self { + let (lock, cvar) = &*ROCKSDB_INSTANCE_COUNTER; + let mut num_instances = lock.lock().unwrap(); + *num_instances += 1; + cvar.notify_all(); + Self + } +} + +impl Drop for RegistryEntry { + fn drop(&mut self) { + let (lock, cvar) = &*ROCKSDB_INSTANCE_COUNTER; + let mut num_instances = lock.lock().unwrap(); + *num_instances -= 1; + cvar.notify_all(); + } +} diff --git a/core/lib/storage/src/lib.rs b/core/lib/storage/src/lib.rs new file mode 100644 index 
000000000000..03ac6938cd33 --- /dev/null +++ b/core/lib/storage/src/lib.rs @@ -0,0 +1,5 @@ +pub mod db; +pub mod util; + +pub use db::RocksDB; +pub use rocksdb; diff --git a/core/lib/storage/src/util.rs b/core/lib/storage/src/util.rs new file mode 100644 index 000000000000..252157d2b38d --- /dev/null +++ b/core/lib/storage/src/util.rs @@ -0,0 +1,21 @@ +use byteorder::ReadBytesExt; +use byteorder::{ByteOrder, LittleEndian}; +use zksync_types::U256; + +pub fn serialize_block_number(block_number: u32) -> Vec { + let mut bytes = vec![0; 4]; + LittleEndian::write_u32(&mut bytes, block_number); + bytes +} + +pub fn deserialize_block_number(mut bytes: &[u8]) -> u32 { + bytes + .read_u32::() + .expect("failed to deserialize block number") +} + +pub fn serialize_tree_leaf(leaf: U256) -> Vec { + let mut bytes = vec![0; 32]; + leaf.to_big_endian(&mut bytes); + bytes +} diff --git a/core/lib/types/Cargo.toml b/core/lib/types/Cargo.toml new file mode 100644 index 000000000000..36787dc0000f --- /dev/null +++ b/core/lib/types/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "zksync_types" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +readme = "README.md" + +[dependencies] +zksync_config = { path = "../config", version = "1.0" } +zksync_utils = { path = "../utils", version = "1.0" } +zksync_basic_types = { path = "../basic_types", version = "1.0" } +zksync_contracts = { path = "../contracts", version = "1.0" } +zksync_mini_merkle_tree = { path = "../mini_merkle_tree", version = "1.0"} +# We need this import because we wanat DAL to be responsible for (de)serialization +codegen = { git = "https://github.com/matter-labs/solidity_plonk_verifier.git", branch = "dev" } + +zkevm_test_harness = { git = "https://github.com/matter-labs/zkevm_test_harness.git", branch = "v1.3.1" } 
+#zkevm_test_harness = { path = "../../../../zkevm_test_harness" } + + +#codegen = { path = "../../../../solidity_plonk_verifier/codegen" } + +zk_evm = {git = "https://github.com/matter-labs/zk_evm.git", branch = "v1.3.1"} +#zk_evm = { path = "../../../../zk_evm" } + +zkevm-assembly = { git = "https://github.com/matter-labs/zkEVM-assembly.git", branch = "v1.3.1" } + +rlp = "0.5" +metrics = "0.20" + +num = { version = "0.3.1", features = ["serde"] } +bigdecimal = { version = "=0.2.0", features = ["serde"]} +hex = "0.4" +thiserror = "1.0" +chrono = { version = "0.4", features = ["serde", "rustc-serialize"] } +once_cell = "1.7" +tiny-keccak = { version = "1.5" } + +serde = "1.0.90" +serde_json = "1.0.0" +serde_with = "1" +rayon = "1.5.3" +strum = { version = "0.24", features = ["derive"] } + +# Crypto stuff +parity-crypto = { version = "0.9", features = ["publickey"] } +ethbloom = "0.11" +blake2 = "0.10" + +[dev-dependencies] +secp256k1 = {version = "0.21", features = ["recovery"] } +tokio = { version = "1", features = ["rt", "macros"] } +serde_with = {version="1", features=["hex"]} + diff --git a/core/lib/types/src/aggregated_operations.rs b/core/lib/types/src/aggregated_operations.rs new file mode 100644 index 000000000000..9acdaef13317 --- /dev/null +++ b/core/lib/types/src/aggregated_operations.rs @@ -0,0 +1,231 @@ +use crate::commitment::BlockWithMetadata; +use crate::U256; +use codegen::serialize_proof; +use serde::{Deserialize, Serialize}; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zkevm_test_harness::bellman::bn256::Bn256; +use zkevm_test_harness::bellman::plonk::better_better_cs::proof::Proof; +use zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zksync_basic_types::{ethabi::Token, L1BatchNumber}; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlocksCommitOperation { + pub last_committed_block: BlockWithMetadata, + pub blocks: Vec, +} + +impl BlocksCommitOperation { + pub fn 
get_eth_tx_args(&self) -> Vec { + let stored_block_info = self.last_committed_block.l1_header_data(); + let blocks_to_commit = self + .blocks + .iter() + .map(|block| block.l1_commit_data()) + .collect(); + + vec![stored_block_info, Token::Array(blocks_to_commit)] + } + + pub fn block_range(&self) -> (L1BatchNumber, L1BatchNumber) { + let BlocksCommitOperation { blocks, .. } = self; + ( + blocks.first().map(|b| b.header.number).unwrap_or_default(), + blocks.last().map(|b| b.header.number).unwrap_or_default(), + ) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlocksCreateProofOperation { + pub blocks: Vec, + pub proofs_to_pad: usize, +} + +#[derive(Clone)] +pub struct BlockProofForL1 { + pub aggregation_result_coords: [[u8; 32]; 4], + pub scheduler_proof: Proof>>, +} + +#[derive(Clone)] +pub struct BlocksProofOperation { + pub prev_block: BlockWithMetadata, + pub blocks: Vec, + pub proofs: Vec, + pub should_verify: bool, +} + +impl BlocksProofOperation { + pub fn get_eth_tx_args(&self) -> Vec { + let prev_block = self.prev_block.l1_header_data(); + let blocks_arg = Token::Array(self.blocks.iter().map(|b| b.l1_header_data()).collect()); + + if self.should_verify { + // currently we only support submitting a single proof + assert_eq!(self.proofs.len(), 1); + assert_eq!(self.blocks.len(), 1); + + let BlockProofForL1 { + aggregation_result_coords, + scheduler_proof, + } = self.proofs.first().unwrap(); + + let (_, proof) = serialize_proof(scheduler_proof); + + let proof_input = Token::Tuple(vec![ + Token::Array( + aggregation_result_coords + .iter() + .map(|bytes| Token::Uint(U256::from_big_endian(bytes))) + .collect(), + ), + Token::Array(proof.into_iter().map(Token::Uint).collect()), + ]); + + vec![prev_block, blocks_arg, proof_input] + } else { + vec![ + prev_block, + blocks_arg, + Token::Tuple(vec![Token::Array(vec![]), Token::Array(vec![])]), + ] + } + } + + pub fn block_range(&self) -> (L1BatchNumber, L1BatchNumber) { + let 
BlocksProofOperation { blocks, .. } = self; + ( + blocks.first().map(|c| c.header.number).unwrap_or_default(), + blocks.last().map(|c| c.header.number).unwrap_or_default(), + ) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlocksExecuteOperation { + pub blocks: Vec, +} + +impl BlocksExecuteOperation { + fn get_eth_tx_args_for_block(block: &BlockWithMetadata) -> Token { + block.l1_header_data() + } + + pub fn get_eth_tx_args(&self) -> Vec { + vec![Token::Array( + self.blocks + .iter() + .map(BlocksExecuteOperation::get_eth_tx_args_for_block) + .collect(), + )] + } + + pub fn block_range(&self) -> (L1BatchNumber, L1BatchNumber) { + let BlocksExecuteOperation { blocks } = self; + ( + blocks.first().map(|b| b.header.number).unwrap_or_default(), + blocks.last().map(|b| b.header.number).unwrap_or_default(), + ) + } +} + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Eq, PartialEq)] +pub enum AggregatedActionType { + CommitBlocks, + PublishProofBlocksOnchain, + ExecuteBlocks, +} + +impl std::string::ToString for AggregatedActionType { + fn to_string(&self) -> String { + match self { + AggregatedActionType::CommitBlocks => "CommitBlocks".to_owned(), + AggregatedActionType::PublishProofBlocksOnchain => { + "PublishProofBlocksOnchain".to_owned() + } + AggregatedActionType::ExecuteBlocks => "ExecuteBlocks".to_owned(), + } + } +} + +impl std::str::FromStr for AggregatedActionType { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "CommitBlocks" => Ok(Self::CommitBlocks), + "PublishProofBlocksOnchain" => Ok(Self::PublishProofBlocksOnchain), + "ExecuteBlocks" => Ok(Self::ExecuteBlocks), + _ => Err("Incorrect aggregated action type".to_owned()), + } + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Clone)] +pub enum AggregatedOperation { + CommitBlocks(BlocksCommitOperation), + PublishProofBlocksOnchain(BlocksProofOperation), + ExecuteBlocks(BlocksExecuteOperation), +} + +impl AggregatedOperation { + pub fn 
get_action_type(&self) -> AggregatedActionType { + match self { + AggregatedOperation::CommitBlocks(..) => AggregatedActionType::CommitBlocks, + AggregatedOperation::PublishProofBlocksOnchain(..) => { + AggregatedActionType::PublishProofBlocksOnchain + } + AggregatedOperation::ExecuteBlocks(..) => AggregatedActionType::ExecuteBlocks, + } + } + + pub fn get_block_range(&self) -> (L1BatchNumber, L1BatchNumber) { + match self { + AggregatedOperation::CommitBlocks(op) => op.block_range(), + AggregatedOperation::PublishProofBlocksOnchain(op) => op.block_range(), + AggregatedOperation::ExecuteBlocks(op) => op.block_range(), + } + } + + pub fn get_action_caption(&self) -> &'static str { + match self { + AggregatedOperation::CommitBlocks(_) => "commit", + AggregatedOperation::PublishProofBlocksOnchain(_) => "proof", + AggregatedOperation::ExecuteBlocks(_) => "execute", + } + } + + pub fn is_commit(&self) -> bool { + matches!(self.get_action_type(), AggregatedActionType::CommitBlocks) + } + + pub fn is_execute(&self) -> bool { + matches!(self.get_action_type(), AggregatedActionType::ExecuteBlocks) + } + + pub fn is_publish_proofs(&self) -> bool { + matches!( + self.get_action_type(), + AggregatedActionType::PublishProofBlocksOnchain + ) + } +} + +impl From for AggregatedOperation { + fn from(other: BlocksCommitOperation) -> Self { + Self::CommitBlocks(other) + } +} + +impl From for AggregatedOperation { + fn from(other: BlocksProofOperation) -> Self { + Self::PublishProofBlocksOnchain(other) + } +} + +impl From for AggregatedOperation { + fn from(other: BlocksExecuteOperation) -> Self { + Self::ExecuteBlocks(other) + } +} diff --git a/core/lib/types/src/api.rs b/core/lib/types/src/api.rs new file mode 100644 index 000000000000..4f2c5a8b03f7 --- /dev/null +++ b/core/lib/types/src/api.rs @@ -0,0 +1,493 @@ +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; + +use crate::explorer_api::TransactionStatus; +pub use crate::transaction_request::{ + Eip712Meta, 
SerializationTransactionError, TransactionRequest, +}; +use crate::web3::types::{AccessList, Index, H2048}; +use crate::{Address, MiniblockNumber}; +use chrono::{DateTime, Utc}; +pub use zksync_basic_types::web3::{ + self, ethabi, + types::{Bytes, Work, H160, H256, H64, U256, U64}, +}; + +/// Block Number +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum BlockNumber { + /// Alias for BlockNumber::Latest. + Committed, + /// Last block that was finalized on L1. + Finalized, + /// Latest sealed block + Latest, + /// Earliest block (genesis) + Earliest, + /// Latest block (may be the block that is currently open). + Pending, + /// Block by number from canon chain + Number(U64), +} + +impl> From for BlockNumber { + fn from(num: T) -> Self { + BlockNumber::Number(num.into()) + } +} + +impl Serialize for BlockNumber { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match *self { + BlockNumber::Number(ref x) => serializer.serialize_str(&format!("0x{:x}", x)), + BlockNumber::Committed => serializer.serialize_str("committed"), + BlockNumber::Finalized => serializer.serialize_str("finalized"), + BlockNumber::Latest => serializer.serialize_str("latest"), + BlockNumber::Earliest => serializer.serialize_str("earliest"), + BlockNumber::Pending => serializer.serialize_str("pending"), + } + } +} + +impl<'de> Deserialize<'de> for BlockNumber { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct V; + impl<'de> serde::de::Visitor<'de> for V { + type Value = BlockNumber; + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.write_str("A block number or one of the supported aliases") + } + fn visit_str(self, value: &str) -> Result { + let result = match value { + "committed" => BlockNumber::Committed, + "finalized" => BlockNumber::Finalized, + "latest" => BlockNumber::Latest, + "earliest" => BlockNumber::Earliest, + "pending" => BlockNumber::Pending, + num => { + let number = + 
U64::deserialize(de::value::BorrowedStrDeserializer::new(num))?; + BlockNumber::Number(number) + } + }; + + Ok(result) + } + } + deserializer.deserialize_str(V) + } +} + +/// Block unified identifier in terms of ZKSync +/// +/// This is an utility structure that cannot be (de)serialized, it has to be created manually. +/// The reason is because Web3 API provides multiple methods for referring block either by hash or number, +/// and with such an ID it will be possible to avoid a lot of boilerplate. +#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum BlockId { + /// By Hash + Hash(H256), + /// By Number + Number(BlockNumber), +} + +/// Helper struct for EIP-1898. +#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockNumberObject { + pub block_number: BlockNumber, +} + +/// Helper struct for EIP-1898. +#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockHashObject { + pub block_hash: H256, +} + +/// Helper enum for EIP-1898. +/// Should be used for `block` parameters in web3 JSON RPC methods that implement EIP-1898. +#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum BlockIdVariant { + BlockNumber(BlockNumber), + BlockNumberObject(BlockNumberObject), + BlockHashObject(BlockHashObject), +} + +impl From for BlockId { + fn from(value: BlockIdVariant) -> BlockId { + match value { + BlockIdVariant::BlockNumber(number) => BlockId::Number(number), + BlockIdVariant::BlockNumberObject(number_object) => { + BlockId::Number(number_object.block_number) + } + BlockIdVariant::BlockHashObject(hash_object) => BlockId::Hash(hash_object.block_hash), + } + } +} + +/// Transaction variant +/// +/// Utility structure. Some Web3 API methods have to return a block with a list of either full +/// transaction objects or just their hashes. 
+#[allow(clippy::large_enum_variant)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum TransactionVariant { + Full(Transaction), + Hash(H256), +} + +/// Transaction Identifier +#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)] +pub enum TransactionId { + /// By hash + Hash(H256), + /// By block and index + Block(BlockId, Index), +} + +impl From for TransactionId { + fn from(hash: H256) -> Self { + TransactionId::Hash(hash) + } +} + +/// A struct with the proof for the L2->L1 log in a specific block. +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct L2ToL1LogProof { + /// The merkle path for the leaf. + pub proof: Vec, + /// The id of the leaf in a tree. + pub id: u32, + /// The root of the tree. + pub root: H256, +} + +/// A struct with the two default bridge contracts. +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BridgeAddresses { + pub l1_erc20_default_bridge: Address, + pub l2_erc20_default_bridge: Address, +} + +#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] +pub struct TransactionReceipt { + /// Transaction hash. + #[serde(rename = "transactionHash")] + pub transaction_hash: H256, + /// Index within the block. + #[serde(rename = "transactionIndex")] + pub transaction_index: Index, + /// Hash of the block this transaction was included within. + #[serde(rename = "blockHash")] + pub block_hash: Option, + /// Number of the miniblock this transaction was included within. + #[serde(rename = "blockNumber")] + pub block_number: Option, + /// Index of transaction in l1 batch + #[serde(rename = "l1BatchTxIndex")] + pub l1_batch_tx_index: Option, + /// Number of the l1 batch this transaction was included within. 
+ #[serde(rename = "l1BatchNumber")] + pub l1_batch_number: Option, + /// Sender + /// Note: default address if the client did not return this value + /// (maintains backwards compatibility for <= 0.7.0 when this field was missing) + #[serde(default)] + pub from: Address, + /// Recipient (None when contract creation) + /// Note: Also `None` if the client did not return this value + /// (maintains backwards compatibility for <= 0.7.0 when this field was missing) + #[serde(default)] + pub to: Option
, + /// Cumulative gas used within the block after this was executed. + #[serde(rename = "cumulativeGasUsed")] + pub cumulative_gas_used: U256, + /// Gas used by this transaction alone. + /// + /// Gas used is `None` if the the client is running in light client mode. + #[serde(rename = "gasUsed")] + pub gas_used: Option, + /// Contract address created, or `None` if not a deployment. + #[serde(rename = "contractAddress")] + pub contract_address: Option
, + /// Logs generated within this transaction. + pub logs: Vec, + /// L2 to L1 logs generated within this transaction. + #[serde(rename = "l2ToL1Logs")] + pub l2_to_l1_logs: Vec, + /// Status: either 1 (success) or 0 (failure). + pub status: Option, + /// State root. + pub root: Option, + /// Logs bloom + #[serde(rename = "logsBloom")] + pub logs_bloom: H2048, + /// Transaction type, Some(1) for AccessList transaction, None for Legacy + #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] + pub transaction_type: Option, + /// Effective gas price + #[serde(rename = "effectiveGasPrice")] + pub effective_gas_price: Option, +} + +/// The block type returned from RPC calls. +/// This is generic over a `TX` type. +#[derive(Debug, Default, Clone, PartialEq, Deserialize, Serialize)] +pub struct Block { + /// Hash of the block + pub hash: H256, + /// Hash of the parent + #[serde(rename = "parentHash")] + pub parent_hash: H256, + /// Hash of the uncles + #[serde(rename = "sha3Uncles")] + pub uncles_hash: H256, + /// Miner/author's address + #[serde(rename = "miner", default, deserialize_with = "null_to_default")] + pub author: H160, + /// State root hash + #[serde(rename = "stateRoot")] + pub state_root: H256, + /// Transactions root hash + #[serde(rename = "transactionsRoot")] + pub transactions_root: H256, + /// Transactions receipts root hash + #[serde(rename = "receiptsRoot")] + pub receipts_root: H256, + /// Block number + pub number: U64, + /// L1 batch number the block is included in + #[serde(rename = "l1BatchNumber")] + pub l1_batch_number: Option, + /// Gas Used + #[serde(rename = "gasUsed")] + pub gas_used: U256, + /// Gas Limit + #[serde(rename = "gasLimit")] + pub gas_limit: U256, + /// Base fee per unit of gas + #[serde(rename = "baseFeePerGas")] + pub base_fee_per_gas: U256, + /// Extra data + #[serde(rename = "extraData")] + pub extra_data: Bytes, + /// Logs bloom + #[serde(rename = "logsBloom")] + pub logs_bloom: H2048, + /// 
Timestamp + pub timestamp: U256, + /// Timestamp of the l1 batch this miniblock was included within + #[serde(rename = "l1BatchTimestamp")] + pub l1_batch_timestamp: Option, + /// Difficulty + pub difficulty: U256, + /// Total difficulty + #[serde(rename = "totalDifficulty")] + pub total_difficulty: U256, + /// Seal fields + #[serde(default, rename = "sealFields")] + pub seal_fields: Vec, + /// Uncles' hashes + pub uncles: Vec, + /// Transactions + pub transactions: Vec, + /// Size in bytes + pub size: U256, + /// Mix Hash + #[serde(rename = "mixHash")] + pub mix_hash: H256, + /// Nonce + pub nonce: H64, +} + +fn null_to_default<'de, D, T>(deserializer: D) -> Result +where + T: Default + Deserialize<'de>, + D: Deserializer<'de>, +{ + let option = Option::deserialize(deserializer)?; + Ok(option.unwrap_or_default()) +} + +/// A log produced by a transaction. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Log { + /// H160 + pub address: H160, + /// Topics + pub topics: Vec, + /// Data + pub data: Bytes, + /// Block Hash + #[serde(rename = "blockHash")] + pub block_hash: Option, + /// Block Number + #[serde(rename = "blockNumber")] + pub block_number: Option, + /// L1 batch number the log is included in. + #[serde(rename = "l1BatchNumber")] + pub l1_batch_number: Option, + /// Transaction Hash + #[serde(rename = "transactionHash")] + pub transaction_hash: Option, + /// Transaction Index + #[serde(rename = "transactionIndex")] + pub transaction_index: Option, + /// Log Index in Block + #[serde(rename = "logIndex")] + pub log_index: Option, + /// Log Index in Transaction + #[serde(rename = "transactionLogIndex")] + pub transaction_log_index: Option, + /// Log Type + #[serde(rename = "logType")] + pub log_type: Option, + /// Removed + pub removed: Option, +} + +impl Log { + /// Returns true if the log has been removed. 
+ pub fn is_removed(&self) -> bool { + if let Some(val_removed) = self.removed { + return val_removed; + } + + if let Some(ref val_log_type) = self.log_type { + if val_log_type == "removed" { + return true; + } + } + false + } +} + +/// A log produced by a transaction. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct L2ToL1Log { + pub block_hash: Option, + pub block_number: U64, + pub l1_batch_number: Option, + pub log_index: U256, + pub transaction_index: Index, + pub transaction_hash: H256, + pub transaction_log_index: U256, + pub shard_id: U64, + pub is_service: bool, + pub sender: Address, + pub key: H256, + pub value: H256, +} + +/// Description of a Transaction, pending or in the chain. +#[derive(Debug, Default, Clone, PartialEq, Deserialize, Serialize)] +pub struct Transaction { + /// Hash + pub hash: H256, + /// Nonce + pub nonce: U256, + /// Block hash. None when pending. + #[serde(rename = "blockHash")] + pub block_hash: Option, + /// Block number. None when pending. + #[serde(rename = "blockNumber")] + pub block_number: Option, + /// Transaction Index. None when pending. + #[serde(rename = "transactionIndex")] + pub transaction_index: Option, + /// Sender + #[serde(default, skip_serializing_if = "Option::is_none")] + pub from: Option
, + /// Recipient (None when contract creation) + pub to: Option
, + /// Transfered value + pub value: U256, + /// Gas Price + #[serde(rename = "gasPrice")] + pub gas_price: Option, + /// Gas amount + pub gas: U256, + /// Input data + pub input: Bytes, + /// ECDSA recovery id + #[serde(default, skip_serializing_if = "Option::is_none")] + pub v: Option, + /// ECDSA signature r, 32 bytes + #[serde(default, skip_serializing_if = "Option::is_none")] + pub r: Option, + /// ECDSA signature s, 32 bytes + #[serde(default, skip_serializing_if = "Option::is_none")] + pub s: Option, + /// Raw transaction data + #[serde(default, skip_serializing_if = "Option::is_none")] + pub raw: Option, + /// Transaction type, Some(1) for AccessList transaction, None for Legacy + #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] + pub transaction_type: Option, + /// Access list + #[serde( + rename = "accessList", + default, + skip_serializing_if = "Option::is_none" + )] + pub access_list: Option, + /// Max fee per gas + #[serde(rename = "maxFeePerGas", skip_serializing_if = "Option::is_none")] + pub max_fee_per_gas: Option, + /// Miner bribe + #[serde( + rename = "maxPriorityFeePerGas", + skip_serializing_if = "Option::is_none" + )] + pub max_priority_fee_per_gas: Option, + /// Id of the current chain + #[serde(rename = "chainId")] + pub chain_id: U256, + /// Number of the l1 batch this transaction was included within. 
+ #[serde( + rename = "l1BatchNumber", + default, + skip_serializing_if = "Option::is_none" + )] + pub l1_batch_number: Option, + /// Index of transaction in l1 batch + #[serde( + rename = "l1BatchTxIndex", + default, + skip_serializing_if = "Option::is_none" + )] + pub l1_batch_tx_index: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TransactionDetails { + pub is_l1_originated: bool, + pub status: TransactionStatus, + pub fee: U256, + pub initiator_address: Address, + pub received_at: DateTime, + pub eth_commit_tx_hash: Option, + pub eth_prove_tx_hash: Option, + pub eth_execute_tx_hash: Option, +} + +#[derive(Debug, Clone)] +pub struct GetLogsFilter { + pub from_block: MiniblockNumber, + pub to_block: Option, + pub addresses: Vec
, + pub topics: Vec<(u32, Vec)>, +} diff --git a/core/lib/types/src/block.rs b/core/lib/types/src/block.rs new file mode 100644 index 000000000000..b50dc967c994 --- /dev/null +++ b/core/lib/types/src/block.rs @@ -0,0 +1,190 @@ +use serde::{Deserialize, Serialize}; +use std::fmt::{Debug, Formatter}; +use std::ops::{Add, AddAssign}; +use zksync_basic_types::{H2048, H256, U256}; +use zksync_config::constants::FAIR_L2_GAS_PRICE; + +use crate::{ + l2_to_l1_log::L2ToL1Log, priority_op_onchain_data::PriorityOpOnchainData, + pubdata_packing::pack_storage_log, web3::signing::keccak256, AccountTreeId, Address, + L1BatchNumber, MiniblockNumber, StorageKey, StorageLogKind, WitnessStorageLog, +}; + +/// Represents a successfully deployed smart contract. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct DeployedContract { + pub account_id: AccountTreeId, + pub bytecode: Vec, +} + +impl DeployedContract { + pub fn new(account_id: AccountTreeId, bytecode: Vec) -> Self { + Self { + account_id, + bytecode, + } + } +} + +/// Holder for the block metadata that is not available from transactions themselves. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct L1BatchHeader { + /// Numeric ID of the block. Starts from 1, 0 block is considered genesis block and has no transactions. + pub number: L1BatchNumber, + /// Whether block is sealed or not (doesn't correspond to committing/verifying it on the L1). + pub is_finished: bool, + /// Timestamp when block was first created. 
+ pub timestamp: u64, + /// Address of the fee account that was used when block was created + pub fee_account_address: Address, + /// Total number of processed priority operations in the block + pub l1_tx_count: u16, + /// Total number of processed txs that was requested offchain + pub l2_tx_count: u16, + /// The data of the processed priority operations hash which must be sent to the smart contract + pub priority_ops_onchain_data: Vec, + /// all L2 -> L1 logs in the block + pub l2_to_l1_logs: Vec, + /// preimages of the hashes that were sent as value of L2 logs by special system L2 contract + pub l2_to_l1_messages: Vec>, + /// Bloom filter for the event logs in the block. + pub bloom: H2048, + /// Initial value of the bootloader's heap + pub initial_bootloader_contents: Vec<(usize, U256)>, + /// Hashes of contracts used this block + pub used_contract_hashes: Vec, + /// The EIP1559 base_fee used in this block. + pub base_fee_per_gas: u64, + /// The assumed L1 gas price within the block. + pub l1_gas_price: u64, + /// The L2 gas price that the operator agrees on. + pub l2_fair_gas_price: u64, +} + +/// Holder for the miniblock metadata that is not available from transactions themselves. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct MiniblockHeader { + pub number: MiniblockNumber, + pub timestamp: u64, + pub hash: H256, + pub l1_tx_count: u16, + pub l2_tx_count: u16, + pub base_fee_per_gas: u64, // Min wei per gas that txs in this miniblock need to have. 
+ + pub l1_gas_price: u64, // L1 gas price assumed in the corresponding batch + pub l2_fair_gas_price: u64, // L2 gas price assumed in the corresponding batch +} + +impl L1BatchHeader { + pub fn new( + number: L1BatchNumber, + timestamp: u64, + fee_account_address: Address, + ) -> L1BatchHeader { + Self { + number, + is_finished: false, + timestamp, + fee_account_address, + l1_tx_count: 0, + l2_tx_count: 0, + priority_ops_onchain_data: vec![], + l2_to_l1_logs: vec![], + l2_to_l1_messages: vec![], + bloom: H2048::default(), + initial_bootloader_contents: vec![], + used_contract_hashes: vec![], + // For now, base fee is always equal to the minimal one. + base_fee_per_gas: FAIR_L2_GAS_PRICE, + l1_gas_price: 0, + l2_fair_gas_price: FAIR_L2_GAS_PRICE, + } + } + + /// Mock block header, existing only for tests. + #[doc(hidden)] + pub fn mock(number: L1BatchNumber) -> Self { + Self::new(number, 0, Address::default()) + } + + /// Creates a hash of the priority ops data. + pub fn priority_ops_onchain_data_hash(&self) -> H256 { + let mut rolling_hash: H256 = keccak256(&[]).into(); + for onchain_data in &self.priority_ops_onchain_data { + let mut preimage = Vec::new(); + preimage.extend(rolling_hash.as_bytes()); + preimage.extend(onchain_data.onchain_data_hash.as_bytes()); + + rolling_hash = keccak256(&preimage).into(); + } + + rolling_hash + } + + pub fn tx_count(&self) -> usize { + (self.l1_tx_count + self.l2_tx_count) as usize + } +} + +/// Utility structure that holds the block header together with its logs required to generate the witness +#[derive(Debug)] +pub struct WitnessBlockWithLogs { + pub header: L1BatchHeader, + pub storage_logs: Vec, +} + +impl WitnessBlockWithLogs { + /// Packs the logs into the byte sequence. + /// Used for the onchain data availability. 
+ pub fn compress_logs(&self, hash_fn: F) -> Vec + where + F: Fn(&StorageKey) -> Vec + Copy, + { + self.storage_logs + .iter() + .filter(|log| log.storage_log.kind == StorageLogKind::Write) + .flat_map(|l| pack_storage_log(&l.storage_log, hash_fn)) + .collect() + } +} + +#[derive(Clone, Copy, Eq, PartialEq, Default)] +pub struct BlockGasCount { + pub commit: u32, + pub prove: u32, + pub execute: u32, +} + +impl Debug for BlockGasCount { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "c:{}/p:{}/e:{}", self.commit, self.prove, self.execute)?; + Ok(()) + } +} + +impl BlockGasCount { + pub fn has_greater_than(&self, bound: u32) -> bool { + self.commit > bound || self.prove > bound || self.execute > bound + } +} + +impl AddAssign for BlockGasCount { + fn add_assign(&mut self, other: Self) { + *self = Self { + commit: self.commit + other.commit, + prove: self.prove + other.prove, + execute: self.execute + other.execute, + }; + } +} + +impl Add for BlockGasCount { + type Output = BlockGasCount; + fn add(self, rhs: Self) -> Self::Output { + Self { + commit: self.commit + rhs.commit, + prove: self.prove + rhs.prove, + execute: self.execute + rhs.execute, + } + } +} diff --git a/core/lib/types/src/circuit.rs b/core/lib/types/src/circuit.rs new file mode 100644 index 000000000000..72f2527f30b6 --- /dev/null +++ b/core/lib/types/src/circuit.rs @@ -0,0 +1,11 @@ +use zkevm_test_harness::geometry_config::get_geometry_config; +use zkevm_test_harness::toolset::GeometryConfig; + +pub const LEAF_SPLITTING_FACTOR: usize = 50; +pub const NODE_SPLITTING_FACTOR: usize = 48; +pub const SCHEDULER_UPPER_BOUND: u32 = (LEAF_SPLITTING_FACTOR * NODE_SPLITTING_FACTOR) as u32; + +pub const LEAF_CIRCUIT_INDEX: u8 = 2; +pub const NODE_CIRCUIT_INDEX: u8 = 1; + +pub const GEOMETRY_CONFIG: GeometryConfig = get_geometry_config(); diff --git a/core/lib/types/src/commitment.rs b/core/lib/types/src/commitment.rs new file mode 100644 index 000000000000..6f149661ac8f --- /dev/null 
+++ b/core/lib/types/src/commitment.rs @@ -0,0 +1,651 @@ +//! Data structures that have more metadata than their primary versions declared in this crate. +//! For example, block defined here has the `root_hash` field which is absent in the usual `Block`. +//! +//! Existence of this module is caused by the execution model of zkSync: when executing transactions, +//! we aim to avoid expensive operations like the state root hash recalculation. State root hash is not +//! required for the rollup to execute blocks, it's needed for the proof generation and the Ethereum +//! transactions, thus the calculations are done separately and asynchronously. + +use std::collections::HashMap; +use std::fmt::Debug; + +use serde::{Deserialize, Serialize}; +use zksync_contracts::{DEFAULT_ACCOUNT_CODE, PROVED_BLOCK_BOOTLOADER_CODE}; + +use zksync_config::constants::ZKPORTER_IS_AVAILABLE; +use zksync_mini_merkle_tree::mini_merkle_tree_root_hash; +use zksync_utils::u256_to_h256; + +use crate::circuit::GEOMETRY_CONFIG; +use crate::ethabi::Token; +use crate::l2_to_l1_log::L2ToL1Log; +use crate::web3::signing::keccak256; +use crate::writes::{InitialStorageWrite, RepeatedStorageWrite}; +use crate::{block::L1BatchHeader, H256, KNOWN_CODES_STORAGE_ADDRESS, U256}; + +/// Make the struct serializable for commitment. +pub trait CommitmentSerializable: Clone { + /// Size of the structure in bytes + const SERIALIZED_SIZE: usize; + /// The number of objects of this type that can be included in the block + fn limit_per_block() -> usize; + fn to_bytes(self) -> Vec; +} + +/// Serialize elements for commitment. The results consist of: +/// 1. Number of elements (4 bytes) +/// 2. 
Serialized elements +pub(crate) fn serialize_commitments(values: &[I]) -> Vec { + let final_len = I::limit_per_block() * I::SERIALIZED_SIZE + 4; + let mut input = Vec::with_capacity(final_len); + let result = values + .iter() + .cloned() + .flat_map(CommitmentSerializable::to_bytes); + input.extend((values.len() as u32).to_be_bytes()); + input.extend(result); + assert!( + input.len() <= final_len, + "The size of serialized values is more than expected expected_len {} actual_len {} size {} capacity {}", + final_len, input.len() , I::SERIALIZED_SIZE, I::limit_per_block() + ); + input +} + +/// Precalculated data for the block that was used in commitment and L1 transaction +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlockMetadata { + pub root_hash: H256, + pub rollup_last_leaf_index: u64, + pub merkle_root_hash: H256, + pub initial_writes_compressed: Vec, + pub repeated_writes_compressed: Vec, + pub commitment: H256, + pub l2_l1_messages_compressed: Vec, + pub l2_l1_merkle_root: H256, + pub block_meta_params: BlockMetaParameters, + pub aux_data_hash: H256, + pub meta_parameters_hash: H256, + pub pass_through_data_hash: H256, +} + +impl BlockMetadata { + /// Mock metadata, exists only for tests. 
+ #[doc(hidden)] + pub fn mock() -> Self { + Self { + root_hash: H256::zero(), + rollup_last_leaf_index: 1, + merkle_root_hash: H256::zero(), + initial_writes_compressed: vec![], + repeated_writes_compressed: vec![], + commitment: Default::default(), + l2_l1_messages_compressed: vec![], + l2_l1_merkle_root: H256::default(), + block_meta_params: BlockMetaParameters::default(), + aux_data_hash: Default::default(), + meta_parameters_hash: Default::default(), + pass_through_data_hash: Default::default(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct BlockWithMetadata { + pub header: L1BatchHeader, + pub metadata: BlockMetadata, + pub factory_deps: Vec>, +} + +impl BlockWithMetadata { + pub fn new( + header: L1BatchHeader, + metadata: BlockMetadata, + unsorted_factory_deps: HashMap>, + ) -> Self { + Self { + factory_deps: Self::factory_deps_in_appearance_order(&header, &unsorted_factory_deps), + header, + metadata, + } + } + + /// Creates an array of factory deps in the order in which they appeared in a block + fn factory_deps_in_appearance_order( + header: &L1BatchHeader, + unsorted_factory_deps: &HashMap>, + ) -> Vec> { + let mut result = Vec::with_capacity(unsorted_factory_deps.len()); + + for log in &header.l2_to_l1_logs { + if log.sender == KNOWN_CODES_STORAGE_ADDRESS { + result.push( + unsorted_factory_deps + .get(&log.key) + .unwrap_or_else(|| panic!("Failed to get bytecode that was marked as known on L2 block: bytecodehash: {:?}, block number {:?}", &log.key, header.number)) + .clone(), + ); + } + } + + result + } + + pub fn l1_header_data(&self) -> Token { + Token::Tuple(vec![ + Token::Uint(U256::from(*self.header.number)), + Token::FixedBytes(self.metadata.root_hash.as_bytes().to_vec()), + Token::Uint(U256::from(self.metadata.rollup_last_leaf_index)), + Token::Uint(U256::from(self.header.l1_tx_count)), + Token::FixedBytes( + self.header + .priority_ops_onchain_data_hash() + .as_bytes() + .to_vec(), + ), + 
Token::FixedBytes(self.metadata.l2_l1_merkle_root.as_bytes().to_vec()), + Token::Uint(U256::from(self.header.timestamp)), + Token::FixedBytes(self.metadata.commitment.as_bytes().to_vec()), + ]) + } + + pub fn l1_commit_data(&self) -> Token { + Token::Tuple(vec![ + Token::Uint(U256::from(self.header.number.0)), + Token::Uint(U256::from(self.header.timestamp)), + Token::Uint(U256::from(self.metadata.rollup_last_leaf_index)), + Token::FixedBytes(self.metadata.merkle_root_hash.as_bytes().to_vec()), + Token::Uint(U256::from(self.header.l1_tx_count)), + Token::FixedBytes(self.metadata.l2_l1_merkle_root.as_bytes().to_vec()), + Token::FixedBytes( + self.header + .priority_ops_onchain_data_hash() + .as_bytes() + .to_vec(), + ), + Token::Bytes(self.metadata.initial_writes_compressed.clone()), + Token::Bytes(self.metadata.repeated_writes_compressed.clone()), + Token::Bytes(self.metadata.l2_l1_messages_compressed.clone()), + Token::Array( + self.header + .l2_to_l1_messages + .iter() + .map(|message| Token::Bytes(message.to_vec())) + .collect(), + ), + Token::Array( + self.factory_deps + .iter() + .map(|bytecode| Token::Bytes(bytecode.to_vec())) + .collect(), + ), + ]) + } + + pub fn l1_commit_data_size(&self) -> usize { + crate::ethabi::encode(&[Token::Array(vec![self.l1_commit_data()])]).len() + } +} + +impl CommitmentSerializable for L2ToL1Log { + const SERIALIZED_SIZE: usize = 88; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_l1_messages_merklizer as usize + } + + fn to_bytes(self) -> Vec { + let mut raw_data = Vec::with_capacity(Self::SERIALIZED_SIZE); + raw_data.push(self.shard_id); + raw_data.push(self.is_service as u8); + raw_data.extend(self.tx_number_in_block.to_be_bytes()); + raw_data.extend(self.sender.as_bytes()); + raw_data.extend(self.key.as_bytes()); + raw_data.extend(self.value.as_bytes()); + assert_eq!( + raw_data.len(), + Self::SERIALIZED_SIZE, + "Serialized size for L2ToL1Log is bigger than expected" + ); + raw_data + } +} + +impl 
CommitmentSerializable for InitialStorageWrite { + const SERIALIZED_SIZE: usize = 64; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_initial_writes_pubdata_hasher as usize + } + + fn to_bytes(self) -> Vec { + let mut result = vec![0; Self::SERIALIZED_SIZE]; + self.key.to_little_endian(&mut result[0..32]); + result[32..64].copy_from_slice(self.value.as_bytes()); + result + } +} + +impl CommitmentSerializable for RepeatedStorageWrite { + const SERIALIZED_SIZE: usize = 40; + + fn limit_per_block() -> usize { + GEOMETRY_CONFIG.limit_for_repeated_writes_pubdata_hasher as usize + } + + fn to_bytes(self) -> Vec { + let mut result = Vec::with_capacity(Self::SERIALIZED_SIZE); + result.extend_from_slice(&self.index.to_be_bytes()); + result.extend_from_slice(self.value.as_bytes()); + assert_eq!( + result.len(), + Self::SERIALIZED_SIZE, + "Serialized size for RepeatedStorageWrite is bigger than expected" + ); + result + } +} + +/// Block Output produced by Virtual Machine +#[derive(Debug, Clone)] +struct BlockAuxiliaryOutput { + // We use initial fields for debugging + #[allow(dead_code)] + l2_l1_logs: Vec, + #[allow(dead_code)] + initial_writes: Vec, + #[allow(dead_code)] + repeated_writes: Vec, + l2_l1_logs_compressed: Vec, + l2_l1_logs_linear_hash: H256, + l2_l1_logs_merkle_root: H256, + initial_writes_compressed: Vec, + initial_writes_hash: H256, + repeated_writes_compressed: Vec, + repeated_writes_hash: H256, +} + +impl BlockAuxiliaryOutput { + fn new( + l2_l1_logs: Vec, + initial_writes: Vec, + repeated_writes: Vec, + ) -> Self { + let l2_l1_logs_compressed = serialize_commitments(&l2_l1_logs); + let initial_writes_compressed = serialize_commitments(&initial_writes); + let repeated_writes_compressed = serialize_commitments(&repeated_writes); + + let l2_l1_logs_linear_hash = H256::from(keccak256(&l2_l1_logs_compressed)); + let initial_writes_hash = H256::from(keccak256(&initial_writes_compressed)); + let repeated_writes_hash = 
H256::from(keccak256(&repeated_writes_compressed)); + + let l2_l1_logs_merkle_root = { + let values: Vec> = l2_l1_logs + .iter() + .cloned() + .map(CommitmentSerializable::to_bytes) + .collect(); + mini_merkle_tree_root_hash( + values, + L2ToL1Log::SERIALIZED_SIZE, + L2ToL1Log::limit_per_block(), + ) + }; + + Self { + l2_l1_logs_compressed, + initial_writes_compressed, + repeated_writes_compressed, + l2_l1_logs, + initial_writes, + repeated_writes, + l2_l1_logs_linear_hash, + l2_l1_logs_merkle_root, + initial_writes_hash, + repeated_writes_hash, + } + } + + pub fn to_bytes(&self) -> Vec { + // 4 H256 values + const SERIALIZED_SIZE: usize = 128; + let mut result = Vec::with_capacity(SERIALIZED_SIZE); + result.extend(self.l2_l1_logs_merkle_root.as_bytes()); + result.extend(self.l2_l1_logs_linear_hash.as_bytes()); + result.extend(self.initial_writes_hash.as_bytes()); + result.extend(self.repeated_writes_hash.as_bytes()); + result + } + + pub fn hash(&self) -> H256 { + H256::from_slice(&keccak256(&self.to_bytes())) + } +} + +/// Meta parameters for block. They are the same for each block per run, excluding timestamp. 
+/// We keep timestamp in seconds here for consistency with the crypto team +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct BlockMetaParameters { + pub zkporter_is_available: bool, + pub bootloader_code_hash: H256, + pub default_aa_code_hash: H256, +} + +impl Default for BlockMetaParameters { + fn default() -> Self { + Self { + zkporter_is_available: ZKPORTER_IS_AVAILABLE, + bootloader_code_hash: u256_to_h256(PROVED_BLOCK_BOOTLOADER_CODE.hash), + default_aa_code_hash: u256_to_h256(DEFAULT_ACCOUNT_CODE.hash), + } + } +} + +impl BlockMetaParameters { + pub fn to_bytes(&self) -> Vec { + const SERIALIZED_SIZE: usize = 4 + 1 + 32 + 32; + let mut result = Vec::with_capacity(SERIALIZED_SIZE); + result.push(self.zkporter_is_available as u8); + result.extend(self.bootloader_code_hash.as_bytes()); + result.extend(self.default_aa_code_hash.as_bytes()); + result + } + + pub fn hash(&self) -> H256 { + H256::from_slice(&keccak256(&self.to_bytes())) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct RootState { + pub last_leaf_index: u64, + pub root_hash: H256, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct BlockPassThroughData { + shared_states: Vec, +} + +impl BlockPassThroughData { + pub fn to_bytes(&self) -> Vec { + // We assume that currently we have only two shared state: Rollup and ZkPorter where porter is always zero + const SERIALIZED_SIZE: usize = 8 + 32 + 8 + 32; + let mut result = Vec::with_capacity(SERIALIZED_SIZE); + for state in self.shared_states.iter() { + result.extend_from_slice(&state.last_leaf_index.to_be_bytes()); + result.extend_from_slice(state.root_hash.as_bytes()); + } + assert_eq!( + result.len(), + SERIALIZED_SIZE, + "Serialized size for BlockPassThroughData is bigger than expected" + ); + result + } + + pub fn hash(&self) -> H256 { + H256::from_slice(&keccak256(&self.to_bytes())) + } +} + +#[derive(Debug, Clone)] +pub struct BlockCommitment { + pass_through_data: BlockPassThroughData, + 
auxiliary_output: BlockAuxiliaryOutput, + meta_parameters: BlockMetaParameters, +} + +#[derive(Debug, Clone)] +pub struct BlockCommitmentHash { + pub pass_through_data: H256, + pub aux_output: H256, + pub meta_parameters: H256, + pub commitment: H256, +} + +impl BlockCommitment { + pub fn new( + l2_to_l1_logs: Vec, + rollup_last_leaf_index: u64, + rollup_root_hash: H256, + initial_writes: Vec, + repeated_writes: Vec, + ) -> Self { + let meta_parameters = BlockMetaParameters::default(); + + Self { + pass_through_data: BlockPassThroughData { + shared_states: vec![ + RootState { + last_leaf_index: rollup_last_leaf_index, + root_hash: rollup_root_hash, + }, + // Despite the fact, that zk_porter is not available we have to add params about it. + RootState { + last_leaf_index: 0, + root_hash: H256::zero(), + }, + ], + }, + auxiliary_output: BlockAuxiliaryOutput::new( + l2_to_l1_logs, + initial_writes, + repeated_writes, + ), + meta_parameters, + } + } + + pub fn meta_parameters(&self) -> BlockMetaParameters { + self.meta_parameters.clone() + } + + pub fn l2_l1_logs_compressed(&self) -> &[u8] { + &self.auxiliary_output.l2_l1_logs_compressed + } + + pub fn l2_l1_logs_linear_hash(&self) -> H256 { + self.auxiliary_output.l2_l1_logs_linear_hash + } + + pub fn l2_l1_logs_merkle_root(&self) -> H256 { + self.auxiliary_output.l2_l1_logs_merkle_root + } + + pub fn initial_writes_compressed(&self) -> &[u8] { + &self.auxiliary_output.initial_writes_compressed + } + + pub fn repeated_writes_compressed(&self) -> &[u8] { + &self.auxiliary_output.repeated_writes_compressed + } + + pub fn initial_writes_pubdata_hash(&self) -> H256 { + self.auxiliary_output.initial_writes_hash + } + + pub fn repeated_writes_pubdata_hash(&self) -> H256 { + self.auxiliary_output.repeated_writes_hash + } + + pub fn hash(&self) -> BlockCommitmentHash { + let mut result = vec![]; + let pass_through_data_hash = self.pass_through_data.hash(); + result.extend_from_slice(pass_through_data_hash.as_bytes()); + let 
metadata_hash = self.meta_parameters.hash(); + result.extend_from_slice(metadata_hash.as_bytes()); + let auxiliary_output_hash = self.auxiliary_output.hash(); + result.extend_from_slice(auxiliary_output_hash.as_bytes()); + let hash = keccak256(&result); + let commitment = H256::from_slice(&hash); + BlockCommitmentHash { + pass_through_data: pass_through_data_hash, + aux_output: auxiliary_output_hash, + meta_parameters: metadata_hash, + commitment, + } + } +} + +#[cfg(test)] +mod tests { + use serde::{Deserialize, Serialize}; + use serde_with::serde_as; + + use crate::commitment::{ + BlockAuxiliaryOutput, BlockCommitment, BlockMetaParameters, BlockPassThroughData, + }; + use crate::l2_to_l1_log::L2ToL1Log; + use crate::writes::{InitialStorageWrite, RepeatedStorageWrite}; + use crate::{H256, U256}; + + #[serde_as] + #[derive(Debug, Serialize, Deserialize)] + struct ExpectedOutput { + #[serde_as(as = "serde_with::hex::Hex")] + l2_l1_bytes: Vec, + l2_l1_linear_hash: H256, + l2_l1_root_hash: H256, + #[serde_as(as = "serde_with::hex::Hex")] + initial_writes_bytes: Vec, + initial_writes_hash: H256, + #[serde_as(as = "serde_with::hex::Hex")] + repeated_writes_bytes: Vec, + repeated_writes_hash: H256, + #[serde_as(as = "serde_with::hex::Hex")] + pass_through_bytes: Vec, + pass_through_hash: H256, + #[serde_as(as = "serde_with::hex::Hex")] + meta_params_bytes: Vec, + meta_params_hash: H256, + #[serde_as(as = "serde_with::hex::Hex")] + auxiliary_bytes: Vec, + auxiliary_hash: H256, + commitment_hash: H256, + } + + #[derive(Debug, Clone, Serialize, Deserialize)] + struct InitialStorageTest { + pub key: String, + pub value: H256, + } + + #[derive(Debug, Clone, Serialize, Deserialize)] + struct BlockAuxiliaryInput { + l2_l1_logs: Vec, + initial_writes: Vec, + repeated_writes: Vec, + } + + #[derive(Debug, Serialize, Deserialize)] + struct CommitmentTest { + pass_through_data: BlockPassThroughData, + auxiliary_input: BlockAuxiliaryInput, + meta_parameters: BlockMetaParameters, + 
expected_outputs: ExpectedOutput, + } + + #[test] + fn commitment_test() { + let zksync_home = std::env::var("ZKSYNC_HOME").unwrap_or_else(|_| ".".into()); + let path = std::path::Path::new(&zksync_home) + .join("etc/commitment_tests/zksync_testharness_test.json"); + let contents = std::fs::read_to_string(path).unwrap(); + let commitment_test: CommitmentTest = serde_json::from_str(&contents).unwrap(); + + let initial_writes = commitment_test + .auxiliary_input + .initial_writes + .clone() + .into_iter() + .map(|a| InitialStorageWrite { + key: U256::from_dec_str(&a.key).unwrap(), + value: a.value, + }) + .collect(); + let auxiliary_output = BlockAuxiliaryOutput::new( + commitment_test.auxiliary_input.l2_l1_logs.clone(), + initial_writes, + commitment_test.auxiliary_input.repeated_writes.clone(), + ); + + let commitment = BlockCommitment { + pass_through_data: commitment_test.pass_through_data, + auxiliary_output, + meta_parameters: commitment_test.meta_parameters, + }; + + assert_eq!( + commitment.auxiliary_output.l2_l1_logs_compressed.len(), + commitment_test.expected_outputs.l2_l1_bytes.len() + ); + assert_eq!( + commitment.auxiliary_output.l2_l1_logs_compressed, + commitment_test.expected_outputs.l2_l1_bytes + ); + assert_eq!( + commitment.auxiliary_output.l2_l1_logs_linear_hash, + commitment_test.expected_outputs.l2_l1_linear_hash + ); + assert_eq!( + commitment.auxiliary_output.l2_l1_logs_merkle_root, + commitment_test.expected_outputs.l2_l1_root_hash + ); + + assert_eq!( + commitment.auxiliary_output.repeated_writes_compressed.len(), + commitment_test.expected_outputs.repeated_writes_bytes.len() + ); + assert_eq!( + commitment.auxiliary_output.repeated_writes_compressed, + commitment_test.expected_outputs.repeated_writes_bytes + ); + + assert_eq!( + commitment.auxiliary_output.repeated_writes_hash, + commitment_test.expected_outputs.repeated_writes_hash + ); + assert_eq!( + commitment.auxiliary_output.initial_writes_compressed.len(), + 
commitment_test.expected_outputs.initial_writes_bytes.len() + ); + assert_eq!( + commitment.auxiliary_output.initial_writes_compressed, + commitment_test.expected_outputs.initial_writes_bytes + ); + + assert_eq!( + commitment.auxiliary_output.initial_writes_hash, + commitment_test.expected_outputs.initial_writes_hash + ); + assert_eq!( + commitment.pass_through_data.to_bytes(), + commitment_test.expected_outputs.pass_through_bytes + ); + assert_eq!( + commitment.pass_through_data.hash(), + commitment_test.expected_outputs.pass_through_hash + ); + assert_eq!( + commitment.meta_parameters.to_bytes(), + commitment_test.expected_outputs.meta_params_bytes, + ); + assert_eq!( + commitment.meta_parameters.hash(), + commitment_test.expected_outputs.meta_params_hash, + ); + assert_eq!( + commitment.auxiliary_output.to_bytes(), + commitment_test.expected_outputs.auxiliary_bytes + ); + assert_eq!( + commitment.auxiliary_output.hash(), + commitment_test.expected_outputs.auxiliary_hash + ); + assert_eq!( + commitment.hash().commitment, + commitment_test.expected_outputs.commitment_hash + ); + } +} diff --git a/core/lib/types/src/eth_sender.rs b/core/lib/types/src/eth_sender.rs new file mode 100644 index 000000000000..11def0500dc3 --- /dev/null +++ b/core/lib/types/src/eth_sender.rs @@ -0,0 +1,35 @@ +use crate::aggregated_operations::AggregatedActionType; +use crate::{Address, H256}; + +#[derive(Debug, Clone)] +pub struct EthTx { + pub id: u32, + pub nonce: u64, + pub contract_address: Address, + pub raw_tx: Vec, + pub tx_type: AggregatedActionType, + pub created_at_timestamp: u64, + pub predicted_gas_cost: u64, +} + +#[derive(Clone, Debug)] +pub struct TxHistory { + pub id: u32, + pub eth_tx_id: u32, + pub base_fee_per_gas: u64, + pub priority_fee_per_gas: u64, + pub tx_hash: H256, + pub signed_raw_tx: Vec, + pub sent_at_block: Option, +} + +#[derive(Clone, Debug)] +pub struct TxHistoryToSend { + pub id: u32, + pub eth_tx_id: u32, + pub base_fee_per_gas: u64, + pub 
priority_fee_per_gas: u64, + pub tx_hash: H256, + pub signed_raw_tx: Vec, + pub nonce: u64, +} diff --git a/core/lib/types/src/event.rs b/core/lib/types/src/event.rs new file mode 100644 index 000000000000..6e183bb935d4 --- /dev/null +++ b/core/lib/types/src/event.rs @@ -0,0 +1,189 @@ +use crate::{ + ethabi, + tokens::{TokenInfo, TokenMetadata}, + Address, L1BatchNumber, CONTRACT_DEPLOYER_ADDRESS, H256, KNOWN_CODES_STORAGE_ADDRESS, + L1_MESSENGER_ADDRESS, +}; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use std::fmt::Debug; +use zksync_config::ContractsConfig; +use zksync_utils::h256_to_account_address; + +#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct VmEvent { + pub location: (L1BatchNumber, u32), + pub address: Address, + pub indexed_topics: Vec, + pub value: Vec, +} + +impl VmEvent { + pub fn index_keys(&self) -> impl Iterator + '_ { + self.indexed_topics + .iter() + .enumerate() + .map(move |(idx, &topic)| VmEventGroupKey { + address: self.address, + topic: (idx as u32, topic), + }) + } +} + +pub static DEPLOY_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + ethabi::long_signature( + "ContractDeployed", + &[ + ethabi::ParamType::Address, + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::Address, + ], + ) +}); + +static L1_MESSAGE_EVENT_SIGNATURE: Lazy = Lazy::new(|| { + ethabi::long_signature( + "L1MessageSent", + &[ + ethabi::ParamType::Address, + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::Bytes, + ], + ) +}); + +static BRIDGE_INITIALIZATION_SIGNATURE: Lazy = Lazy::new(|| { + ethabi::long_signature( + "BridgeInitialization", + &[ + ethabi::ParamType::Address, + ethabi::ParamType::String, + ethabi::ParamType::String, + ethabi::ParamType::Uint(8), + ], + ) +}); + +static PUBLISHED_BYTECODE_SIGNATURE: Lazy = Lazy::new(|| { + ethabi::long_signature( + "MarkedAsKnown", + &[ethabi::ParamType::FixedBytes(32), ethabi::ParamType::Bool], + ) +}); + +// moved from Runtime Context +pub fn 
extract_added_tokens(all_generated_events: &[VmEvent]) -> Vec { + static ERC20_BRIDGE_CONTRACT: Lazy
= Lazy::new(|| { + let config = ContractsConfig::from_env(); + config.l2_erc20_bridge_addr + }); + + let deployed_tokens = all_generated_events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. + event.address == CONTRACT_DEPLOYER_ADDRESS + && event.indexed_topics.len() == 4 + && event.indexed_topics[0] == *DEPLOY_EVENT_SIGNATURE + && h256_to_account_address(&event.indexed_topics[1]) == *ERC20_BRIDGE_CONTRACT + }) + .map(|event| h256_to_account_address(&event.indexed_topics[3])); + + extract_added_token_info_from_addresses(all_generated_events, deployed_tokens) +} + +// moved from Runtime Context +fn extract_added_token_info_from_addresses( + all_generated_events: &[VmEvent], + deployed_tokens: impl Iterator, +) -> Vec { + deployed_tokens + .filter_map(|l2_token_address| { + all_generated_events + .iter() + .find(|event| { + event.address == l2_token_address + && event.indexed_topics[0] == *BRIDGE_INITIALIZATION_SIGNATURE + }) + .map(|event| { + let l1_token_address = h256_to_account_address(&event.indexed_topics[1]); + let mut dec_ev = ethabi::decode( + &[ + ethabi::ParamType::String, + ethabi::ParamType::String, + ethabi::ParamType::Uint(8), + ], + &event.value, + ) + .unwrap(); + + TokenInfo { + l1_address: l1_token_address, + l2_address: l2_token_address, + metadata: TokenMetadata { + name: dec_ev.remove(0).into_string().unwrap(), + symbol: dec_ev.remove(0).into_string().unwrap(), + decimals: dec_ev.remove(0).into_uint().unwrap().as_u32() as u8, + }, + } + }) + }) + .collect() +} + +// moved from RuntimeContext +// Extracts all the "long" L2->L1 messages that were submitted by the +// L1Messenger contract +pub fn extract_long_l2_to_l1_messages(all_generated_events: &[VmEvent]) -> Vec> { + all_generated_events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. 
+ event.address == L1_MESSENGER_ADDRESS + && event.indexed_topics.len() == 3 + && event.indexed_topics[0] == *L1_MESSAGE_EVENT_SIGNATURE + }) + .map(|event| { + let decoded_tokens = ethabi::decode(&[ethabi::ParamType::Bytes], &event.value) + .expect("Failed to decode L1MessageSent message"); + // The `Token` does not implement `Copy` trait, so I had to do it like that: + let bytes_token = decoded_tokens.into_iter().next().unwrap(); + bytes_token.into_bytes().unwrap() + }) + .collect() +} + +// Extract all bytecodes marked as known on the system contracts +pub fn extract_bytecodes_marked_as_known(all_generated_events: &[VmEvent]) -> Vec { + all_generated_events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. + event.address == KNOWN_CODES_STORAGE_ADDRESS + && event.indexed_topics.len() == 3 + && event.indexed_topics[0] == *PUBLISHED_BYTECODE_SIGNATURE + }) + .map(|event| event.indexed_topics[1]) + .collect() +} + +// Extract bytecodes that were marked as known on the system contracts and should be published onchain +pub fn extract_published_bytecodes(all_generated_events: &[VmEvent]) -> Vec { + all_generated_events + .iter() + .filter(|event| { + // Filter events from the deployer contract that match the expected signature. 
+ event.address == KNOWN_CODES_STORAGE_ADDRESS + && event.indexed_topics.len() == 3 + && event.indexed_topics[0] == *PUBLISHED_BYTECODE_SIGNATURE + && event.indexed_topics[2] != H256::zero() + }) + .map(|event| event.indexed_topics[1]) + .collect() +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] +pub struct VmEventGroupKey { + pub address: Address, + pub topic: (u32, H256), +} diff --git a/core/lib/types/src/explorer_api.rs b/core/lib/types/src/explorer_api.rs new file mode 100644 index 000000000000..03f9780b2f33 --- /dev/null +++ b/core/lib/types/src/explorer_api.rs @@ -0,0 +1,438 @@ +use serde::de::{Deserializer, Error, MapAccess, Unexpected, Visitor}; +use std::{collections::HashMap, fmt}; + +use bigdecimal::BigDecimal; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; + +use crate::{api::Log, Address, Bytes, Execute, L1BatchNumber, MiniblockNumber, Nonce, H256, U256}; + +use serde_with::rust::display_fromstr::deserialize as deserialize_fromstr; + +pub use crate::Execute as ExecuteData; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum PaginationDirection { + Newer, + Older, +} + +#[derive(Debug, Deserialize, Serialize, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub struct PaginationQuery { + // There is known problem with serde flatten and serde_urlencoded. + // It is described here https://github.com/nox/serde_urlencoded/issues/33 + // A workaround is described here https://docs.rs/serde_qs/0.9.1/serde_qs/index.html#flatten-workaround. 
+ // It includes using of `deserialize_with` + #[serde(deserialize_with = "deserialize_fromstr")] + pub limit: usize, + #[serde(deserialize_with = "deserialize_fromstr", default)] + pub offset: usize, + pub direction: PaginationDirection, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct BlocksQuery { + pub from: Option, + #[serde(flatten)] + pub pagination: PaginationQuery, +} + +#[derive(Debug, Clone, Copy)] +pub struct TxPosition { + pub block_number: MiniblockNumber, + pub tx_index: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub struct TransactionsQuery { + pub from_block_number: Option, + pub from_tx_index: Option, + pub block_number: Option, + pub address: Option
, + pub account_address: Option
, + pub contract_address: Option
, + #[serde(flatten)] + pub pagination: PaginationQuery, +} + +impl TransactionsQuery { + pub fn tx_position(&self) -> Option { + self.from_block_number.map(|block_number| TxPosition { + block_number, + tx_index: self.from_tx_index, + }) + } +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct TransactionsResponse { + pub list: Vec, + pub total: usize, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub struct EventsQuery { + pub from_block_number: Option, + pub contract_address: Option
, + #[serde(flatten)] + pub pagination: PaginationQuery, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct EventsResponse { + pub list: Vec, + pub total: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "camelCase")] +pub enum TransactionData { + Execute(ExecuteData), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum BlockStatus { + Sealed, + Verified, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum TransactionStatus { + Pending, + Included, + Verified, + Failed, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockPageItem { + pub number: MiniblockNumber, + pub l1_tx_count: usize, + pub l2_tx_count: usize, + pub hash: Option, + pub status: BlockStatus, + pub timestamp: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TransactionResponse { + #[serde(flatten)] + pub tx: TransactionDetails, + pub logs: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TransactionDetails { + pub transaction_hash: H256, + pub data: Execute, + pub is_l1_originated: bool, + pub status: TransactionStatus, + pub fee: U256, + pub nonce: Option, + pub block_number: Option, + pub l1_batch_number: Option, + pub block_hash: Option, + pub index_in_block: Option, + pub initiator_address: Address, + pub received_at: DateTime, + pub eth_commit_tx_hash: Option, + pub eth_prove_tx_hash: Option, + pub eth_execute_tx_hash: Option, + pub erc20_transfers: Vec, + /// It is `Some` only if the transaction calls `transfer` method of some ERC20 token. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub transfer: Option, + pub balance_changes: Vec, + pub r#type: u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Erc20TransferInfo { + pub token_info: ExplorerTokenInfo, + pub from: Address, + pub to: Address, + pub amount: U256, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum BalanceChangeType { + Transfer, + Deposit, + Withdrawal, + Fee, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BalanceChangeInfo { + pub token_info: ExplorerTokenInfo, + pub from: Address, + pub to: Address, + pub amount: U256, + pub r#type: BalanceChangeType, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ExplorerTokenInfo { + pub l1_address: Address, + pub l2_address: Address, + pub address: Address, + pub symbol: String, + pub name: String, + pub decimals: u8, + pub usd_price: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BalanceItem { + pub token_info: ExplorerTokenInfo, + pub balance: U256, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum AccountType { + EOA, + Contract, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct AccountDetails { + pub address: Address, + pub balances: HashMap, + pub sealed_nonce: Nonce, + pub verified_nonce: Nonce, + pub account_type: AccountType, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ContractDetails { + #[serde(flatten)] + pub info: ContractBasicInfo, + #[serde(flatten)] + pub stats: ContractStats, + pub balances: HashMap, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", tag = "type", content = 
"info")] +pub enum AddressDetails { + Account(AccountDetails), + Contract(ContractDetails), +} + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct ContractStats { + pub total_transactions: usize, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ContractBasicInfo { + pub address: Address, + pub bytecode: Bytes, + pub creator_address: Address, + pub creator_tx_hash: H256, + pub created_in_block_number: MiniblockNumber, + pub verification_info: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BlockDetails { + pub number: MiniblockNumber, + pub timestamp: u64, + pub l1_tx_count: usize, + pub l2_tx_count: usize, + pub root_hash: Option, + pub status: BlockStatus, + pub commit_tx_hash: Option, + pub committed_at: Option>, + pub prove_tx_hash: Option, + pub proven_at: Option>, + pub execute_tx_hash: Option, + pub executed_at: Option>, +} + +#[derive(Debug, Clone, Serialize)] +#[serde(tag = "codeFormat", content = "sourceCode")] +pub enum SourceCodeData { + #[serde(rename = "solidity-single-file")] + SingleFile(String), + #[serde(rename = "solidity-standard-json-input")] + StandardJsonInput(serde_json::Map), +} + +// Implementing Custom deserializer which deserializes `SourceCodeData` +// as `SingleFile` if `codeFormat` is not specified. 
+// Serde doesn't support this feature: https://github.com/serde-rs/serde/issues/2231 +impl<'de> Deserialize<'de> for SourceCodeData { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_map(SourceCodeVisitor) + } +} + +struct SourceCodeVisitor; + +impl<'de> Visitor<'de> for SourceCodeVisitor { + type Value = SourceCodeData; + fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str("source code data") + } + fn visit_map(self, mut map: A) -> Result + where + A: MapAccess<'de>, + { + let mut source_code = None; + let mut r#type = None; + while let Some(key) = map.next_key::()? { + match &*key { + "sourceCode" => source_code = Some(map.next_value::()?), + "codeFormat" => r#type = Some(map.next_value::()?), + _ => continue, + } + } + let result = match r#type.as_deref() { + Some("solidity-single-file") | None => { + let value = source_code.ok_or_else(|| A::Error::missing_field("source_code"))?; + SourceCodeData::SingleFile( + value + .as_str() + .ok_or_else(|| { + A::Error::invalid_type(Unexpected::Other(&value.to_string()), &self) + })? + .to_string(), + ) + } + Some("solidity-standard-json-input") => { + let value = source_code.ok_or_else(|| A::Error::missing_field("source_code"))?; + SourceCodeData::StandardJsonInput( + value + .as_object() + .ok_or_else(|| { + A::Error::invalid_type(Unexpected::Other(&value.to_string()), &self) + })? 
+ .clone(), + ) + } + Some(x) => { + return Err(A::Error::unknown_variant( + x, + &["solidity-single-file", "solidity-standard-json-input"], + )) + } + }; + Ok(result) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VerificationIncomingRequest { + pub contract_address: Address, + #[serde(flatten)] + pub source_code_data: SourceCodeData, + pub contract_name: String, + pub compiler_zksolc_version: String, + pub compiler_solc_version: String, + pub optimization_used: bool, + #[serde(default)] + pub constructor_arguments: Bytes, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VerificationRequest { + pub id: usize, + #[serde(flatten)] + pub req: VerificationIncomingRequest, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CompilationArtifacts { + pub bytecode: Vec, + pub abi: serde_json::Value, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VerificationInfo { + pub request: VerificationRequest, + pub artifacts: CompilationArtifacts, + pub verified_at: DateTime, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct VerificationRequestStatus { + pub status: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub compilation_errors: Option>, +} + +#[derive(Debug)] +pub enum DeployContractCalldata { + Deploy(Vec), + Ignore, +} + +#[cfg(test)] +mod tests { + use super::SourceCodeData; + + #[test] + fn source_code_deserialization() { + let single_file_str = r#"{"codeFormat": "solidity-single-file", "sourceCode": "text"}"#; + let single_file_result = serde_json::from_str::(single_file_str); + assert!(matches!( + single_file_result, + Ok(SourceCodeData::SingleFile(_)) + )); + + let stand_json_input_str = + r#"{"codeFormat": 
"solidity-standard-json-input", "sourceCode": {}}"#; + let stand_json_input_result = serde_json::from_str::(stand_json_input_str); + assert!(matches!( + stand_json_input_result, + Ok(SourceCodeData::StandardJsonInput(_)) + )); + + let type_not_specified_str = r#"{"sourceCode": "text"}"#; + let type_not_specified_result = + serde_json::from_str::(type_not_specified_str); + assert!(matches!( + type_not_specified_result, + Ok(SourceCodeData::SingleFile(_)) + )); + + let type_not_specified_object_str = r#"{"sourceCode": {}}"#; + let type_not_specified_object_result = + serde_json::from_str::(type_not_specified_object_str); + assert!(type_not_specified_object_result.is_err()); + } +} diff --git a/core/lib/types/src/fee.rs b/core/lib/types/src/fee.rs new file mode 100644 index 000000000000..3f5605235aa6 --- /dev/null +++ b/core/lib/types/src/fee.rs @@ -0,0 +1,73 @@ +use serde::{Deserialize, Serialize}; +use zksync_utils::ceil_div; + +use crate::U256; + +#[derive(Debug, Serialize, Deserialize, Clone, Default)] +#[serde(rename_all = "camelCase", tag = "result")] +pub struct TransactionExecutionMetrics { + pub initial_storage_writes: usize, + pub repeated_storage_writes: usize, + pub gas_used: usize, + pub event_topics: u16, + pub published_bytecode_bytes: usize, + pub l2_l1_long_messages: usize, + pub l2_l1_logs: usize, + pub contracts_used: usize, + pub contracts_deployed: u16, + pub vm_events: usize, + pub storage_logs: usize, + // it's the sum of storage logs, vm events, l2->l1 logs, + // and the number of precompile calls + pub total_log_queries: usize, + pub cycles_used: u32, +} + +#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash)] +pub struct Fee { + /// The limit of gas that are to be spent on the actual transaction. + pub gas_limit: U256, + /// zkSync version of EIP1559 maxFeePerGas. + pub max_fee_per_gas: U256, + /// zkSync version of EIP1559 maxPriorityFeePerGas. 
+ pub max_priority_fee_per_gas: U256, + /// The maximal gas per pubdata byte the user agrees to. + pub gas_per_pubdata_limit: U256, +} + +impl Fee { + pub fn max_total_fee(&self) -> U256 { + self.max_fee_per_gas * self.gas_limit + } + + pub fn get_effective_gas_price(&self, block_base_fee_per_gas: U256) -> U256 { + assert!(block_base_fee_per_gas <= self.max_fee_per_gas); + assert!(self.max_priority_fee_per_gas <= self.max_fee_per_gas); + + let max_that_operator_could_take = block_base_fee_per_gas + self.max_priority_fee_per_gas; + std::cmp::min(max_that_operator_could_take, self.max_fee_per_gas) + } +} + +/// Returns how many slots would ABI-encoding of the transaction with such parameters take +pub fn encoding_len( + data_len: u64, + signature_len: u64, + factory_deps_len: u64, + paymaster_input_len: u64, + reserved_dynamic_len: u64, +) -> usize { + // The length assuming that all the dynamic fields are empty, i.e. it includes + // encoding of fixed-length fields and the lengths of the dynamic fields + 1 0x20 starting symbol + const BASE_LEN: usize = 1 + 19 + 5; + + // All of the fields are encoded as `bytes`, so their encoding takes ceil(len, 32) slots. + // Factory deps are encoded as an array of bytes32. 
+ let dynamic_len = ceil_div(data_len, 32) + + ceil_div(signature_len, 32) + + ceil_div(paymaster_input_len, 32) + + ceil_div(reserved_dynamic_len, 32) + + factory_deps_len; + + BASE_LEN + dynamic_len as usize +} diff --git a/core/lib/types/src/helpers.rs b/core/lib/types/src/helpers.rs new file mode 100644 index 000000000000..378c3d6b4b4f --- /dev/null +++ b/core/lib/types/src/helpers.rs @@ -0,0 +1,8 @@ +use std::time::SystemTime; + +pub fn unix_timestamp_ms() -> u64 { + SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis() as u64 +} diff --git a/core/lib/types/src/l1/error.rs b/core/lib/types/src/l1/error.rs new file mode 100644 index 000000000000..d33b57653a50 --- /dev/null +++ b/core/lib/types/src/l1/error.rs @@ -0,0 +1,13 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum L1TxParseError { + #[error("PubData length mismatch")] + PubdataLengthMismatch, + #[error("Unsupported priority op type")] + UnsupportedPriorityOpType, + #[error("Unexpected priority queue type")] + UnexpectedPriorityQueueType, + #[error("Ethereum ABI error: {0}")] + AbiError(#[from] crate::ethabi::Error), +} diff --git a/core/lib/types/src/l1/mod.rs b/core/lib/types/src/l1/mod.rs new file mode 100644 index 000000000000..521839a174d1 --- /dev/null +++ b/core/lib/types/src/l1/mod.rs @@ -0,0 +1,346 @@ +//! Definition of zkSync network priority operations: operations initiated from the L1. 
+ +use serde::{Deserialize, Serialize}; +use std::convert::TryFrom; +use zksync_basic_types::{ + ethabi::{decode, ParamType, Token}, + Address, Log, PriorityOpId, H160, H256, U256, +}; +use zksync_utils::u256_to_account_address; + +use crate::{ + l1::error::L1TxParseError, + priority_op_onchain_data::{PriorityOpOnchainData, PriorityOpOnchainMetadata}, + tx::Execute, + ExecuteTransactionCommon, +}; + +use super::Transaction; + +use crate::helpers::unix_timestamp_ms; +use crate::l2::TransactionType; + +pub mod error; + +#[derive(Debug, PartialEq, Serialize, Deserialize, Clone, Copy)] +#[repr(u8)] +pub enum OpProcessingType { + Common = 0, + OnlyRollup = 1, +} + +impl TryFrom for OpProcessingType { + type Error = (); + + fn try_from(v: u8) -> Result { + match v { + x if x == OpProcessingType::Common as u8 => Ok(OpProcessingType::Common), + x if x == OpProcessingType::OnlyRollup as u8 => Ok(OpProcessingType::OnlyRollup), + _ => Err(()), + } + } +} + +impl Default for OpProcessingType { + fn default() -> Self { + Self::Common + } +} + +#[derive(Debug, PartialEq, Serialize, Deserialize, Clone, Copy)] +#[repr(u8)] +pub enum PriorityQueueType { + Deque = 0, + HeapBuffer = 1, + Heap = 2, +} + +impl TryFrom for PriorityQueueType { + type Error = (); + + fn try_from(v: u8) -> Result { + match v { + x if x == PriorityQueueType::Deque as u8 => Ok(PriorityQueueType::Deque), + x if x == PriorityQueueType::HeapBuffer as u8 => Ok(PriorityQueueType::HeapBuffer), + x if x == PriorityQueueType::Heap as u8 => Ok(PriorityQueueType::Heap), + _ => Err(()), + } + } +} + +impl Default for PriorityQueueType { + fn default() -> Self { + PriorityQueueType::Deque + } +} + +#[derive(Default, Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct L1TxCommonData { + /// Sender of the transaction. + pub sender: Address, + /// Unique ID of the priority operation. + pub serial_id: PriorityOpId, + /// Ethereum deadline block until which operation must be processed. 
+ pub deadline_block: u64, + /// Additional payment to the operator as an incentive to perform the operation. The contract uses a value of 192 bits. + pub layer_2_tip_fee: U256, + /// The total cost the sender paid for the transaction. + pub full_fee: U256, + /// The maximum number of gas that a transaction can spend at a price of gas equals 1. + pub gas_limit: U256, + /// The maximum number of gas per 1 byte of pubdata. + pub gas_per_pubdata_limit: U256, + /// Indicator that the operation can interact with Rollup and Porter trees, or only with Rollup. + pub op_processing_type: OpProcessingType, + /// Priority operations queue type. + pub priority_queue_type: PriorityQueueType, + /// Hash of the corresponding Ethereum transaction. Size should be 32 bytes. + pub eth_hash: H256, + /// Block in which Ethereum transaction was included. + pub eth_block: u64, + /// Tx hash of the transaction in the zkSync network. Calculated as the encoded transaction data hash. + pub canonical_tx_hash: H256, + /// The amount of ETH that should be minted with this transaction + pub to_mint: U256, + /// The recipient of the refund of the transaction + pub refund_recipient: Address, +} + +impl L1TxCommonData { + pub fn hash(&self) -> H256 { + self.canonical_tx_hash + } + + pub fn onchain_data(&self) -> PriorityOpOnchainData { + PriorityOpOnchainData { + layer_2_tip_fee: self.layer_2_tip_fee, + onchain_data_hash: self.hash(), + } + } + + pub fn onchain_metadata(&self) -> PriorityOpOnchainMetadata { + PriorityOpOnchainMetadata { + op_processing_type: self.op_processing_type, + priority_queue_type: self.priority_queue_type, + onchain_data: self.onchain_data(), + } + } + + pub fn tx_format(&self) -> TransactionType { + TransactionType::PriorityOpTransaction + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L1Tx { + pub execute: Execute, + pub common_data: L1TxCommonData, + pub received_timestamp_ms: u64, +} + +impl From for Transaction { + fn from(tx: L1Tx) -> Self { + let 
L1Tx { + execute, + common_data, + received_timestamp_ms, + } = tx; + Self { + common_data: ExecuteTransactionCommon::L1(common_data), + execute, + received_timestamp_ms, + } + } +} + +impl TryFrom for L1Tx { + type Error = (); + + fn try_from(value: Transaction) -> Result { + let Transaction { + common_data, + execute, + received_timestamp_ms, + } = value; + match common_data { + ExecuteTransactionCommon::L1(common_data) => Ok(L1Tx { + execute, + common_data, + received_timestamp_ms, + }), + ExecuteTransactionCommon::L2(_) => Err(()), + } + } +} + +impl L1Tx { + pub fn serial_id(&self) -> PriorityOpId { + self.common_data.serial_id + } + + pub fn eth_block(&self) -> u64 { + self.common_data.eth_block + } + + pub fn hash(&self) -> H256 { + self.common_data.hash() + } +} + +impl TryFrom for L1Tx { + type Error = L1TxParseError; + + fn try_from(event: Log) -> Result { + let transaction_param_type = ParamType::Tuple(vec![ + ParamType::Uint(8), // txType + ParamType::Address, // sender + ParamType::Address, // to + ParamType::Uint(256), // gasLimit + ParamType::Uint(256), // gasPerPubdataLimit + ParamType::Uint(256), // maxFeePerGas + ParamType::Uint(256), // maxPriorityFeePerGas + ParamType::Address, // paymaster + ParamType::Uint(256), // nonce (serial ID) + ParamType::Uint(256), // value + ParamType::FixedArray(Box::new(ParamType::Uint(256)), 4), // reserved + ParamType::Bytes, // calldata + ParamType::Bytes, // signature + ParamType::Array(Box::new(ParamType::Uint(256))), // factory deps + ParamType::Bytes, // paymaster input + ParamType::Bytes, // reservedDynamic + ]); + + let mut dec_ev = decode( + &[ + ParamType::Uint(256), // tx ID + ParamType::FixedBytes(32), // tx hash + ParamType::Uint(64), // expiration block + transaction_param_type, // transaction data + ParamType::Array(Box::new(ParamType::Bytes)), // factory deps + ], + &event.data.0, + )?; + + let eth_hash = event + .transaction_hash + .expect("Event transaction hash is missing"); + let eth_block = 
event + .block_number + .expect("Event block number is missing") + .as_u64(); + + let serial_id = PriorityOpId( + dec_ev + .remove(0) + .into_uint() + .as_ref() + .map(U256::as_u64) + .unwrap(), + ); + + let canonical_tx_hash = H256::from_slice(&dec_ev.remove(0).into_fixed_bytes().unwrap()); + + let deadline_block = dec_ev.remove(0).into_uint().unwrap().as_u64(); + + // Decoding transaction bytes + let mut transaction = match dec_ev.remove(0) { + Token::Tuple(tx) => tx, + _ => unreachable!(), + }; + + assert_eq!(transaction.len(), 16); + + let tx_type = transaction.remove(0).into_uint().unwrap(); + assert_eq!(tx_type.clone(), U256::from(255u8)); // L1TxType + + let sender = transaction.remove(0).into_address().unwrap(); + let contract_address = transaction.remove(0).into_address().unwrap(); + + let gas_limit = transaction.remove(0).into_uint().unwrap(); + + let gas_per_pubdata_limit = transaction.remove(0).into_uint().unwrap(); + + let max_fee_per_gas = transaction.remove(0).into_uint().unwrap(); + assert_eq!(max_fee_per_gas, U256::zero()); + + let max_priority_fee_per_gas = transaction.remove(0).into_uint().unwrap(); + assert_eq!(max_priority_fee_per_gas, U256::zero()); + + let paymaster = transaction.remove(0).into_address().unwrap(); + assert_eq!(paymaster, H160::zero()); + + let serial_id_from_tx = transaction.remove(0).into_uint().unwrap(); + assert_eq!(serial_id_from_tx, serial_id.0.into()); // serial id from decoded from transaction bytes should be equal to one from event + + let msg_value = transaction.remove(0).into_uint().unwrap(); + + let reserved = transaction + .remove(0) + .into_fixed_array() + .unwrap() + .into_iter() + .map(|token| token.into_uint().unwrap()) + .collect::>(); + assert_eq!(reserved.len(), 4); + + let to_mint = reserved[0]; + let refund_recipient = u256_to_account_address(&reserved[1]); + + // All other reserved fields should be zero + for item in reserved.iter().skip(2) { + assert_eq!(item, &U256::zero()); + } + + let calldata = 
transaction.remove(0).into_bytes().unwrap(); + + let signature = transaction.remove(0).into_bytes().unwrap(); + assert_eq!(signature.len(), 0); + + let _factory_deps_hashes = transaction.remove(0).into_array().unwrap(); + let _paymaster_input = transaction.remove(0).into_bytes().unwrap(); + let _reserved_dynamic = transaction.remove(0).into_bytes().unwrap(); + + // Decoding metadata + + // Finally, decode the factory dependencies + let factory_deps = match dec_ev.remove(0) { + Token::Array(factory_deps) => factory_deps, + _ => unreachable!(), + }; + + let factory_deps = factory_deps + .into_iter() + .map(|token| token.into_bytes().unwrap()) + .collect::>(); + + let common_data = L1TxCommonData { + serial_id, + canonical_tx_hash, + sender, + deadline_block, + layer_2_tip_fee: U256::zero(), + to_mint, + refund_recipient, + full_fee: U256::zero(), + gas_limit, + gas_per_pubdata_limit, + op_processing_type: OpProcessingType::Common, + priority_queue_type: PriorityQueueType::Deque, + eth_hash, + eth_block, + }; + + let execute = Execute { + contract_address, + calldata: calldata.to_vec(), + factory_deps: Some(factory_deps), + value: msg_value, + }; + Ok(Self { + common_data, + execute, + received_timestamp_ms: unix_timestamp_ms(), + }) + } +} diff --git a/core/lib/types/src/l2/error.rs b/core/lib/types/src/l2/error.rs new file mode 100644 index 000000000000..c157f95b09f4 --- /dev/null +++ b/core/lib/types/src/l2/error.rs @@ -0,0 +1,18 @@ +use parity_crypto::publickey::Error as ParityCryptoError; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use zksync_basic_types::H256; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, Error)] +pub enum TxCheckError { + #[error("transaction type {0} not supported")] + UnsupportedType(u32), + #[error("known transaction. 
transaction with hash {0} is already in the system")] + TxDuplication(H256), +} + +#[derive(Debug, Error)] +pub enum SignError { + #[error("Failed to sign transaction")] + SignError(#[from] ParityCryptoError), +} diff --git a/core/lib/types/src/l2/mod.rs b/core/lib/types/src/l2/mod.rs new file mode 100644 index 000000000000..380812147e06 --- /dev/null +++ b/core/lib/types/src/l2/mod.rs @@ -0,0 +1,330 @@ +use rlp::RlpStream; + +use self::error::SignError; +use crate::transaction_request::PaymasterParams; +use crate::{ + api::Eip712Meta, tx::primitives::PackedEthSignature, tx::Execute, web3::types::U64, Address, + Bytes, EIP712TypedStructure, Eip712Domain, ExecuteTransactionCommon, InputData, L2ChainId, + Nonce, StructBuilder, Transaction, EIP_712_TX_TYPE, H256, PRIORITY_OPERATION_L2_TX_TYPE, U256, +}; + +use serde::{Deserialize, Serialize}; + +pub mod error; + +use crate::api::TransactionRequest; +use crate::fee::{encoding_len, Fee}; +use crate::helpers::unix_timestamp_ms; + +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u32)] +pub enum TransactionType { + // Native ECDSA Transaction + LegacyTransaction = 0, + + EIP2930Transaction = 1, + EIP1559Transaction = 2, + // Eip 712 transaction with additional fields specified for zksync + EIP712Transaction = EIP_712_TX_TYPE as u32, + PriorityOpTransaction = PRIORITY_OPERATION_L2_TX_TYPE as u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct L2TxCommonData { + pub nonce: Nonce, + pub fee: Fee, + pub initiator_address: Address, + pub signature: Vec, + pub transaction_type: TransactionType, + /// This input consists of raw transaction bytes when we receive it from API. + /// But we still use this structure for zksync-rs and tests, and we don't have raw tx before + /// creating the structure. We setup this field manually later for consistency. 
+ /// We need some research on how to change it + pub input: Option, + + pub paymaster_params: PaymasterParams, +} + +impl L2TxCommonData { + #[allow(clippy::too_many_arguments)] + pub fn new( + nonce: Nonce, + fee: Fee, + initiator_address: Address, + signature: Vec, + transaction_type: TransactionType, + input: Vec, + hash: H256, + paymaster_params: PaymasterParams, + ) -> Self { + let input = Some(InputData { hash, data: input }); + Self { + nonce, + fee, + initiator_address, + signature, + transaction_type, + input, + paymaster_params, + } + } + + pub fn input_data(&self) -> Option> { + self.input.as_ref().map(|i| i.data.clone()) + } + + pub fn hash(&self) -> H256 { + self.input.clone().unwrap().hash + } + + pub fn set_input(&mut self, input: Vec, hash: H256) { + self.input = Some(InputData { hash, data: input }) + } +} + +impl Default for L2TxCommonData { + fn default() -> Self { + Self { + nonce: Nonce(0), + fee: Default::default(), + initiator_address: Address::zero(), + signature: Default::default(), + transaction_type: TransactionType::EIP712Transaction, + input: Default::default(), + paymaster_params: Default::default(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct L2Tx { + pub execute: Execute, + pub common_data: L2TxCommonData, + pub received_timestamp_ms: u64, +} + +impl L2Tx { + #[allow(clippy::too_many_arguments)] + pub fn new( + contract_address: Address, + calldata: Vec, + nonce: Nonce, + fee: Fee, + initiator_address: Address, + value: U256, + factory_deps: Option>>, + paymaster_params: PaymasterParams, + ) -> Self { + Self { + execute: Execute { + contract_address, + calldata, + value, + factory_deps, + }, + common_data: L2TxCommonData { + nonce, + fee, + initiator_address, + signature: Default::default(), + transaction_type: TransactionType::EIP712Transaction, + input: None, + paymaster_params, + }, + received_timestamp_ms: unix_timestamp_ms(), + } + } + + #[allow(clippy::too_many_arguments)] + pub fn new_signed( + 
contract_address: Address, + calldata: Vec, + nonce: Nonce, + fee: Fee, + value: U256, + chain_id: L2ChainId, + private_key: &H256, + factory_deps: Option>>, + paymaster_params: PaymasterParams, + ) -> Result { + let initiator_address = PackedEthSignature::address_from_private_key(private_key).unwrap(); + let mut res = Self::new( + contract_address, + calldata, + nonce, + fee, + initiator_address, + value, + factory_deps, + paymaster_params, + ); + + let data = res.get_signed_bytes(chain_id); + res.set_signature(PackedEthSignature::sign_raw(private_key, &data)?); + Ok(res) + } + + /// Returns the hash of the transaction. + pub fn hash(&self) -> H256 { + self.common_data.hash() + } + + /// Returns the account affected by the transaction. + pub fn initiator_account(&self) -> Address { + self.common_data.initiator_address + } + + /// Returns recipient account of the transaction. + pub fn recipient_account(&self) -> Address { + self.execute.contract_address + } + + /// Returns the account nonce associated with transaction. 
+ pub fn nonce(&self) -> Nonce { + self.common_data.nonce + } + + pub fn set_input(&mut self, data: Vec, hash: H256) { + self.common_data.set_input(data, hash) + } + + pub fn get_rlp_bytes(&self, chain_id: L2ChainId) -> Bytes { + let mut rlp_stream = RlpStream::new(); + let tx: TransactionRequest = self.clone().into(); + tx.rlp(&mut rlp_stream, chain_id.0, None); + Bytes(rlp_stream.as_raw().to_vec()) + } + + pub fn get_signed_bytes(&self, chain_id: L2ChainId) -> H256 { + let tx: TransactionRequest = self.clone().into(); + if tx.is_eip712_tx() { + PackedEthSignature::typed_data_to_signed_bytes(&Eip712Domain::new(chain_id), &tx) + } else { + let mut data = self.get_rlp_bytes(chain_id).0; + if let Some(tx_type) = tx.transaction_type { + data.insert(0, tx_type.as_u32() as u8); + } + PackedEthSignature::message_to_signed_bytes(&data) + } + } + + pub fn set_signature(&mut self, signature: PackedEthSignature) { + self.set_raw_signature(signature.serialize_packed().to_vec()); + } + + pub fn set_raw_signature(&mut self, signature: Vec) { + self.common_data.signature = signature; + } + + pub fn abi_encoding_len(&self) -> usize { + let data_len = self.execute.calldata.len(); + let signature_len = self.common_data.signature.len(); + let factory_deps_len = self.execute.factory_deps_length(); + let paymaster_input_len = self.common_data.paymaster_params.paymaster_input.len(); + + encoding_len( + data_len as u64, + signature_len as u64, + factory_deps_len as u64, + paymaster_input_len as u64, + 0, + ) + } + + pub fn payer(&self) -> Address { + if self.common_data.paymaster_params.paymaster != Address::zero() { + self.common_data.paymaster_params.paymaster + } else { + self.initiator_account() + } + } + + pub fn factory_deps_len(&self) -> u32 { + self.execute + .factory_deps + .as_ref() + .map(|deps| deps.iter().fold(0u32, |len, item| len + item.len() as u32)) + .unwrap_or_default() + } +} + +impl From for TransactionRequest { + fn from(tx: L2Tx) -> Self { + let tx_type = 
tx.common_data.transaction_type as u32; + let (v, r, s) = + if let Ok(sig) = PackedEthSignature::deserialize_packed(&tx.common_data.signature) { + ( + Some(U64::from(sig.v())), + Some(U256::from(sig.r())), + Some(U256::from(sig.s())), + ) + } else { + (None, None, None) + }; + TransactionRequest { + nonce: U256::from(tx.common_data.nonce.0), + from: Some(tx.common_data.initiator_address), + to: Some(tx.recipient_account()), + value: tx.execute.value, + gas_price: tx.common_data.fee.max_fee_per_gas, + max_priority_fee_per_gas: Some(tx.common_data.fee.max_priority_fee_per_gas), + gas: tx.common_data.fee.gas_limit, + input: Bytes(tx.execute.calldata), + v, + r, + s, + raw: None, + transaction_type: if tx_type == 0 { + None + } else { + Some(U64::from(tx_type)) + }, + access_list: None, + eip712_meta: Some(Eip712Meta { + gas_per_pubdata: tx.common_data.fee.gas_per_pubdata_limit, + factory_deps: tx.execute.factory_deps, + custom_signature: Some(tx.common_data.signature), + paymaster_params: Some(tx.common_data.paymaster_params), + }), + chain_id: None, + } + } +} + +impl From for Transaction { + fn from(tx: L2Tx) -> Self { + let L2Tx { + execute, + common_data, + received_timestamp_ms, + } = tx; + Self { + common_data: ExecuteTransactionCommon::L2(common_data), + execute, + received_timestamp_ms, + } + } +} + +impl EIP712TypedStructure for L2Tx { + const TYPE_NAME: &'static str = "Transaction"; + + fn build_structure(&self, builder: &mut BUILDER) { + builder.add_member("txType", &(self.common_data.transaction_type as u8)); + + self.execute.build_structure(builder); + + builder.add_member("gasLimit", &self.common_data.fee.gas_limit); + builder.add_member( + "gasPerPubdataByteLimit", + &self.common_data.fee.gas_per_pubdata_limit, + ); + builder.add_member("maxFeePerGas", &self.common_data.fee.max_fee_per_gas); + builder.add_member( + "maxPriorityFeePerGas", + &self.common_data.fee.max_priority_fee_per_gas, + ); + builder.add_member("nonce", 
&U256::from(self.common_data.nonce.0)); + } +} diff --git a/core/lib/types/src/l2_to_l1_log.rs b/core/lib/types/src/l2_to_l1_log.rs new file mode 100644 index 000000000000..25455e3e4099 --- /dev/null +++ b/core/lib/types/src/l2_to_l1_log.rs @@ -0,0 +1,42 @@ +use crate::commitment::CommitmentSerializable; +use crate::{Address, H256}; +use serde::{Deserialize, Serialize}; +use zk_evm::reference_impls::event_sink::EventMessage; +use zksync_utils::u256_to_h256; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default, Eq)] +pub struct L2ToL1Log { + pub shard_id: u8, + pub is_service: bool, + pub tx_number_in_block: u16, + pub sender: Address, + pub key: H256, + pub value: H256, +} + +impl From> for L2ToL1Log { + fn from(data: Vec) -> Self { + assert_eq!(data.len(), L2ToL1Log::SERIALIZED_SIZE); + Self { + shard_id: data[0], + is_service: data[1] != 0, + tx_number_in_block: u16::from_be_bytes([data[2], data[3]]), + sender: Address::from_slice(&data[4..24]), + key: H256::from_slice(&data[24..56]), + value: H256::from_slice(&data[56..88]), + } + } +} + +impl From for L2ToL1Log { + fn from(m: EventMessage) -> Self { + Self { + shard_id: m.shard_id, + is_service: m.is_first, + tx_number_in_block: m.tx_number_in_block, + sender: m.address, + key: u256_to_h256(m.key), + value: u256_to_h256(m.value), + } + } +} diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs new file mode 100644 index 000000000000..f0f12d018593 --- /dev/null +++ b/core/lib/types/src/lib.rs @@ -0,0 +1,152 @@ +//! zkSync types: essential type definitions for zkSync network. +//! +//! `zksync_types` is a crate containing essential zkSync network types, such as transactions, operations and +//! blockchain primitives. 
+ +#![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] + +use serde::{Deserialize, Serialize}; +use std::fmt::Debug; + +pub use crate::{Nonce, H256, U256, U64}; + +pub type SerialId = u64; + +use crate::l2::TransactionType; +pub use event::{VmEvent, VmEventGroupKey}; +pub use l1::L1TxCommonData; +pub use l2::L2TxCommonData; +pub use storage::*; +pub use tx::primitives::*; +pub use tx::Execute; +pub use zk_evm; +pub use zkevm_test_harness; +pub use zksync_basic_types::*; + +pub mod aggregated_operations; +pub mod block; +pub mod circuit; +pub mod commitment; +pub mod event; +pub mod explorer_api; +pub mod fee; +pub mod l1; +pub mod l2; +pub mod l2_to_l1_log; +pub mod priority_op_onchain_data; +pub mod pubdata_packing; +pub mod storage; +pub mod system_contracts; +pub mod tokens; +pub mod tx; +pub mod vm_trace; + +pub mod api; +pub mod eth_sender; +pub mod helpers; +pub mod log_query_sorter; +pub mod proofs; +pub mod transaction_request; +pub mod utils; + +/// Denotes the first byte of the special zkSync's EIP-712-signed transaction. +pub const EIP_712_TX_TYPE: u8 = 0x71; + +/// Denotes the first byte of the `EIP-1559` transaction. +pub const EIP_1559_TX_TYPE: u8 = 0x02; + +/// Denotes the first byte of the `EIP-2930` transaction. +pub const EIP_2930_TX_TYPE: u8 = 0x01; + +/// Denotes the first byte of some legacy transaction, which type is unknown to the server. +pub const LEGACY_TX_TYPE: u8 = 0x0; + +/// Denotes the first byte of some legacy transaction, which type is unknown to the server. +pub const PRIORITY_OPERATION_L2_TX_TYPE: u8 = 0xff; + +#[derive(Debug, Clone)] +pub struct Transaction { + pub common_data: ExecuteTransactionCommon, + pub execute: Execute, + pub received_timestamp_ms: u64, +} + +impl PartialEq for Transaction { + fn eq(&self, other: &Transaction) -> bool { + self.hash() == other.hash() + } +} + +impl Eq for Transaction {} + +impl Transaction { + /// Returns recipient account of the transaction. 
+ pub fn recipient_account(&self) -> Address { + self.execute.contract_address + } + + pub fn nonce(&self) -> Option { + match &self.common_data { + ExecuteTransactionCommon::L1(_) => None, + ExecuteTransactionCommon::L2(tx) => Some(tx.nonce), + } + } + + pub fn is_l1(&self) -> bool { + matches!(self.common_data, ExecuteTransactionCommon::L1(_)) + } + + pub fn tx_format(&self) -> TransactionType { + match &self.common_data { + ExecuteTransactionCommon::L1(tx) => tx.tx_format(), + ExecuteTransactionCommon::L2(tx) => tx.transaction_type, + } + } + + pub fn type_display(&self) -> &'static str { + match &self.common_data { + ExecuteTransactionCommon::L1(_) => "l1_transaction", + ExecuteTransactionCommon::L2(_) => "l2_transaction", + } + } +} + +impl Transaction { + pub fn hash(&self) -> H256 { + match &self.common_data { + ExecuteTransactionCommon::L1(data) => data.hash(), + ExecuteTransactionCommon::L2(data) => data.hash(), + } + } + + /// Returns the account that initiated this transaction. + pub fn initiator_account(&self) -> Address { + match &self.common_data { + ExecuteTransactionCommon::L1(data) => data.sender, + ExecuteTransactionCommon::L2(data) => data.initiator_address, + } + } + + pub fn gas_limit(&self) -> U256 { + match &self.common_data { + ExecuteTransactionCommon::L1(data) => data.gas_limit, + ExecuteTransactionCommon::L2(data) => data.fee.gas_limit, + } + } +} + +/// Optional input `Ethereum`-like encoded transaction if submitted via Web3 API. +/// If exists, its hash will be used to identify transaction. +/// Note, that for EIP712-type transactions, `hash` is not equal to the hash +/// of the `data`, but rather calculated by special formula. 
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct InputData { + pub hash: H256, + pub data: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum ExecuteTransactionCommon { + L1(L1TxCommonData), + L2(L2TxCommonData), +} diff --git a/core/lib/types/src/log_query_sorter.rs b/core/lib/types/src/log_query_sorter.rs new file mode 100644 index 000000000000..928c824380b4 --- /dev/null +++ b/core/lib/types/src/log_query_sorter.rs @@ -0,0 +1,312 @@ +// copied from https://github.com/matter-labs/zkevm_test_harness/blob/main/src/witness/sort_storage_access.rs + +use std::cmp::Ordering; + +use zk_evm::aux_structures::LogQuery; + +use rayon::slice::ParallelSliceMut; + +use crate::{StorageLogQuery, StorageLogQueryType, U256}; + +#[derive(Debug, Clone, Copy)] +pub struct LogQueryWithExtendedEnumeration { + pub raw_query: StorageLogQuery, + pub extended_timestamp: u32, +} + +#[derive(Debug)] +pub struct StorageSlotHistoryKeeper { + pub initial_value: Option, + pub current_value: Option, + pub changes_stack: Vec, + pub did_read_at_depth_zero: bool, + pub minimum_log_type: StorageLogQueryType, +} + +#[allow(clippy::all)] +pub fn sort_storage_access_queries( + unsorted_storage_queries: &[StorageLogQuery], +) -> (Vec, Vec) { + let mut sorted_storage_queries_with_extra_timestamp: Vec<_> = unsorted_storage_queries + .iter() + .enumerate() + .map(|(i, el)| LogQueryWithExtendedEnumeration { + raw_query: el.clone(), + extended_timestamp: i as u32, + }) + .collect(); + + sorted_storage_queries_with_extra_timestamp.par_sort_by(|a, b| { + match a + .raw_query + .log_query + .shard_id + .cmp(&a.raw_query.log_query.shard_id) + { + Ordering::Equal => match a + .raw_query + .log_query + .address + .cmp(&b.raw_query.log_query.address) + { + Ordering::Equal => { + match a.raw_query.log_query.key.cmp(&b.raw_query.log_query.key) { + Ordering::Equal => a.extended_timestamp.cmp(&b.extended_timestamp), + r @ _ => r, + } + } + r @ _ => r, + }, + r @ _ => r, + } + 
}); + + let mut deduplicated_storage_queries = vec![]; + + // now just implement the logic to sort and deduplicate + let mut it = sorted_storage_queries_with_extra_timestamp + .iter() + .peekable(); + + loop { + if it.peek().is_none() { + break; + } + + let candidate = it.peek().unwrap().clone(); + + let subit = it.clone().take_while(|el| { + el.raw_query.log_query.shard_id == candidate.raw_query.log_query.shard_id + && el.raw_query.log_query.address == candidate.raw_query.log_query.address + && el.raw_query.log_query.key == candidate.raw_query.log_query.key + }); + + let mut current_element_history = StorageSlotHistoryKeeper { + initial_value: None, + current_value: None, + changes_stack: vec![], + did_read_at_depth_zero: false, + minimum_log_type: StorageLogQueryType::RepeatedWrite, + }; + let mut last_write_is_rollback = false; + + for (_idx, el) in subit.enumerate() { + let _ = it.next().unwrap(); + + if current_element_history.current_value.is_none() { + assert!( + current_element_history.initial_value.is_none(), + "invalid for query {:?}", + el + ); + // first read potentially + if el.raw_query.log_query.rw_flag == false { + current_element_history.did_read_at_depth_zero = true; + } + } else { + // explicit read at zero + if el.raw_query.log_query.rw_flag == false + && current_element_history.changes_stack.is_empty() + { + current_element_history.did_read_at_depth_zero = true; + } + } + + if current_element_history.current_value.is_none() { + assert!( + current_element_history.initial_value.is_none(), + "invalid for query {:?}", + el + ); + if el.raw_query.log_query.rw_flag == false { + current_element_history.initial_value = Some(el.raw_query.log_query.read_value); + current_element_history.current_value = Some(el.raw_query.log_query.read_value); + } else { + assert!(el.raw_query.log_query.rollback == false); + current_element_history.initial_value = Some(el.raw_query.log_query.read_value); + current_element_history.current_value = 
Some(el.raw_query.log_query.read_value); + // note: We apply updates few lines later + } + } + + if el.raw_query.log_query.rw_flag == false { + assert_eq!( + &el.raw_query.log_query.read_value, + current_element_history.current_value.as_ref().unwrap(), + "invalid for query {:?}", + el + ); + + // and do not place reads into the stack + } else if el.raw_query.log_query.rw_flag == true { + if matches!(el.raw_query.log_type, StorageLogQueryType::InitialWrite) { + current_element_history.minimum_log_type = StorageLogQueryType::InitialWrite + } + // write-like things manipulate the stack + if el.raw_query.log_query.rollback == false { + last_write_is_rollback = false; + // write + assert_eq!( + &el.raw_query.log_query.read_value, + current_element_history.current_value.as_ref().unwrap(), + "invalid for query {:?}", + el + ); + current_element_history.current_value = + Some(el.raw_query.log_query.written_value); + current_element_history.changes_stack.push(el.clone()); + } else { + last_write_is_rollback = true; + // pop from stack + let popped_change = current_element_history.changes_stack.pop().unwrap(); + // we do not explicitly swap values, and use rollback flag instead, so compare this way + assert_eq!( + el.raw_query.log_query.read_value, + popped_change.raw_query.log_query.read_value, + "invalid for query {:?}", + el + ); + assert_eq!( + el.raw_query.log_query.written_value, + popped_change.raw_query.log_query.written_value, + "invalid for query {:?}", + el + ); + assert_eq!( + &el.raw_query.log_query.written_value, + current_element_history.current_value.as_ref().unwrap(), + "invalid for query {:?}", + el + ); + // check that we properly apply rollbacks + assert_eq!( + el.raw_query.log_query.shard_id, popped_change.raw_query.log_query.shard_id, + "invalid for query {:?}", + el + ); + assert_eq!( + el.raw_query.log_query.address, popped_change.raw_query.log_query.address, + "invalid for query {:?}", + el + ); + assert_eq!( + el.raw_query.log_query.key, 
popped_change.raw_query.log_query.key, + "invalid for query {:?}", + el + ); + // apply rollback + current_element_history.current_value = Some(el.raw_query.log_query.read_value); + // our convension + } + } + } + + use zk_evm::aux_structures::Timestamp; + + if current_element_history.did_read_at_depth_zero == false + && current_element_history.changes_stack.is_empty() + { + // whatever happened there didn't produce any final changes + assert_eq!( + current_element_history.initial_value.unwrap(), + current_element_history.current_value.unwrap() + ); + assert!(last_write_is_rollback == true); + // here we know that last write was a rollback, and there we no reads after it (otherwise "did_read_at_depth_zero" == true), + // so whatever was an initial value in storage slot it's not ever observed, and we do not need to issue even read here + continue; + } else { + if current_element_history.initial_value.unwrap() + == current_element_history.current_value.unwrap() + { + // no change, but we may need protective read + if current_element_history.did_read_at_depth_zero { + // protective read + let sorted_log_query = StorageLogQuery { + log_query: LogQuery { + timestamp: Timestamp(0), + tx_number_in_block: 0, + aux_byte: 0, + shard_id: candidate.raw_query.log_query.shard_id, + address: candidate.raw_query.log_query.address, + key: candidate.raw_query.log_query.key, + read_value: current_element_history.initial_value.unwrap(), + written_value: current_element_history.current_value.unwrap(), + rw_flag: false, + rollback: false, + is_service: false, + }, + log_type: StorageLogQueryType::Read, + }; + + deduplicated_storage_queries.push(sorted_log_query); + } else { + // we didn't read at depth zero, so it's something like + // - write cell from a into b + // .... 
+ // - write cell from b into a + + // There is a catch here: + // - if it's two "normal" writes, then operator can claim that initial value + // was "a", but it could have been some other, and in this case we want to + // "read" that it was indeed "a" + // - but if the latest "write" was just a rollback, + // then we know that it's basically NOP. We already had a branch above that + // protects us in case of write - rollback - read, so we only need to degrade write into + // read here if the latest write wasn't a rollback + + if current_element_history.changes_stack.is_empty() == false { + // it means that we did accumlate some changes + // degrade to protective read + let sorted_log_query = StorageLogQuery { + log_query: LogQuery { + timestamp: Timestamp(0), + tx_number_in_block: 0, + aux_byte: 0, + shard_id: candidate.raw_query.log_query.shard_id, + address: candidate.raw_query.log_query.address, + key: candidate.raw_query.log_query.key, + read_value: current_element_history.initial_value.unwrap(), + written_value: current_element_history.current_value.unwrap(), + rw_flag: false, + rollback: false, + is_service: false, + }, + log_type: StorageLogQueryType::Read, + }; + + deduplicated_storage_queries.push(sorted_log_query); + } else { + //do nothing + } + } + } else { + // it's final net write + let sorted_log_query = LogQuery { + timestamp: Timestamp(0), + tx_number_in_block: 0, + aux_byte: 0, + shard_id: candidate.raw_query.log_query.shard_id, + address: candidate.raw_query.log_query.address, + key: candidate.raw_query.log_query.key, + read_value: current_element_history.initial_value.unwrap(), + written_value: current_element_history.current_value.unwrap(), + rw_flag: true, + rollback: false, + is_service: false, + }; + let sorted_log_query = StorageLogQuery { + log_query: sorted_log_query, + log_type: current_element_history.minimum_log_type, + }; + + deduplicated_storage_queries.push(sorted_log_query); + } + } + } + + ( + 
sorted_storage_queries_with_extra_timestamp, + deduplicated_storage_queries, + ) +} diff --git a/core/lib/types/src/priority_op_onchain_data.rs b/core/lib/types/src/priority_op_onchain_data.rs new file mode 100644 index 000000000000..84ff177bbc99 --- /dev/null +++ b/core/lib/types/src/priority_op_onchain_data.rs @@ -0,0 +1,53 @@ +use serde::{Deserialize, Serialize}; + +use std::cmp::Ordering; + +use crate::{ + l1::{OpProcessingType, PriorityQueueType}, + H256, U256, +}; + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PriorityOpOnchainData { + pub layer_2_tip_fee: U256, + pub onchain_data_hash: H256, +} + +impl From for Vec { + fn from(data: PriorityOpOnchainData) -> Vec { + let mut raw_data = vec![0u8; 64]; + data.layer_2_tip_fee.to_big_endian(&mut raw_data[..32]); + raw_data[32..].copy_from_slice(data.onchain_data_hash.as_bytes()); + raw_data + } +} + +impl From> for PriorityOpOnchainData { + fn from(data: Vec) -> Self { + Self { + layer_2_tip_fee: U256::from_big_endian(&data[..32]), + onchain_data_hash: H256::from_slice(&data[32..]), + } + } +} + +impl Eq for PriorityOpOnchainData {} + +impl PartialOrd for PriorityOpOnchainData { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.layer_2_tip_fee.cmp(&other.layer_2_tip_fee)) + } +} + +impl Ord for PriorityOpOnchainData { + fn cmp(&self, other: &Self) -> Ordering { + self.layer_2_tip_fee.cmp(&other.layer_2_tip_fee) + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PriorityOpOnchainMetadata { + pub op_processing_type: OpProcessingType, + pub priority_queue_type: PriorityQueueType, + pub onchain_data: PriorityOpOnchainData, +} diff --git a/core/lib/types/src/proofs.rs b/core/lib/types/src/proofs.rs new file mode 100644 index 000000000000..ab1f2eafbb29 --- /dev/null +++ b/core/lib/types/src/proofs.rs @@ -0,0 +1,314 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use std::convert::{TryFrom, TryInto}; +use std::fmt::Debug; +use 
std::ops::Add; +use zkevm_test_harness::abstract_zksync_circuit::concrete_circuits::ZkSyncCircuit; +use zkevm_test_harness::bellman::bn256::Bn256; +use zkevm_test_harness::bellman::plonk::better_better_cs::proof::Proof; +use zkevm_test_harness::witness::full_block_artifact::{ + BlockBasicCircuits, BlockBasicCircuitsPublicInputs, +}; +use zkevm_test_harness::witness::oracle::VmWitnessOracle; +use zkevm_test_harness::{ + LeafAggregationOutputDataWitness, NodeAggregationOutputDataWitness, + SchedulerCircuitInstanceWitness, +}; +use zksync_basic_types::{L1BatchNumber, U256}; + +/// Metadata emitted by merkle tree after processing single storage log +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct StorageLogMetadata { + pub root_hash: Vec, + pub is_write: bool, + pub first_write: bool, + pub merkle_paths: Vec>, + pub leaf_hashed_key: U256, + pub leaf_enumeration_index: u64, + pub value_written: [u8; 32], + pub value_read: [u8; 32], +} + +impl StorageLogMetadata { + pub fn leaf_hashed_key_array(&self) -> [u8; 32] { + let mut result = [0u8; 32]; + self.leaf_hashed_key.to_little_endian(&mut result); + result + } + + pub fn merkle_paths_array(&self) -> Box<[[u8; 32]; 256]> { + let vec_of_arrays = self + .merkle_paths + .clone() + .into_iter() + .map(|vec| TryInto::<[u8; 32]>::try_into(vec).unwrap()) + .collect::>(); + + Box::new(TryInto::<[[u8; 32]; 256]>::try_into(vec_of_arrays).unwrap()) + } +} + +#[derive(Clone)] +pub struct WitnessGeneratorJobMetadata { + pub block_number: L1BatchNumber, + pub proofs: Vec>>>, +} + +#[derive(Clone)] +pub struct WitnessGeneratorJob { + pub block_number: L1BatchNumber, + pub job: WitnessGeneratorJobInput, +} + +/// Represents the sequential number of the proof aggregation round. 
+/// Mostly used to be stored in `aggregation_round` column in `prover_jobs` table +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum AggregationRound { + BasicCircuits = 0, + LeafAggregation = 1, + NodeAggregation = 2, + Scheduler = 3, +} + +impl AggregationRound { + pub fn next(&self) -> Option { + match self { + AggregationRound::BasicCircuits => Some(AggregationRound::LeafAggregation), + AggregationRound::LeafAggregation => Some(AggregationRound::NodeAggregation), + AggregationRound::NodeAggregation => Some(AggregationRound::Scheduler), + AggregationRound::Scheduler => None, + } + } +} + +impl TryFrom for AggregationRound { + type Error = (); + + fn try_from(v: i32) -> Result { + match v { + x if x == AggregationRound::BasicCircuits as i32 => Ok(AggregationRound::BasicCircuits), + x if x == AggregationRound::LeafAggregation as i32 => { + Ok(AggregationRound::LeafAggregation) + } + x if x == AggregationRound::NodeAggregation as i32 => { + Ok(AggregationRound::NodeAggregation) + } + x if x == AggregationRound::Scheduler as i32 => Ok(AggregationRound::Scheduler), + _ => Err(()), + } + } +} + +/// Represents a job for one of the aggregation rounds. +/// `Box` is required by the linter, as the enum variants have vastly different memory footprints. 
+#[derive(Clone)] +pub enum WitnessGeneratorJobInput { + BasicCircuits(Box), + LeafAggregation(Box), + NodeAggregation(Box), + Scheduler(Box), +} + +impl WitnessGeneratorJobInput { + pub fn aggregation_round(&self) -> AggregationRound { + match self { + WitnessGeneratorJobInput::BasicCircuits(_) => AggregationRound::BasicCircuits, + WitnessGeneratorJobInput::LeafAggregation(_) => AggregationRound::LeafAggregation, + WitnessGeneratorJobInput::NodeAggregation(_) => AggregationRound::NodeAggregation, + WitnessGeneratorJobInput::Scheduler(_) => AggregationRound::Scheduler, + } + } +} + +#[derive(Clone)] +pub struct PrepareBasicCircuitsJob { + pub merkle_paths: Vec, + pub next_enumeration_index: u64, +} + +/// Enriched `PrepareBasicCircuitsJob`. All the other fields are taken from the `l1_batches` table. +#[derive(Clone)] +pub struct BasicCircuitWitnessGeneratorInput { + pub block_number: L1BatchNumber, + pub previous_block_timestamp: u64, + pub block_timestamp: u64, + pub used_bytecodes_hashes: Vec, + pub initial_heap_content: Vec<(usize, U256)>, + pub merkle_paths_input: PrepareBasicCircuitsJob, +} + +#[derive(Clone)] +pub struct PrepareLeafAggregationCircuitsJob { + pub basic_circuits: BlockBasicCircuits, + pub basic_circuits_inputs: BlockBasicCircuitsPublicInputs, + pub basic_circuits_proofs: Vec>>>, +} + +#[derive(Clone)] +pub struct PrepareNodeAggregationCircuitJob { + pub previous_level_proofs: Vec>>>, + pub previous_level_leafs_aggregations: Vec>, + pub previous_sequence: Vec< + zkevm_test_harness::encodings::QueueSimulator< + Bn256, + zkevm_test_harness::encodings::recursion_request::RecursionRequest, + 2, + 2, + >, + >, +} + +#[derive(Clone)] +pub struct PrepareSchedulerCircuitJob { + pub incomplete_scheduler_witness: SchedulerCircuitInstanceWitness, + pub final_node_aggregations: NodeAggregationOutputDataWitness, + pub node_final_proof_level_proof: Proof>>, + pub previous_aux_hash: [u8; 32], + pub previous_meta_hash: [u8; 32], +} + +#[derive(Debug, Clone)] 
+pub struct ProverJobMetadata { + pub id: u32, + pub block_number: L1BatchNumber, + pub circuit_type: String, + pub aggregation_round: AggregationRound, + pub sequence_number: usize, +} + +pub struct ProverJob { + pub metadata: ProverJobMetadata, + pub circuit_input: Vec, +} + +pub struct JobPosition { + pub aggregation_round: AggregationRound, + pub sequence_number: usize, +} + +#[derive(Default)] +pub struct ProverJobStatusFailed { + pub started_at: DateTime, + pub error: String, +} + +impl Default for ProverJobStatusSuccessful { + fn default() -> Self { + ProverJobStatusSuccessful { + started_at: DateTime::default(), + time_taken: chrono::Duration::zero(), + } + } +} + +pub struct ProverJobStatusSuccessful { + pub started_at: DateTime, + pub time_taken: chrono::Duration, +} + +#[derive(Default)] +pub struct ProverJobStatusInProgress { + pub started_at: DateTime, +} + +pub struct WitnessJobStatusSuccessful { + pub started_at: DateTime, + pub time_taken: chrono::Duration, +} + +impl Default for WitnessJobStatusSuccessful { + fn default() -> Self { + WitnessJobStatusSuccessful { + started_at: DateTime::default(), + time_taken: chrono::Duration::zero(), + } + } +} + +#[derive(Default)] +pub struct WitnessJobStatusFailed { + pub started_at: DateTime, + pub error: String, +} + +#[derive(strum::Display, strum::EnumString, strum::AsRefStr)] +pub enum ProverJobStatus { + #[strum(serialize = "queued")] + Queued, + #[strum(serialize = "in_progress")] + InProgress(ProverJobStatusInProgress), + #[strum(serialize = "successful")] + Successful(ProverJobStatusSuccessful), + #[strum(serialize = "failed")] + Failed(ProverJobStatusFailed), + #[strum(serialize = "skipped")] + Skipped, + #[strum(serialize = "ignored")] + Ignored, +} + +#[derive(strum::Display, strum::EnumString, strum::AsRefStr)] +pub enum WitnessJobStatus { + #[strum(serialize = "failed")] + Failed(WitnessJobStatusFailed), + #[strum(serialize = "skipped")] + Skipped, + #[strum(serialize = "successful")] + 
Successful(WitnessJobStatusSuccessful), + #[strum(serialize = "waiting_for_artifacts")] + WaitingForArtifacts, + #[strum(serialize = "waiting_for_proofs")] + WaitingForProofs, + #[strum(serialize = "in_progress")] + InProgress, + #[strum(serialize = "queued")] + Queued, +} + +pub struct WitnessJobInfo { + pub block_number: L1BatchNumber, + pub created_at: DateTime, + pub updated_at: DateTime, + pub status: WitnessJobStatus, + pub position: JobPosition, +} + +pub struct ProverJobInfo { + pub id: u32, + pub block_number: L1BatchNumber, + pub circuit_type: String, + pub position: JobPosition, + pub input_length: u64, + pub status: ProverJobStatus, + pub attempts: u32, + pub created_at: DateTime, + pub updated_at: DateTime, +} + +pub struct JobExtendedStatistics { + pub successful_padding: L1BatchNumber, + pub queued_padding: L1BatchNumber, + pub queued_padding_len: u32, + pub active_area: Vec, +} + +#[derive(Debug)] +pub struct JobCountStatistics { + pub queued: usize, + pub in_progress: usize, + pub failed: usize, + pub successful: usize, +} + +impl Add for JobCountStatistics { + type Output = JobCountStatistics; + + fn add(self, rhs: Self) -> Self::Output { + Self { + queued: self.queued + rhs.queued, + in_progress: self.in_progress + rhs.in_progress, + failed: self.failed + rhs.failed, + successful: self.successful + rhs.successful, + } + } +} diff --git a/core/lib/types/src/pubdata_packing.rs b/core/lib/types/src/pubdata_packing.rs new file mode 100644 index 000000000000..3f3955a6231a --- /dev/null +++ b/core/lib/types/src/pubdata_packing.rs @@ -0,0 +1,263 @@ +//! Utilities to efficiently pack pubdata (aka storage access slots). +//! +//! Overall the idea is following: +//! If we have a type with at most X bytes, and *most likely* some leading bytes will be +//! zeroes, we can implement a simple variadic length encoding, compressing the type +//! as . In case of `uint32` it will allow us to represent number +//! `0x00000022` as `0x0122`. 
First byte represents the length: 1 byte. Second byte represents +//! the value itself. Knowledge about the type of encoded value is implied: without such a knowledge, +//! we wouldn't be able to parse pubdata at all. +//! +//! Drawbacks of such an approach are the following: +//! +//! - In case of no leading zeroes, we spend one more byte per value. This one is a minor, because as long +//! as there are more values which *have* leading zeroes, we don't lose anything. +//! - We use complex access keys which may have two components: 32-bit writer ID + 256-bit key, and encoding this +//! pair may be actually longer than just using the hash (which represents the actual key in the tree). +//! To overcome this drawback, we will make the value `0xFF` of the length byte special: this value means that instead +//! of packed key fields we have just an unpacked final hash. At the time of packing we will generate both forms +//! and choose the shorter one. + +use zksync_basic_types::AccountTreeId; +use zksync_utils::h256_to_u256; + +use crate::{StorageKey, StorageLog, H256}; + +const ACCOUNT_TREE_ID_SIZE: usize = 21; +const U256_SIZE: usize = 32; + +const fn max_encoded_size(field_size: usize) -> usize { + field_size + 1 +} + +pub fn pack_smart_contract(account_id: AccountTreeId, bytecode: Vec) -> Vec { + let max_size = max_encoded_size(ACCOUNT_TREE_ID_SIZE) + bytecode.len(); + let mut packed = Vec::with_capacity(max_size); + + packed.append(&mut encode_account_tree_id(account_id)); + + packed +} + +pub const fn max_log_size() -> usize { + // Key is encoded as U168 + U256, value is U256. + max_encoded_size(ACCOUNT_TREE_ID_SIZE) + max_encoded_size(U256_SIZE) * 2 +} + +pub fn pack_storage_log(log: &StorageLog, _hash_key: F) -> Vec +where + F: FnOnce(&StorageKey) -> Vec, +{ + pack_storage_log_packed_old(log) +} + +/// Does not pack anything; just encodes account address, storage key and storage value as bytes. +/// Encoding is exactly 20 + 32 + 32 bytes in size. 
+pub fn pack_storage_log_unpacked(log: &StorageLog) -> Vec { + log.to_bytes() +} + +/// Packs address, storage key and storage value as 3 separate values. +/// Encoding is at most 21 + 33 + 33 bytes in size. +pub fn pack_storage_log_packed_old(log: &StorageLog) -> Vec { + let mut packed_log = Vec::with_capacity(max_log_size()); + + packed_log.append(&mut encode_key(&log.key, |key| { + key.key().to_fixed_bytes().to_vec() + })); + packed_log.append(&mut encode_h256(log.value)); + + packed_log +} + +/// Computes the hash of the (address, storage key) and packs the storage value. +/// Encoding is at most 32 + 33 bytes in size. +pub fn pack_storage_log_packed_new(log: &StorageLog) -> Vec { + let mut packed_log = Vec::with_capacity(max_log_size()); + + packed_log.extend_from_slice(&log.key.hashed_key().to_fixed_bytes()); + packed_log.append(&mut encode_h256(log.value)); + + packed_log +} + +fn encode_key(key: &StorageKey, hash_key: F) -> Vec +where + F: FnOnce(&StorageKey) -> Vec, +{ + let mut key_hash = Vec::with_capacity(max_encoded_size(U256_SIZE)); + key_hash.push(0xFFu8); + key_hash.append(&mut hash_key(key)); + + let encoded_storage_key = encode_h256(*key.key()); + + let mut storage_key_part = if encoded_storage_key.len() <= key_hash.len() { + encoded_storage_key + } else { + key_hash + }; + + let mut encoded_key = + Vec::with_capacity(max_encoded_size(U256_SIZE) + max_encoded_size(ACCOUNT_TREE_ID_SIZE)); + encoded_key.append(&mut encode_account_tree_id(*key.account())); + encoded_key.append(&mut storage_key_part); + + encoded_key +} + +fn encode_account_tree_id(val: AccountTreeId) -> Vec { + let mut result = vec![0; 21]; + result[0] = 20; + result[1..].copy_from_slice(&val.to_fixed_bytes()); + + result +} + +fn encode_h256(val: H256) -> Vec { + let val = h256_to_u256(val); + let leading_zero_bytes = (val.leading_zeros() / 8) as usize; + let result_vec_length = (1 + U256_SIZE) - leading_zero_bytes; + let val_len = result_vec_length - 1; + let mut result = vec![0; 
result_vec_length]; + + let mut val_bytes = [0u8; 32]; + val.to_big_endian(&mut val_bytes); + + result[0] = val_len as u8; + if val_len > 0 { + result[1..].copy_from_slice(&val_bytes[leading_zero_bytes..]); + } + + result +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{AccountTreeId, Address, U256}; + use zksync_utils::{u256_to_h256, u64_to_h256}; + + fn check_encoding(f: F, input: impl Into, output: &str) + where + F: Fn(T) -> Vec, + { + let output = hex::decode(output).unwrap(); + assert_eq!(f(input.into()), output); + } + + #[test] + fn u256_encoding() { + let test_vector = vec![ + (u64_to_h256(0x00_00_00_00_u64), "00"), + (u64_to_h256(0x00_00_00_01_u64), "0101"), + (u64_to_h256(0x00_00_00_FF_u64), "01FF"), + (u64_to_h256(0x00_00_01_00_u64), "020100"), + (u64_to_h256(0x10_01_00_00_u64), "0410010000"), + (u64_to_h256(0xFF_FF_FF_FF_u64), "04FFFFFFFF"), + ]; + + for (input, output) in test_vector { + check_encoding(encode_h256, input, output); + } + + let max = u256_to_h256(U256::max_value()); + check_encoding( + encode_h256, + max, + "20FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + ); + + let one_leading_zero_bit = U256::max_value() >> 1; + assert_eq!(one_leading_zero_bit.leading_zeros(), 1); + let one_leading_zero_bit = u256_to_h256(one_leading_zero_bit); + check_encoding( + encode_h256, + one_leading_zero_bit, + "207FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + ); + + let one_leading_zero_byte = U256::max_value() >> 8; + assert_eq!(one_leading_zero_byte.leading_zeros(), 8); + let one_leading_zero_byte = u256_to_h256(one_leading_zero_byte); + check_encoding( + encode_h256, + one_leading_zero_byte, + "1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + ); + } + + fn pseudo_hash(key: &StorageKey) -> Vec { + // Just return something 32-bit long. 
+ key.key().to_fixed_bytes().to_vec() + } + + #[test] + fn key_encoding() { + // Raw key must be encoded in the compressed form, because hash will be longer. + let short_key = StorageKey::new( + AccountTreeId::new(Address::from_slice(&[0x0A; 20])), + u64_to_h256(0xDEAD_F00D_u64), + ); + // `140A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A` is encoding of `AccountTreeId`. + // 0x14 is number of bytes that should be decoded. + let expected_output = "140A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A0A04DEADF00D"; + + check_encoding( + |key| encode_key(key, pseudo_hash), + &short_key, + expected_output, + ); + } + + /// Compares multiple packing approaches we have. Does not assert anything. + /// If you see this test and know that the packing algorithm is already chosen and used in + /// production, please remvoe this test. Also, remove the similar tests in the `runtime_context` + /// module of `zksync_state` crate. + #[test] + fn pack_log_comparison() { + let log1 = StorageLog::new_write_log( + StorageKey::new(AccountTreeId::new(Address::random()), H256::random()), + H256::random(), + ); + let log2 = StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::repeat_byte(0x11)), + H256::repeat_byte(0x22), + ), + H256::repeat_byte(0x33), + ); + let log3 = StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::repeat_byte(0x11)), + H256::from_low_u64_be(0x01), + ), + H256::from_low_u64_be(0x02), + ); + let log4 = StorageLog::new_write_log( + StorageKey::new( + AccountTreeId::new(Address::repeat_byte(0x11)), + H256::repeat_byte(0x22), + ), + H256::from_low_u64_be(0x02), + ); + + let test_vector = &[ + (log1, "Random values"), + (log2, "32-byte key/value"), + (log3, "1-byte key/value"), + (log4, "32-byte key/1-byte value"), + ]; + + for (log, description) in test_vector { + let no_packing = pack_storage_log_unpacked(log); + let old_packing = pack_storage_log_packed_old(log); + let new_packing = pack_storage_log_packed_new(log); + + println!("Packing 
{}", description); + println!("No packing: {} bytes", no_packing.len()); + println!("Old packing: {} bytes", old_packing.len()); + println!("New packing: {} bytes", new_packing.len()); + println!("-----------------------"); + } + } +} diff --git a/core/lib/types/src/storage/log.rs b/core/lib/types/src/storage/log.rs new file mode 100644 index 000000000000..b94da2a1ea19 --- /dev/null +++ b/core/lib/types/src/storage/log.rs @@ -0,0 +1,141 @@ +use crate::{H160, U256}; +use serde::{Deserialize, Serialize}; +use zk_evm::aux_structures::{LogQuery, Timestamp}; +use zkevm_test_harness::witness::sort_storage_access::LogQueryLike; +use zksync_basic_types::AccountTreeId; +use zksync_utils::u256_to_h256; + +use super::{StorageKey, StorageValue, H256}; + +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub enum StorageLogKind { + Read, + Write, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct StorageLog { + pub kind: StorageLogKind, + pub key: StorageKey, + pub value: StorageValue, +} + +impl StorageLog { + pub fn from_log_query(log: &StorageLogQuery) -> Self { + let key = StorageKey::new( + AccountTreeId::new(log.log_query.address), + u256_to_h256(log.log_query.key), + ); + if log.log_query.rw_flag { + if log.log_query.rollback { + Self::new_write_log(key, u256_to_h256(log.log_query.read_value)) + } else { + Self::new_write_log(key, u256_to_h256(log.log_query.written_value)) + } + } else { + Self::new_read_log(key, u256_to_h256(log.log_query.read_value)) + } + } + + pub fn new_read_log(key: StorageKey, value: StorageValue) -> Self { + Self { + kind: StorageLogKind::Read, + key, + value, + } + } + + pub fn new_write_log(key: StorageKey, value: StorageValue) -> Self { + Self { + kind: StorageLogKind::Write, + key, + value, + } + } + + /// Encodes the log key and value into a byte sequence. + pub fn to_bytes(&self) -> Vec { + // Concatenate account, key and value. 
+ let mut output = self.key.account().to_fixed_bytes().to_vec(); + output.extend_from_slice(&self.key.key().to_fixed_bytes()); + output.extend_from_slice(self.value.as_fixed_bytes()); + + output + } +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct WitnessStorageLog { + pub storage_log: StorageLog, + pub previous_value: H256, +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum StorageLogQueryType { + Read, + InitialWrite, + RepeatedWrite, +} + +/// Log query, which handle initial and repeated writes to the storage +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct StorageLogQuery { + pub log_query: LogQuery, + pub log_type: StorageLogQueryType, +} + +impl LogQueryLike for StorageLogQuery { + fn shard_id(&self) -> u8 { + self.log_query.shard_id + } + + fn address(&self) -> H160 { + self.log_query.address + } + + fn key(&self) -> U256 { + self.log_query.key + } + + fn rw_flag(&self) -> bool { + self.log_query.rw_flag + } + + fn rollback(&self) -> bool { + self.log_query.rollback + } + + fn read_value(&self) -> U256 { + self.log_query.read_value + } + + fn written_value(&self) -> U256 { + self.log_query.written_value + } + + fn create_partially_filled_from_fields( + shard_id: u8, + address: H160, + key: U256, + read_value: U256, + written_value: U256, + rw_flag: bool, + ) -> Self { + Self { + log_type: StorageLogQueryType::Read, + log_query: LogQuery { + timestamp: Timestamp::empty(), + tx_number_in_block: 0, + aux_byte: 0, + shard_id, + address, + key, + read_value, + written_value, + rw_flag, + rollback: false, + is_service: false, + }, + } + } +} diff --git a/core/lib/types/src/storage/mod.rs b/core/lib/types/src/storage/mod.rs new file mode 100644 index 000000000000..2e7202dfdc42 --- /dev/null +++ b/core/lib/types/src/storage/mod.rs @@ -0,0 +1,139 @@ +use core::fmt::Debug; + +use blake2::{Blake2s256, Digest}; +use serde::{Deserialize, Serialize}; +use zksync_basic_types::web3::signing::keccak256; + +use 
crate::{AccountTreeId, Address, H160, H256, U256}; + +pub mod log; +pub mod writes; + +pub use log::*; +pub use zksync_config::constants::*; +use zksync_utils::address_to_h256; + +/// Typed fully qualified key of the storage slot in global state tree. +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] +pub struct StorageKey { + account: AccountTreeId, + key: H256, +} + +impl StorageKey { + pub fn new(account: AccountTreeId, key: H256) -> Self { + Self { account, key } + } + + pub fn account(&self) -> &AccountTreeId { + &self.account + } + + pub fn key(&self) -> &H256 { + &self.key + } + + pub fn address(&self) -> &Address { + self.account.address() + } + + pub fn raw_hashed_key(address: &H160, key: &H256) -> [u8; 32] { + let mut bytes = [0u8; 64]; + bytes[12..32].copy_from_slice(&address.0); + U256::from(key.to_fixed_bytes()).to_big_endian(&mut bytes[32..64]); + + let mut hash = [0u8; 32]; + hash.copy_from_slice(Blake2s256::digest(bytes).as_slice()); + hash + } + + pub fn hashed_key(&self) -> H256 { + Self::raw_hashed_key(self.address(), self.key()).into() + } + + pub fn hashed_key_u256(&self) -> U256 { + U256::from_little_endian(&Self::raw_hashed_key(self.address(), self.key())) + } +} + +// Returns the storage key where the value for mapping(address => x) +// at position `position` is stored. 
+fn get_address_mapping_key(address: &Address, position: H256) -> H256 { + let padded_address = address_to_h256(address); + H256(keccak256( + &[padded_address.as_bytes(), position.as_bytes()].concat(), + )) +} + +pub fn get_nonce_key(account: &Address) -> StorageKey { + let nonce_manager = AccountTreeId::new(NONCE_HOLDER_ADDRESS); + + // The `minNonce` (used as nonce for EOAs) is stored in a mapping inside the NONCE_HOLDER system contract + let key = get_address_mapping_key(account, H256::zero()); + + StorageKey::new(nonce_manager, key) +} + +pub fn get_code_key(account: &Address) -> StorageKey { + let account_code_storage = AccountTreeId::new(ACCOUNT_CODE_STORAGE_ADDRESS); + StorageKey::new(account_code_storage, address_to_h256(account)) +} + +pub fn get_known_code_key(hash: &H256) -> StorageKey { + let known_codes_storage = AccountTreeId::new(KNOWN_CODES_STORAGE_ADDRESS); + StorageKey::new(known_codes_storage, *hash) +} + +pub fn get_system_context_key(key: H256) -> StorageKey { + let system_context = AccountTreeId::new(SYSTEM_CONTEXT_ADDRESS); + StorageKey::new(system_context, key) +} + +pub fn get_is_account_key(account: &Address) -> StorageKey { + let deployer = AccountTreeId::new(CONTRACT_DEPLOYER_ADDRESS); + + // The `is_account` is stored in a mapping inside the deployer system contract + let key = get_address_mapping_key(account, H256::zero()); + + StorageKey::new(deployer, key) +} + +pub type StorageValue = H256; + +/// Allows to read from storage. +/// Provides ability to read value by corresponsing key and load contract +/// code from supplied address. +pub trait ZkSyncReadStorage: Debug { + /// Read value of the key, this should not produce logs. + fn read_value(&mut self, key: &StorageKey) -> StorageValue; + + /// Returns if the write to the given key is initial. + fn is_write_initial(&mut self, key: &StorageKey) -> bool; + + /// Load the contract code deployed to the provided address. 
+ fn load_contract(&mut self, address: Address) -> Option>; + + /// Load the factory dependency code by its hash. + fn load_factory_dep(&mut self, hash: H256) -> Option>; +} + +pub fn get_system_context_init_logs(chain_id: H256) -> Vec { + vec![ + StorageLog::new_write_log( + get_system_context_key(SYSTEM_CONTEXT_CHAIN_ID_POSITION), + chain_id, + ), + StorageLog::new_write_log( + get_system_context_key(SYSTEM_CONTEXT_BLOCK_GAS_LIMIT_POSITION), + SYSTEM_CONTEXT_BLOCK_GAS_LIMIT, + ), + StorageLog::new_write_log( + get_system_context_key(SYSTEM_CONTEXT_COINBASE_POSITION), + address_to_h256(&BOOTLOADER_ADDRESS), + ), + StorageLog::new_write_log( + get_system_context_key(SYSTEM_CONTEXT_DIFFICULTY_POSITION), + SYSTEM_CONTEXT_DIFFICULTY, + ), + ] +} diff --git a/core/lib/types/src/storage/writes.rs b/core/lib/types/src/storage/writes.rs new file mode 100644 index 000000000000..a83e5bd3daf2 --- /dev/null +++ b/core/lib/types/src/storage/writes.rs @@ -0,0 +1,64 @@ +use crate::H256; +use serde::{Deserialize, Serialize}; +use zksync_basic_types::U256; + +/// In vm there are two types of writes Initial and Repeated. After the first write to the leaf we assign an index to it +/// and in the future we should use index instead of full key. It allows us to compress the data. 
+#[derive(Clone, Debug, Deserialize, Serialize, Default, Eq, PartialEq)] +pub struct InitialStorageWrite { + pub key: U256, + pub value: H256, +} + +#[derive(Clone, Debug, Deserialize, Serialize, Default, Eq, PartialEq)] +pub struct RepeatedStorageWrite { + pub index: u64, + pub value: H256, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::commitment::serialize_commitments; + use crate::{H256, U256}; + + #[test] + fn calculate_hash_for_storage_writes() { + let initial_writes = vec![ + InitialStorageWrite { + key: U256::from(1u32), + value: H256::from([1; 32]), + }, + InitialStorageWrite { + key: U256::from(2u32), + value: H256::from([3; 32]), + }, + ]; + let bytes = serialize_commitments(&initial_writes); + let size = "00000002"; + let initial_write_1= "01000000000000000000000000000000000000000000000000000000000000000101010101010101010101010101010101010101010101010101010101010101"; + let initial_write_2= "02000000000000000000000000000000000000000000000000000000000000000303030303030303030303030303030303030303030303030303030303030303"; + let expected_bytes = + hex::decode(format!("{}{}{}", size, initial_write_1, initial_write_2)).unwrap(); + assert_eq!(expected_bytes, bytes); + let repeated_writes = vec![ + RepeatedStorageWrite { + index: 1, + value: H256::from([1; 32]), + }, + RepeatedStorageWrite { + index: 2, + value: H256::from([3; 32]), + }, + ]; + let bytes = serialize_commitments(&repeated_writes); + let size = "00000002"; + let repeated_write_1 = + "00000000000000010101010101010101010101010101010101010101010101010101010101010101"; + let repeated_write_2 = + "00000000000000020303030303030303030303030303030303030303030303030303030303030303"; + let expected_bytes = + hex::decode(format!("{}{}{}", size, repeated_write_1, repeated_write_2)).unwrap(); + assert_eq!(expected_bytes, bytes); + } +} diff --git a/core/lib/types/src/system_contracts.rs b/core/lib/types/src/system_contracts.rs new file mode 100644 index 000000000000..27c84c10a13f --- /dev/null 
+++ b/core/lib/types/src/system_contracts.rs @@ -0,0 +1,64 @@ +use zksync_basic_types::{AccountTreeId, Address, U256}; +use zksync_config::constants::{BOOTLOADER_UTILITIES_ADDRESS, EVENT_WRITER_ADDRESS}; +use zksync_contracts::read_sys_contract_bytecode; + +use crate::{ + block::DeployedContract, ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, + CONTRACT_DEPLOYER_ADDRESS, ECRECOVER_PRECOMPILE_ADDRESS, IMMUTABLE_SIMULATOR_STORAGE_ADDRESS, + KECCAK256_PRECOMPILE_ADDRESS, KNOWN_CODES_STORAGE_ADDRESS, L1_MESSENGER_ADDRESS, + L2_ETH_TOKEN_ADDRESS, MSG_VALUE_SIMULATOR_ADDRESS, NONCE_HOLDER_ADDRESS, + SHA256_PRECOMPILE_ADDRESS, SYSTEM_CONTEXT_ADDRESS, +}; +use once_cell::sync::Lazy; + +// Note, that in the NONCE_HOLDER_ADDRESS's storage the nonces of accounts +// are stored in the following form: +// 2^128 * deployment_nonce + tx_nonce, +// where `tx_nonce` should be number of transactions, the account has processed +// and the `deployment_nonce` should be the number of contracts. +pub const TX_NONCE_INCREMENT: U256 = U256([1, 0, 0, 0]); // 1 +pub const DEPLOYMENT_NONCE_INCREMENT: U256 = U256([0, 0, 1, 0]); // 2^128 + +static SYSTEM_CONTRACTS: Lazy> = Lazy::new(|| { + let mut deployed_system_contracts = [ + ("", "AccountCodeStorage", ACCOUNT_CODE_STORAGE_ADDRESS), + ("", "NonceHolder", NONCE_HOLDER_ADDRESS), + ("", "KnownCodesStorage", KNOWN_CODES_STORAGE_ADDRESS), + ( + "", + "ImmutableSimulator", + IMMUTABLE_SIMULATOR_STORAGE_ADDRESS, + ), + ("", "ContractDeployer", CONTRACT_DEPLOYER_ADDRESS), + ("", "L1Messenger", L1_MESSENGER_ADDRESS), + ("", "MsgValueSimulator", MSG_VALUE_SIMULATOR_ADDRESS), + ("", "L2EthToken", L2_ETH_TOKEN_ADDRESS), + ("precompiles/", "Keccak256", KECCAK256_PRECOMPILE_ADDRESS), + ("precompiles/", "SHA256", SHA256_PRECOMPILE_ADDRESS), + ("precompiles/", "Ecrecover", ECRECOVER_PRECOMPILE_ADDRESS), + ("", "SystemContext", SYSTEM_CONTEXT_ADDRESS), + ("", "EventWriter", EVENT_WRITER_ADDRESS), + ("", "BootloaderUtilities", 
BOOTLOADER_UTILITIES_ADDRESS), + ] + .map(|(path, name, address)| DeployedContract { + account_id: AccountTreeId::new(address), + bytecode: read_sys_contract_bytecode(path, name), + }) + .to_vec(); + + let empty_bytecode = read_sys_contract_bytecode("", "EmptyContract"); + // For now, only zero address and the bootloader address have empty bytecode at the init + // In the future, we might want to set all of the system contracts this way. + let empty_system_contracts = + [Address::zero(), BOOTLOADER_ADDRESS].map(|address| DeployedContract { + account_id: AccountTreeId::new(address), + bytecode: empty_bytecode.clone(), + }); + + deployed_system_contracts.extend(empty_system_contracts); + deployed_system_contracts +}); + +pub fn get_system_smart_contracts() -> Vec { + SYSTEM_CONTRACTS.clone() +} diff --git a/core/lib/types/src/tokens.rs b/core/lib/types/src/tokens.rs new file mode 100644 index 000000000000..c89b8af01d92 --- /dev/null +++ b/core/lib/types/src/tokens.rs @@ -0,0 +1,53 @@ +use chrono::{DateTime, Utc}; +use num::{rational::Ratio, BigUint}; +use serde::{Deserialize, Serialize}; +use zksync_basic_types::Address; +pub use zksync_config::constants::ETHEREUM_ADDRESS; +use zksync_utils::UnsignedRatioSerializeAsDecimal; + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct TokenInfo { + pub l1_address: Address, + pub l2_address: Address, + pub metadata: TokenMetadata, +} + +/// Relevant information about tokens supported by zkSync protocol. +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct TokenMetadata { + /// Token name (e.g. "Ethereum" or "USD Coin") + pub name: String, + /// Token symbol (e.g. "ETH" or "USDC") + pub symbol: String, + /// Token precision (e.g. 18 for "ETH" so "1.0" ETH = 10e18 as U256 number) + pub decimals: u8, +} + +impl TokenMetadata { + /// Creates a default representation of token data, which will be used for tokens that have + /// not known metadata. 
+ pub fn default(address: Address) -> Self { + let default_name = format!("ERC20-{:x}", address); + Self { + name: default_name.clone(), + symbol: default_name, + decimals: 18, + } + } +} + +/// Token price known to the zkSync network. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TokenPrice { + #[serde(with = "UnsignedRatioSerializeAsDecimal")] + pub usd_price: Ratio, + pub last_updated: DateTime, +} + +/// Token price known to the zkSync network. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TokenMarketVolume { + #[serde(with = "UnsignedRatioSerializeAsDecimal")] + pub market_volume: Ratio, + pub last_updated: DateTime, +} diff --git a/core/lib/types/src/transaction_request.rs b/core/lib/types/src/transaction_request.rs new file mode 100644 index 000000000000..86321ad4b9f6 --- /dev/null +++ b/core/lib/types/src/transaction_request.rs @@ -0,0 +1,1219 @@ +use std::convert::{TryFrom, TryInto}; + +use rlp::{DecoderError, Rlp, RlpStream}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use tiny_keccak::keccak256; +use zksync_basic_types::H256; +use zksync_config::constants::MAX_GAS_PER_PUBDATA_BYTE; +use zksync_utils::bytecode::{hash_bytecode, validate_bytecode, InvalidBytecodeError}; + +use crate::{ + web3::types::AccessList, Address, Bytes, EIP712TypedStructure, Eip712Domain, L2ChainId, Nonce, + PackedEthSignature, StructBuilder, U256, U64, +}; + +use super::{EIP_1559_TX_TYPE, EIP_2930_TX_TYPE, EIP_712_TX_TYPE}; +use crate::fee::Fee; +use crate::l2::{L2Tx, TransactionType}; +use zksync_utils::u256_to_h256; + +/// Call contract request (eth_call / eth_estimateGas) +/// +/// When using this for `eth_estimateGas`, all the fields +/// are optional. However, for usage in `eth_call` the +/// `to` field must be provided. 
+#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CallRequest { + /// Sender address (None for arbitrary address) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub from: Option
, + /// To address (None allowed for eth_estimateGas) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub to: Option
, + /// Supplied gas (None for sensible default) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub gas: Option, + /// Gas price (None for sensible default) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub gas_price: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub max_fee_per_gas: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub max_priority_fee_per_gas: Option, + /// Transfered value (None for no transfer) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub value: Option, + /// Data (None for empty data) + #[serde(default, skip_serializing_if = "Option::is_none")] + pub data: Option, + + #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] + pub transaction_type: Option, + /// Access list + #[serde(default, skip_serializing_if = "Option::is_none")] + pub access_list: Option, + /// Eip712 meta + #[serde(default, skip_serializing_if = "Option::is_none")] + pub eip712_meta: Option, +} + +impl CallRequest { + /// Funtion to return a builder for a Call Request + pub fn builder() -> CallRequestBuilder { + CallRequestBuilder::default() + } +} + +/// Call Request Builder +#[derive(Clone, Debug, Default)] +pub struct CallRequestBuilder { + call_request: CallRequest, +} + +impl CallRequestBuilder { + /// Set sender address (None for arbitrary address) + pub fn from(mut self, from: Address) -> Self { + self.call_request.from = Some(from); + self + } + + /// Set to address (None allowed for eth_estimateGas) + pub fn to(mut self, to: Address) -> Self { + self.call_request.to = Some(to); + self + } + + /// Set supplied gas (None for sensible default) + pub fn gas(mut self, gas: U256) -> Self { + self.call_request.gas = Some(gas); + self + } + + /// Set transfered value (None for no transfer) + pub fn gas_price(mut self, gas_price: U256) -> Self { + self.call_request.gas_price = Some(gas_price); + self + } + + pub fn max_fee_per_gas(mut self, 
max_fee_per_gas: U256) -> Self { + self.call_request.max_fee_per_gas = Some(max_fee_per_gas); + self + } + + pub fn max_priority_fee_per_gas(mut self, max_priority_fee_per_gas: U256) -> Self { + self.call_request.max_priority_fee_per_gas = Some(max_priority_fee_per_gas); + self + } + + /// Set transfered value (None for no transfer) + pub fn value(mut self, value: U256) -> Self { + self.call_request.value = Some(value); + self + } + + /// Set data (None for empty data) + pub fn data(mut self, data: Bytes) -> Self { + self.call_request.data = Some(data); + self + } + + /// Set transaction type, Some(1) for AccessList transaction, None for Legacy + pub fn transaction_type(mut self, transaction_type: U64) -> Self { + self.call_request.transaction_type = Some(transaction_type); + self + } + + /// Set access list + pub fn access_list(mut self, access_list: AccessList) -> Self { + self.call_request.access_list = Some(access_list); + self + } + + /// Set meta + pub fn eip712_meta(mut self, eip712_meta: Eip712Meta) -> Self { + self.call_request.eip712_meta = Some(eip712_meta); + self + } + + /// build the Call Request + pub fn build(&self) -> CallRequest { + self.call_request.clone() + } +} + +#[derive(Debug, Error)] +pub enum SerializationTransactionError { + #[error("transaction type is not supported")] + UnknownTransactionFormat, + #[error("toAddressIsNull")] + ToAddressIsNull, + #[error("incompleteSignature")] + IncompleteSignature, + #[error("fromAddressIsNull")] + FromAddressIsNull, + #[error("priceLimitToLow")] + PriceLimitToLow, + #[error("wrongToken")] + WrongToken, + #[error("decodeRlpError {0}")] + DecodeRlpError(#[from] DecoderError), + #[error("invalid signature")] + MalformedSignature, + #[error("wrong chain id {}", .0.unwrap_or_default())] + WrongChainId(Option), + #[error("malformed paymaster params")] + MalforedPaymasterParams, + #[error("factory dependency #{0} is invalid: {1}")] + InvalidFactoryDependencies(usize, InvalidBytecodeError), + #[error("access 
lists are not supported")] + AccessListsNotSupported, + #[error("nonce has max value")] + TooBigNonce, +} + +/// Description of a Transaction, pending or in the chain. +#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Default)] +#[serde(rename_all = "camelCase")] +pub struct TransactionRequest { + /// Nonce + pub nonce: U256, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub from: Option
, + /// Recipient (None when contract creation) + pub to: Option
, + /// Transferred value + pub value: U256, + /// Gas Price + pub gas_price: U256, + /// Gas amount + pub gas: U256, + /// EIP-1559 part of gas price that goes to miners + #[serde(default, skip_serializing_if = "Option::is_none")] + pub max_priority_fee_per_gas: Option, + /// Input data + pub input: Bytes, + /// ECDSA recovery id + #[serde(default, skip_serializing_if = "Option::is_none")] + pub v: Option, + /// ECDSA signature r, 32 bytes + #[serde(default, skip_serializing_if = "Option::is_none")] + pub r: Option, + /// ECDSA signature s, 32 bytes + #[serde(default, skip_serializing_if = "Option::is_none")] + pub s: Option, + /// Raw transaction data + #[serde(default, skip_serializing_if = "Option::is_none")] + pub raw: Option, + /// Transaction type, Some(1) for AccessList transaction, None for Legacy + #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] + pub transaction_type: Option, + /// Access list + #[serde(default, skip_serializing_if = "Option::is_none")] + pub access_list: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub eip712_meta: Option, + /// Chain ID + #[serde(default, skip_serializing_if = "Option::is_none")] + pub chain_id: Option, +} + +#[derive(Default, Serialize, Deserialize, Clone, PartialEq, Debug, Eq)] +#[serde(rename_all = "camelCase")] +pub struct PaymasterParams { + pub paymaster: Address, + pub paymaster_input: Vec, +} + +impl PaymasterParams { + fn from_vector(value: Vec>) -> Result, SerializationTransactionError> { + if value.is_empty() { + return Ok(None); + } + if value.len() != 2 || value[0].len() != 20 { + return Err(SerializationTransactionError::MalforedPaymasterParams); + } + + let result = Some(Self { + paymaster: Address::from_slice(&value[0]), + paymaster_input: value[1].clone(), + }); + + Ok(result) + } +} + +#[derive(Default, Serialize, Deserialize, Clone, PartialEq, Debug)] +#[serde(rename_all = "camelCase")] +pub struct Eip712Meta { + pub gas_per_pubdata: U256, + // 
This field need to backward compatibility with the old way of withdraw. Will be deleted after public testnet + #[serde(default)] + pub factory_deps: Option>>, + pub custom_signature: Option>, + pub paymaster_params: Option, +} + +impl Eip712Meta { + pub fn rlp_append(&self, rlp: &mut RlpStream) { + rlp.append(&self.gas_per_pubdata); + if let Some(factory_deps) = &self.factory_deps { + rlp.begin_list(factory_deps.len()); + for dep in factory_deps.iter() { + rlp.append(&dep.as_slice()); + } + } else { + rlp.begin_list(0); + } + + rlp_opt(rlp, &self.custom_signature); + + if let Some(paymaster_params) = &self.paymaster_params { + rlp.begin_list(2); + rlp.append(&paymaster_params.paymaster.as_bytes()); + rlp.append(&paymaster_params.paymaster_input); + } else { + rlp.begin_list(0); + } + } +} + +impl EIP712TypedStructure for TransactionRequest { + const TYPE_NAME: &'static str = "Transaction"; + + fn build_structure(&self, builder: &mut BUILDER) { + let meta = self + .eip712_meta + .as_ref() + .expect("We can sign transaction only with meta"); + builder.add_member( + "txType", + &self + .transaction_type + .map(|x| U256::from(x.as_u64())) + .unwrap_or_else(|| U256::from(EIP_712_TX_TYPE)), + ); + builder.add_member( + "from", + &U256::from( + self.from + .expect("We can only sign transactions with known sender") + .as_bytes(), + ), + ); + builder.add_member("to", &U256::from(self.to.unwrap_or_default().as_bytes())); + builder.add_member("gasLimit", &self.gas); + builder.add_member("gasPerPubdataByteLimit", &meta.gas_per_pubdata); + builder.add_member("maxFeePerGas", &self.gas_price); + builder.add_member( + "maxPriorityFeePerGas", + &self.max_priority_fee_per_gas.unwrap_or(self.gas_price), + ); + builder.add_member( + "paymaster", + &U256::from(self.get_paymaster().unwrap_or_default().as_bytes()), + ); + builder.add_member("nonce", &self.nonce); + builder.add_member("value", &self.value); + builder.add_member("data", &self.input.0.as_slice()); + + let 
factory_dep_hashes: Vec<_> = self + .get_factory_deps() + .into_iter() + .map(|dep| hash_bytecode(&dep)) + .collect(); + builder.add_member("factoryDeps", &factory_dep_hashes.as_slice()); + + builder.add_member( + "paymasterInput", + &self.get_paymaster_input().unwrap_or_default().as_slice(), + ); + } +} + +impl TransactionRequest { + pub fn get_custom_signature(&self) -> Option> { + self.eip712_meta + .as_ref() + .and_then(|meta| meta.custom_signature.as_ref()) + .cloned() + } + + pub fn get_paymaster(&self) -> Option
{ + self.eip712_meta + .clone() + .and_then(|meta| meta.paymaster_params) + .map(|params| params.paymaster) + } + + pub fn get_paymaster_input(&self) -> Option> { + self.eip712_meta + .clone() + .and_then(|meta| meta.paymaster_params) + .map(|params| params.paymaster_input) + } + + pub fn get_factory_deps(&self) -> Vec> { + self.eip712_meta + .clone() + .and_then(|meta| meta.factory_deps) + .unwrap_or_default() + } + + pub fn get_signature(&self) -> Result, SerializationTransactionError> { + let custom_signature = self.get_custom_signature(); + if let Some(custom_sig) = custom_signature { + if !custom_sig.is_empty() { + // There was a custom signature supplied, it overrides + // the v/r/s signature + return Ok(custom_sig); + } + } + + let packed_v = self + .v + .ok_or(SerializationTransactionError::IncompleteSignature)? + .as_u64(); + let v = if !self.is_legacy_tx() { + packed_v + .try_into() + .map_err(|_| SerializationTransactionError::MalformedSignature)? + } else { + let (v, _) = PackedEthSignature::unpack_v(packed_v) + .map_err(|_| SerializationTransactionError::MalformedSignature)?; + v + }; + + let packed_eth_signature = PackedEthSignature::from_rsv( + &u256_to_h256( + self.r + .ok_or(SerializationTransactionError::IncompleteSignature)?, + ), + &u256_to_h256( + self.s + .ok_or(SerializationTransactionError::IncompleteSignature)?, + ), + v, + ); + + Ok(packed_eth_signature.serialize_packed().to_vec()) + } + + pub fn get_signed_bytes(&self, signature: &PackedEthSignature, chain_id: L2ChainId) -> Vec { + let mut rlp = RlpStream::new(); + self.rlp(&mut rlp, *chain_id, Some(signature)); + let mut data = rlp.out().to_vec(); + if let Some(tx_type) = self.transaction_type { + data.insert(0, tx_type.as_u64() as u8); + } + data + } + + pub fn rlp(&self, rlp: &mut RlpStream, chain_id: u16, signature: Option<&PackedEthSignature>) { + rlp.begin_unbounded_list(); + + match self.transaction_type { + // EIP-2930 (0x01) + Some(x) if x == EIP_2930_TX_TYPE.into() => { + // 
rlp_opt(rlp, &self.chain_id); + rlp.append(&chain_id); + rlp.append(&self.nonce); + rlp.append(&self.gas_price); + rlp.append(&self.gas); + rlp_opt(rlp, &self.to); + rlp.append(&self.value); + rlp.append(&self.input.0); + access_list_rlp(rlp, &self.access_list); + } + // EIP-1559 (0x02) + Some(x) if x == EIP_1559_TX_TYPE.into() => { + // rlp_opt(rlp, &self.chain_id); + rlp.append(&chain_id); + rlp.append(&self.nonce); + rlp_opt(rlp, &self.max_priority_fee_per_gas); + rlp.append(&self.gas_price); + rlp.append(&self.gas); + rlp_opt(rlp, &self.to); + rlp.append(&self.value); + rlp.append(&self.input.0); + access_list_rlp(rlp, &self.access_list); + } + // EIP-712 + Some(x) if x == EIP_712_TX_TYPE.into() => { + rlp.append(&self.nonce); + rlp_opt(rlp, &self.max_priority_fee_per_gas); + rlp.append(&self.gas_price); + rlp.append(&self.gas); + rlp_opt(rlp, &self.to); + rlp.append(&self.value); + rlp.append(&self.input.0); + } + // Legacy (None) + None => { + rlp.append(&self.nonce); + rlp.append(&self.gas_price); + rlp.append(&self.gas); + rlp_opt(rlp, &self.to); + rlp.append(&self.value); + rlp.append(&self.input.0); + } + Some(_) => unreachable!("Unknown tx type"), + } + + if let Some(signature) = signature { + if self.is_legacy_tx() { + rlp.append(&signature.v_with_chain_id(chain_id)); + } else { + rlp.append(&signature.v()); + } + rlp.append(&U256::from_big_endian(signature.r())); + rlp.append(&U256::from_big_endian(signature.s())); + } else if self.is_legacy_tx() { + rlp.append(&chain_id); + rlp.append(&0u8); + rlp.append(&0u8); + } + + if self.is_eip712_tx() { + rlp.append(&chain_id); + rlp_opt(rlp, &self.from); + if let Some(meta) = &self.eip712_meta { + meta.rlp_append(rlp); + } + } + + rlp.finalize_unbounded_list(); + } + + fn decode_standard_fields(rlp: &Rlp, offset: usize) -> Result { + Ok(Self { + nonce: rlp.val_at(offset)?, + gas_price: rlp.val_at(offset + 1)?, + gas: rlp.val_at(offset + 2)?, + to: rlp.val_at(offset + 3).ok(), + value: rlp.val_at(offset + 4)?, 
+ input: Bytes(rlp.val_at(offset + 5)?), + ..Default::default() + }) + } + + fn decode_eip1559_fields(rlp: &Rlp, offset: usize) -> Result { + Ok(Self { + nonce: rlp.val_at(offset)?, + max_priority_fee_per_gas: rlp.val_at(offset + 1).ok(), + gas_price: rlp.val_at(offset + 2)?, + gas: rlp.val_at(offset + 3)?, + to: rlp.val_at(offset + 4).ok(), + value: rlp.val_at(offset + 5)?, + input: Bytes(rlp.val_at(offset + 6)?), + ..Default::default() + }) + } + + pub fn is_eip712_tx(&self) -> bool { + Some(EIP_712_TX_TYPE.into()) == self.transaction_type + } + + pub fn is_legacy_tx(&self) -> bool { + self.transaction_type.is_none() + } + + pub fn from_bytes( + bytes: &[u8], + chain_id: u16, + ) -> Result<(Self, H256), SerializationTransactionError> { + let rlp; + let mut tx = match bytes.first() { + Some(x) if *x >= 0x80 => { + rlp = Rlp::new(bytes); + if rlp.item_count()? != 9 { + return Err(SerializationTransactionError::DecodeRlpError( + DecoderError::RlpIncorrectListLen, + )); + } + let v = rlp.val_at(6)?; + let (_, tx_chain_id) = PackedEthSignature::unpack_v(v) + .map_err(|_| SerializationTransactionError::MalformedSignature)?; + if tx_chain_id.is_some() && tx_chain_id != Some(chain_id) { + return Err(SerializationTransactionError::WrongChainId(tx_chain_id)); + } + Self { + chain_id: Some(chain_id), + v: Some(rlp.val_at(6)?), + r: Some(rlp.val_at(7)?), + s: Some(rlp.val_at(8)?), + ..Self::decode_standard_fields(&rlp, 0)? + } + } + Some(&EIP_1559_TX_TYPE) => { + rlp = Rlp::new(&bytes[1..]); + if rlp.item_count()? != 12 { + return Err(SerializationTransactionError::DecodeRlpError( + DecoderError::RlpIncorrectListLen, + )); + } + if let Ok(access_list_rlp) = rlp.at(8) { + if access_list_rlp.item_count()? 
> 0 { + return Err(SerializationTransactionError::AccessListsNotSupported); + } + } + + let tx_chain_id = rlp.val_at(0).ok(); + if tx_chain_id != Some(chain_id) { + return Err(SerializationTransactionError::WrongChainId(tx_chain_id)); + } + Self { + chain_id: tx_chain_id, + v: Some(rlp.val_at(9)?), + r: Some(rlp.val_at(10)?), + s: Some(rlp.val_at(11)?), + raw: Some(Bytes(rlp.as_raw().to_vec())), + transaction_type: Some(EIP_1559_TX_TYPE.into()), + ..Self::decode_eip1559_fields(&rlp, 1)? + } + } + Some(&EIP_712_TX_TYPE) => { + rlp = Rlp::new(&bytes[1..]); + if rlp.item_count()? != 16 { + return Err(SerializationTransactionError::DecodeRlpError( + DecoderError::RlpIncorrectListLen, + )); + } + let tx_chain_id = rlp.val_at(10).ok(); + if tx_chain_id.is_some() && tx_chain_id != Some(chain_id) { + return Err(SerializationTransactionError::WrongChainId(tx_chain_id)); + } + + Self { + v: Some(rlp.val_at(7)?), + r: Some(rlp.val_at(8)?), + s: Some(rlp.val_at(9)?), + eip712_meta: Some(Eip712Meta { + gas_per_pubdata: rlp.val_at(12)?, + factory_deps: rlp.list_at(13).ok(), + custom_signature: rlp.val_at(14).ok(), + paymaster_params: if let Ok(params) = rlp.list_at(15) { + PaymasterParams::from_vector(params)? + } else { + None + }, + }), + chain_id: tx_chain_id, + transaction_type: Some(EIP_712_TX_TYPE.into()), + from: Some(rlp.val_at(11)?), + ..Self::decode_eip1559_fields(&rlp, 0)? 
+ } + } + Some(&EIP_2930_TX_TYPE) => { + return Err(SerializationTransactionError::AccessListsNotSupported) + } + _ => return Err(SerializationTransactionError::UnknownTransactionFormat), + }; + + let factory_deps_ref = tx + .eip712_meta + .as_ref() + .and_then(|m| m.factory_deps.as_ref()); + if let Some(deps) = factory_deps_ref { + validate_factory_deps(deps)?; + } + + tx.raw = Some(Bytes(bytes.to_vec())); + + let default_signed_message = tx.get_default_signed_message(chain_id); + + tx.from = match tx.from { + Some(_) => tx.from, + None => tx.recover_default_signer(default_signed_message).ok(), + }; + + let hash = if tx.is_eip712_tx() { + let digest = [ + default_signed_message.as_bytes(), + &keccak256(&tx.get_signature()?), + ] + .concat(); + H256(keccak256(&digest)) + } else { + H256(keccak256(bytes)) + }; + + Ok((tx, hash)) + } + + fn get_default_signed_message(&self, chain_id: u16) -> H256 { + if self.is_eip712_tx() { + PackedEthSignature::typed_data_to_signed_bytes( + &Eip712Domain::new(L2ChainId(chain_id)), + self, + ) + } else { + let mut rlp_stream = RlpStream::new(); + self.rlp(&mut rlp_stream, chain_id, None); + let mut data = rlp_stream.out().to_vec(); + if let Some(tx_type) = self.transaction_type { + data.insert(0, tx_type.as_u64() as u8); + } + PackedEthSignature::message_to_signed_bytes(&data) + } + } + + fn recover_default_signer( + &self, + default_signed_message: H256, + ) -> Result { + let signature = self.get_signature()?; + PackedEthSignature::deserialize_packed(&signature) + .map_err(|_| SerializationTransactionError::MalformedSignature)? + .signature_recover_signer(&default_signed_message) + .map_err(|_| SerializationTransactionError::MalformedSignature)?; + + let address = PackedEthSignature::deserialize_packed(&signature) + .map_err(|_| SerializationTransactionError::MalformedSignature)? 
+ .signature_recover_signer(&default_signed_message) + .map_err(|_| SerializationTransactionError::MalformedSignature)?; + + Ok(address) + } + + fn get_fee_data(&self) -> Result { + let gas_per_pubdata_limit = if let Some(meta) = &self.eip712_meta { + meta.gas_per_pubdata + } else { + // For transactions that don't support corresponding field, a default is chosen. + U256::from(MAX_GAS_PER_PUBDATA_BYTE) + }; + + Ok(Fee { + gas_limit: self.gas, + max_fee_per_gas: self.gas_price, + max_priority_fee_per_gas: self.max_priority_fee_per_gas.unwrap_or(self.gas_price), + gas_per_pubdata_limit, + }) + } + + fn get_nonce_checked(&self) -> Result { + if self.nonce <= u32::MAX.into() { + Ok(Nonce(self.nonce.as_u32())) + } else { + Err(SerializationTransactionError::TooBigNonce) + } + } +} + +impl TryFrom for L2Tx { + type Error = SerializationTransactionError; + + fn try_from(value: TransactionRequest) -> Result { + let fee = value.get_fee_data()?; + let nonce = value.get_nonce_checked()?; + + // Attempt to decode factory deps. 
+ let factory_deps = value + .eip712_meta + .as_ref() + .and_then(|meta| meta.factory_deps.clone()); + if let Some(deps) = factory_deps.as_ref() { + validate_factory_deps(deps)?; + } + + let paymaster_params = value + .eip712_meta + .as_ref() + .and_then(|meta| meta.paymaster_params.clone()) + .unwrap_or_default(); + + let mut tx = L2Tx::new( + value + .to + .ok_or(SerializationTransactionError::ToAddressIsNull)?, + value.input.0.clone(), + nonce, + fee, + value.from.unwrap_or_default(), + value.value, + factory_deps, + paymaster_params, + ); + + tx.common_data.transaction_type = match value.transaction_type.map(|t| t.as_u64() as u8) { + Some(EIP_712_TX_TYPE) => TransactionType::EIP712Transaction, + Some(EIP_1559_TX_TYPE) => TransactionType::EIP1559Transaction, + Some(EIP_2930_TX_TYPE) => TransactionType::EIP2930Transaction, + _ => TransactionType::LegacyTransaction, + }; + // For fee calculation we use the same structure, as a result, signature may not be provided + tx.set_raw_signature(value.get_signature().unwrap_or_default()); + Ok(tx) + } +} + +impl From for TransactionRequest { + fn from(value: CallRequest) -> Self { + TransactionRequest { + nonce: Default::default(), + from: value.from, + to: value.to, + value: value.value.unwrap_or_default(), + gas_price: value.gas_price.unwrap_or_default(), + gas: value.gas.unwrap_or_default(), + input: value.data.unwrap_or_default(), + transaction_type: value.transaction_type, + access_list: value.access_list, + eip712_meta: value.eip712_meta, + ..Default::default() + } + } +} + +impl From for CallRequest { + fn from(tx: L2Tx) -> Self { + let mut meta = Eip712Meta { + gas_per_pubdata: tx.common_data.fee.gas_per_pubdata_limit, + factory_deps: None, + custom_signature: Some(tx.common_data.signature.clone()), + paymaster_params: Some(tx.common_data.paymaster_params.clone()), + }; + meta.factory_deps = tx.execute.factory_deps.clone(); + let mut request = CallRequestBuilder::default() + .from(tx.initiator_account()) + 
.gas(tx.common_data.fee.gas_limit) + .max_fee_per_gas(tx.common_data.fee.max_fee_per_gas) + .max_priority_fee_per_gas(tx.common_data.fee.max_priority_fee_per_gas) + .transaction_type(U64::from(tx.common_data.transaction_type as u32)) + .to(tx.execute.contract_address) + .data(Bytes(tx.execute.calldata.clone())) + .eip712_meta(meta) + .build(); + + if tx.common_data.transaction_type == TransactionType::LegacyTransaction { + request.transaction_type = None; + } + request + } +} + +impl TryFrom for L2Tx { + type Error = SerializationTransactionError; + fn try_from(tx: CallRequest) -> Result { + let tx: TransactionRequest = tx.into(); + tx.try_into() + } +} + +fn rlp_opt(rlp: &mut RlpStream, opt: &Option) { + if let Some(inner) = opt { + rlp.append(inner); + } else { + rlp.append(&""); + } +} + +fn access_list_rlp(rlp: &mut RlpStream, access_list: &Option) { + if let Some(access_list) = access_list { + rlp.begin_list(access_list.len()); + for item in access_list { + rlp.begin_list(2); + rlp.append(&item.address); + rlp.append_list(&item.storage_keys); + } + } else { + rlp.begin_list(0); + } +} + +pub fn validate_factory_deps( + factory_deps: &[Vec], +) -> Result<(), SerializationTransactionError> { + for (i, dep) in factory_deps.iter().enumerate() { + validate_bytecode(dep) + .map_err(|err| SerializationTransactionError::InvalidFactoryDependencies(i, err))?; + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::web3::api::Namespace; + use crate::web3::transports::test::TestTransport; + use crate::web3::types::{TransactionParameters, H256, U256}; + use secp256k1::SecretKey; + + #[tokio::test] + async fn decode_real_tx() { + let accounts = crate::web3::api::Accounts::new(TestTransport::default()); + + let pk = hex::decode("4c0883a69102937d6231471b5dbb6204fe5129617082792ae468d01a3f362318") + .unwrap(); + let address = PackedEthSignature::address_from_private_key(&H256::from_slice(&pk)).unwrap(); + let key = SecretKey::from_slice(&pk).unwrap(); + + 
let tx = TransactionParameters { + nonce: Some(U256::from(1u32)), + to: Some(Address::random()), + gas: Default::default(), + gas_price: Some(U256::from(2u32)), + max_fee_per_gas: None, + max_priority_fee_per_gas: None, + value: Default::default(), + data: Bytes(vec![1, 2, 3]), + chain_id: Some(270), + transaction_type: None, + access_list: None, + }; + let signed_tx = accounts.sign_transaction(tx.clone(), &key).await.unwrap(); + let (tx2, _) = + TransactionRequest::from_bytes(signed_tx.raw_transaction.0.as_slice(), 270).unwrap(); + assert_eq!(tx.gas, tx2.gas); + assert_eq!(tx.gas_price.unwrap(), tx2.gas_price); + assert_eq!(tx.nonce.unwrap(), tx2.nonce); + assert_eq!(tx.data, tx2.input); + assert_eq!(tx.value, tx2.value); + assert_eq!(address, tx2.from.unwrap()); + } + + #[test] + fn decode_rlp() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut tx = TransactionRequest { + nonce: U256::from(1u32), + to: Some(Address::random()), + value: U256::from(10u32), + gas_price: U256::from(11u32), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + chain_id: Some(270), + ..Default::default() + }; + let mut rlp = RlpStream::new(); + tx.rlp(&mut rlp, 270, None); + let data = rlp.out().to_vec(); + let msg = PackedEthSignature::message_to_signed_bytes(&data); + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + tx.raw = Some(Bytes(data)); + let mut rlp = RlpStream::new(); + tx.rlp(&mut rlp, 270, Some(&signature)); + let data = rlp.out().to_vec(); + let (tx2, _) = TransactionRequest::from_bytes(&data, 270).unwrap(); + assert_eq!(tx.gas, tx2.gas); + assert_eq!(tx.gas_price, tx2.gas_price); + assert_eq!(tx.nonce, tx2.nonce); + assert_eq!(tx.input, tx2.input); + assert_eq!(tx.value, tx2.value); + assert_eq!( + tx2.v.unwrap().as_u32() as u16, + signature.v_with_chain_id(270) + ); + assert_eq!(tx2.s.unwrap(), signature.s().into()); + assert_eq!(tx2.r.unwrap(), 
signature.r().into()); + assert_eq!(address, tx2.from.unwrap()); + } + + #[test] + fn decode_eip712_with_meta() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut tx = TransactionRequest { + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + max_priority_fee_per_gas: Some(U256::from(0u32)), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + transaction_type: Some(U64::from(EIP_712_TX_TYPE)), + eip712_meta: Some(Eip712Meta { + gas_per_pubdata: U256::from(4u32), + factory_deps: Some(vec![vec![2; 32]]), + custom_signature: Some(vec![1, 2, 3]), + paymaster_params: Some(PaymasterParams { + paymaster: Default::default(), + paymaster_input: vec![], + }), + }), + chain_id: Some(270), + ..Default::default() + }; + + let msg = + PackedEthSignature::typed_data_to_signed_bytes(&Eip712Domain::new(L2ChainId(270)), &tx); + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + + let mut rlp = RlpStream::new(); + tx.rlp(&mut rlp, 270, Some(&signature)); + let mut data = rlp.out().to_vec(); + data.insert(0, EIP_712_TX_TYPE); + tx.raw = Some(Bytes(data.clone())); + tx.v = Some(U64::from(signature.v())); + tx.r = Some(U256::from_big_endian(signature.r())); + tx.s = Some(U256::from_big_endian(signature.s())); + + let (tx2, _) = TransactionRequest::from_bytes(&data, 270).unwrap(); + + assert_eq!(tx, tx2); + } + + #[test] + fn check_recovered_public_key_eip712() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let transaction_request = TransactionRequest { + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + max_priority_fee_per_gas: Some(U256::from(0u32)), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 
3]), + transaction_type: Some(U64::from(EIP_712_TX_TYPE)), + eip712_meta: Some(Eip712Meta { + gas_per_pubdata: U256::from(4u32), + factory_deps: Some(vec![vec![2; 32]]), + custom_signature: Some(vec![]), + paymaster_params: None, + }), + chain_id: Some(270), + ..Default::default() + }; + let domain = Eip712Domain::new(L2ChainId(270)); + let signature = + PackedEthSignature::sign_typed_data(&private_key, &domain, &transaction_request) + .unwrap(); + + let encoded_tx = transaction_request.get_signed_bytes(&signature, L2ChainId(270)); + + let (decoded_tx, _) = TransactionRequest::from_bytes(encoded_tx.as_slice(), 270).unwrap(); + let recovered_signer = decoded_tx.from.unwrap(); + assert_eq!(address, recovered_signer); + } + + #[test] + fn check_recovered_public_key_eip712_with_wrong_chain_id() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let transaction_request = TransactionRequest { + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + max_priority_fee_per_gas: Some(U256::from(0u32)), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + transaction_type: Some(U64::from(EIP_712_TX_TYPE)), + eip712_meta: Some(Eip712Meta { + gas_per_pubdata: U256::from(4u32), + factory_deps: Some(vec![vec![2; 32]]), + custom_signature: Some(vec![1, 2, 3]), + paymaster_params: Some(PaymasterParams { + paymaster: Default::default(), + paymaster_input: vec![], + }), + }), + chain_id: Some(270), + ..Default::default() + }; + let domain = Eip712Domain::new(L2ChainId(270)); + let signature = + PackedEthSignature::sign_typed_data(&private_key, &domain, &transaction_request) + .unwrap(); + + let encoded_tx = transaction_request.get_signed_bytes(&signature, L2ChainId(270)); + + let decoded_tx = TransactionRequest::from_bytes(encoded_tx.as_slice(), 272); + assert!(matches!( + decoded_tx, + 
Err(SerializationTransactionError::WrongChainId(Some(270))) + )); + } + + #[test] + fn check_recovered_public_key_eip1559() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut transaction_request = TransactionRequest { + max_priority_fee_per_gas: Some(U256::from(1u32)), + raw: None, + transaction_type: Some(EIP_1559_TX_TYPE.into()), + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + chain_id: Some(270), + access_list: Some(Vec::new()), + ..Default::default() + }; + let mut rlp_stream = RlpStream::new(); + transaction_request.rlp(&mut rlp_stream, 270, None); + let mut data = rlp_stream.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + let msg = PackedEthSignature::message_to_signed_bytes(&data); + + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + transaction_request.raw = Some(Bytes(data)); + let mut rlp = RlpStream::new(); + transaction_request.rlp(&mut rlp, 270, Some(&signature)); + let mut data = rlp.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + + let (decoded_tx, _) = TransactionRequest::from_bytes(data.as_slice(), 270).unwrap(); + let recovered_signer = decoded_tx.from.unwrap(); + assert_eq!(address, recovered_signer); + } + + #[test] + fn check_recovered_public_key_eip1559_with_wrong_chain_id() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut transaction_request = TransactionRequest { + max_priority_fee_per_gas: Some(U256::from(1u32)), + raw: None, + transaction_type: Some(EIP_1559_TX_TYPE.into()), + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + 
chain_id: Some(272), + ..Default::default() + }; + let mut rlp_stream = RlpStream::new(); + transaction_request.rlp(&mut rlp_stream, 272, None); + let mut data = rlp_stream.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + let msg = PackedEthSignature::message_to_signed_bytes(&data); + + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + transaction_request.raw = Some(Bytes(data)); + let mut rlp = RlpStream::new(); + transaction_request.rlp(&mut rlp, 272, Some(&signature)); + let mut data = rlp.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + + let decoded_tx = TransactionRequest::from_bytes(data.as_slice(), 270); + assert!(matches!( + decoded_tx, + Err(SerializationTransactionError::WrongChainId(Some(272))) + )); + } + + #[test] + fn check_decode_eip1559_with_access_list() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut transaction_request = TransactionRequest { + max_priority_fee_per_gas: Some(U256::from(1u32)), + raw: None, + transaction_type: Some(EIP_1559_TX_TYPE.into()), + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + chain_id: Some(270), + access_list: Some(vec![Default::default()]), + ..Default::default() + }; + let mut rlp_stream = RlpStream::new(); + transaction_request.rlp(&mut rlp_stream, 270, None); + let mut data = rlp_stream.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + let msg = PackedEthSignature::message_to_signed_bytes(&data); + + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + transaction_request.raw = Some(Bytes(data)); + let mut rlp = RlpStream::new(); + transaction_request.rlp(&mut rlp, 270, Some(&signature)); + let mut data = rlp.out().to_vec(); + data.insert(0, EIP_1559_TX_TYPE); + + let res = 
TransactionRequest::from_bytes(data.as_slice(), 270); + assert!(matches!( + res, + Err(SerializationTransactionError::AccessListsNotSupported) + )); + } + + #[test] + fn check_failed_to_decode_eip2930() { + let private_key = H256::random(); + let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + let mut transaction_request = TransactionRequest { + transaction_type: Some(EIP_2930_TX_TYPE.into()), + nonce: U256::from(1u32), + to: Some(Address::random()), + from: Some(address), + value: U256::from(10u32), + gas_price: U256::from(11u32), + gas: U256::from(12u32), + input: Bytes::from(vec![1, 2, 3]), + chain_id: Some(270), + ..Default::default() + }; + let mut rlp_stream = RlpStream::new(); + transaction_request.rlp(&mut rlp_stream, 270, None); + let mut data = rlp_stream.out().to_vec(); + data.insert(0, EIP_2930_TX_TYPE); + let msg = PackedEthSignature::message_to_signed_bytes(&data); + + let signature = PackedEthSignature::sign_raw(&private_key, &msg).unwrap(); + transaction_request.raw = Some(Bytes(data)); + let mut rlp = RlpStream::new(); + transaction_request.rlp(&mut rlp, 270, Some(&signature)); + let mut data = rlp.out().to_vec(); + data.insert(0, EIP_2930_TX_TYPE); + + let res = TransactionRequest::from_bytes(data.as_slice(), 270); + assert!(matches!( + res, + Err(SerializationTransactionError::AccessListsNotSupported) + )); + } + + #[test] + fn check_transaction_request_big_nonce() { + let tx1 = TransactionRequest { + nonce: U256::from(u32::MAX), + to: Some(Address::repeat_byte(0x1)), + from: Some(Address::repeat_byte(0x1)), + value: U256::zero(), + ..Default::default() + }; + let execute_tx1: Result = tx1.try_into(); + assert!(execute_tx1.is_ok()); + + let tx2 = TransactionRequest { + nonce: U256::from((u32::MAX as u64) + 1), + to: Some(Address::repeat_byte(0x1)), + from: Some(Address::repeat_byte(0x1)), + value: U256::zero(), + ..Default::default() + }; + let execute_tx2: Result = tx2.try_into(); + assert!(matches!( + 
execute_tx2, + Err(SerializationTransactionError::TooBigNonce) + )); + } +} diff --git a/core/lib/types/src/tx/execute.rs b/core/lib/types/src/tx/execute.rs new file mode 100644 index 000000000000..c00c8ca00cf9 --- /dev/null +++ b/core/lib/types/src/tx/execute.rs @@ -0,0 +1,76 @@ +use crate::{web3::ethabi, Address, EIP712TypedStructure, StructBuilder, H256, U256}; +use once_cell::sync::Lazy; +use serde::{Deserialize, Serialize}; +use zksync_utils::ZeroPrefixHexSerde; + +/// `Execute` transaction executes a previously deployed smart contract in the L2 rollup. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Execute { + pub contract_address: Address, + + #[serde(with = "ZeroPrefixHexSerde")] + pub calldata: Vec, + + pub value: U256, + + /// Factory dependencies: list of contract bytecodes associated with the deploy transaction. + /// This field is always `None` for all the transaction that do not cause the contract deployment. + /// For the deployment transactions, this field is always `Some`, even if there s no "dependencies" for the + /// contract being deployed, since the bytecode of the contract itself is also included into this list. + pub factory_deps: Option>>, +} + +impl EIP712TypedStructure for Execute { + const TYPE_NAME: &'static str = "Transaction"; + + fn build_structure(&self, builder: &mut BUILDER) { + builder.add_member("to", &U256::from(self.contract_address.as_bytes())); + builder.add_member("value", &self.value); + builder.add_member("data", &self.calldata().as_slice()); + // Factory deps are not included into the transaction signature, since they are parsed from the + // transaction metadata. + // Note that for the deploy transactions all the dependencies are implicitly included into the "calldataHash" + // field, because the deps are referenced in the bytecode of the "main" contract bytecode. 
+ } +} + +impl Execute { + pub fn calldata(&self) -> Vec { + self.calldata.clone() + } + + /// Prepares calldata to invoke deployer contract. + /// This method encodes parameters for the `create` method. + pub fn encode_deploy_params_create( + salt: H256, + contract_hash: H256, + constructor_input: Vec, + ) -> Vec { + static FUNCTION_SIGNATURE: Lazy<[u8; 4]> = Lazy::new(|| { + ethabi::short_signature( + "create", + &[ + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::FixedBytes(32), + ethabi::ParamType::Bytes, + ], + ) + }); + let params = ethabi::encode(&[ + ethabi::Token::FixedBytes(salt.as_bytes().to_vec()), + ethabi::Token::FixedBytes(contract_hash.as_bytes().to_vec()), + ethabi::Token::Bytes(constructor_input), + ]); + + FUNCTION_SIGNATURE.iter().copied().chain(params).collect() + } + + /// Number of new factory dependencies in this transaction + pub fn factory_deps_length(&self) -> usize { + self.factory_deps + .as_ref() + .map(|deps| deps.len()) + .unwrap_or_default() + } +} diff --git a/core/lib/types/src/tx/mod.rs b/core/lib/types/src/tx/mod.rs new file mode 100644 index 000000000000..874146ff5c4d --- /dev/null +++ b/core/lib/types/src/tx/mod.rs @@ -0,0 +1,34 @@ +//! `transactions` is module that holds the essential information for every transaction. +//! +//! Since in zkSync Era every operation can be executed either from the contract or rollup, +//! it makes more sense to define the contents of each transaction chain-agnostic, and extent this data +//! with metadata (such as fees and/or signatures) for L1 and L2 separately. 
+ +use std::fmt::Debug; +use zksync_basic_types::{Address, H256}; + +pub mod execute; +pub mod primitives; +pub mod tx_execution_info; + +pub use self::execute::Execute; +use crate::Transaction; +pub use tx_execution_info::ExecutionMetrics; +use tx_execution_info::TxExecutionStatus; + +#[derive(Debug, Clone, PartialEq)] +pub struct TransactionExecutionResult { + pub transaction: Transaction, + pub hash: H256, + pub execution_info: ExecutionMetrics, + pub execution_status: TxExecutionStatus, + pub refunded_gas: u32, + pub operator_suggested_refund: u32, +} + +#[derive(Debug, Clone)] +pub struct IncludedTxLocation { + pub tx_hash: H256, + pub tx_index_in_miniblock: u32, + pub tx_initiator_address: Address, +} diff --git a/core/lib/types/src/tx/primitives/eip712_signature/member_types.rs b/core/lib/types/src/tx/primitives/eip712_signature/member_types.rs new file mode 100644 index 000000000000..1b967c04dedf --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/member_types.rs @@ -0,0 +1,115 @@ +use crate::tx::primitives::eip712_signature::typed_structure::{ + EncodedStructureMember, StructMember, +}; +use parity_crypto::Keccak256; +use zksync_basic_types::{Address, H256, U256}; + +impl StructMember for String { + const MEMBER_TYPE: &'static str = "string"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + self.keccak256().into() + } +} + +impl StructMember for Address { + const MEMBER_TYPE: &'static str = "address"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + H256::from(*self) + } +} + +impl StructMember for &[u8] { + const MEMBER_TYPE: &'static str = "bytes"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + self.keccak256().into() + } +} + +impl StructMember for 
&[H256] { + const MEMBER_TYPE: &'static str = "bytes32[]"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + let bytes: Vec = self + .iter() + .flat_map(|hash| hash.as_bytes().to_vec()) + .collect(); + bytes.keccak256().into() + } +} + +impl StructMember for U256 { + const MEMBER_TYPE: &'static str = "uint256"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + let mut bytes = [0u8; 32]; + self.to_big_endian(&mut bytes); + + bytes.into() + } +} + +impl StructMember for H256 { + const MEMBER_TYPE: &'static str = "uint256"; + const IS_REFERENCE_TYPE: bool = false; + + fn get_inner_members(&self) -> Vec { + Vec::new() + } + + fn encode_member_data(&self) -> H256 { + *self + } +} + +macro_rules! impl_primitive { + ($T: ident, $name:expr, $bit_size:expr) => { + impl StructMember for $T { + const MEMBER_TYPE: &'static str = $name; + const IS_REFERENCE_TYPE: bool = false; + fn get_inner_members(&self) -> Vec { + Vec::new() + } + fn encode_member_data(&self) -> H256 { + let mut bytes = [0u8; 32]; + let bytes_value = self.to_be_bytes(); + bytes[32 - $bit_size / 8..].copy_from_slice(&bytes_value); + + bytes.into() + } + } + }; +} + +impl_primitive!(u8, "uint8", 8); +impl_primitive!(u16, "uint16", 16); +impl_primitive!(u32, "uint32", 32); +impl_primitive!(u64, "uint64", 64); +impl_primitive!(u128, "uint128", 128); diff --git a/core/lib/types/src/tx/primitives/eip712_signature/mod.rs b/core/lib/types/src/tx/primitives/eip712_signature/mod.rs new file mode 100644 index 000000000000..81642cbf6159 --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/mod.rs @@ -0,0 +1,14 @@ +//! This is implementation of a standard for hashing typed structured data for [EIP-712](https://eips.ethereum.org/EIPS/eip-712) signing standard. +//! +//! 
This module contains the necessary interfaces for obtaining a hash of the structure, which is later needed for EIP-712 signing. + +mod member_types; +pub mod struct_builder; +pub mod typed_structure; +pub mod utils; + +pub use struct_builder::*; +pub use typed_structure::*; + +#[cfg(test)] +mod tests; diff --git a/core/lib/types/src/tx/primitives/eip712_signature/struct_builder.rs b/core/lib/types/src/tx/primitives/eip712_signature/struct_builder.rs new file mode 100644 index 000000000000..f6189f504df3 --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/struct_builder.rs @@ -0,0 +1,184 @@ +use serde_json::Value; +use std::collections::{BTreeMap, VecDeque}; +use zksync_basic_types::H256; + +use crate::tx::primitives::eip712_signature::typed_structure::{ + EncodedStructureMember, StructMember, +}; + +/// Interface that collects members of the structure into the structure of the EIP-712 standard. +pub trait StructBuilder { + fn new() -> Self; + + fn add_member(&mut self, name: &str, member: &MEMBER); +} + +/// Builder for collecting information about types of nested structures. +pub(crate) struct TypeBuilder { + members: Vec, +} + +impl TypeBuilder { + pub fn get_inner_members(&self) -> Vec { + self.members.clone() + } +} + +impl StructBuilder for TypeBuilder { + fn new() -> Self { + Self { + members: Vec::new(), + } + } + + fn add_member(&mut self, name: &str, member: &MEMBER) { + self.members + .push(EncodedStructureMember::encode(name, member)); + } +} + +struct OuterTypeBuilder { + inner_members_queue: VecDeque, +} + +impl OuterTypeBuilder { + fn new() -> Self { + Self { + inner_members_queue: VecDeque::new(), + } + } + + fn add_member(&mut self, encoded_member: EncodedStructureMember) { + // If the type is not used by the structure, then it is possible not + // to process it as it is not included in the list of types of nested structures. 
+ if encoded_member.is_reference_type { + self.inner_members_queue.push_back(encoded_member); + } + } + + fn build(mut self) -> BTreeMap { + // All nested structures must be added to the encoded type alphabetically, + // so we will support a red-black tree with a key by the name of the structure type. + let mut result = BTreeMap::new(); + + while let Some(front_element) = self.inner_members_queue.pop_front() { + if result.get(&front_element.member_type).is_some() { + continue; + } + + result.insert(front_element.member_type.clone(), front_element.clone()); + for inner_member in front_element.inner_members { + if inner_member.is_reference_type && result.get(&inner_member.member_type).is_none() + { + self.inner_members_queue.push_back(inner_member); + } + } + } + result + } +} + +// Builder that encodes type information and structure data for for hashing the structure according to the EIP-712 standard. +pub(crate) struct EncodeBuilder { + members: Vec<(EncodedStructureMember, H256)>, +} + +impl EncodeBuilder { + /// Returns the concatenation of the encoded member values in the order that they appear in the type. + pub fn encode_data(&self) -> Vec { + // encodeData(s : 𝕊) = enc(value₁) ‖ enc(value₂) ‖ … ‖ enc(valueₙ). + self.members.iter().map(|(_, data)| *data).collect() + } + + /// Return the encoded structure type as `name ‖ "(" ‖ member₁ ‖ "," ‖ member₂ ‖ "," ‖ … ‖ memberₙ ")"`. + /// + /// If the struct type references other struct types (and these in turn reference even more struct types), + /// then the set of referenced struct types is collected, sorted by name and appended to the encoding. + pub fn encode_type(&self, type_name: &str) -> String { + let mut result = String::new(); + + let mut outer_members_builder = OuterTypeBuilder::new(); + for (member, _) in self.members.iter() { + outer_members_builder.add_member(member.clone()); + } + let outer_members = outer_members_builder.build(); + + // Collecting all members of the structure as a coded structure. 
+ let inner_member = { + let member_type = type_name.to_string(); + let inner_members = self + .members + .iter() + .cloned() + .map(|(encoded_struct, _)| encoded_struct) + .collect::>(); + + EncodedStructureMember { + member_type, + name: String::default(), + is_reference_type: true, + inner_members, + } + }; + + result.push_str(&inner_member.get_encoded_type()); + for (_, outer_member) in outer_members { + result.push_str(&outer_member.get_encoded_type()); + } + + result + } + + /// Return the encoded structure type as `{ member_type: [{"name": member_name₁, "type": member_type₁}, ...] }`. + /// + /// If the struct type references other struct types (and these in turn reference even more struct types), + /// then the set of referenced struct types is collected, sorted by name and appended to the encoding. + pub fn get_json_types(&self, type_name: &str) -> Vec { + let mut result = Vec::new(); + + let mut outer_members_builder = OuterTypeBuilder::new(); + for (member, _) in self.members.iter() { + outer_members_builder.add_member(member.clone()); + } + let outer_members = outer_members_builder.build(); + + // Collecting all members of the structure as a coded structure. 
+ let inner_member = { + let member_type = type_name.to_string(); + let inner_members = self + .members + .iter() + .cloned() + .map(|(encoded_struct, _)| encoded_struct) + .collect::>(); + + EncodedStructureMember { + member_type, + name: String::default(), + is_reference_type: true, + inner_members, + } + }; + + result.push(inner_member.get_json_types()); + for (_, outer_member) in outer_members { + result.push(outer_member.get_json_types()); + } + + result + } +} + +impl StructBuilder for EncodeBuilder { + fn new() -> Self { + Self { + members: Vec::new(), + } + } + + fn add_member(&mut self, name: &str, member: &MEMBER) { + let encoded_data = member.encode_member_data(); + self.members + .push((EncodedStructureMember::encode(name, member), encoded_data)); + } +} diff --git a/core/lib/types/src/tx/primitives/eip712_signature/tests.rs b/core/lib/types/src/tx/primitives/eip712_signature/tests.rs new file mode 100644 index 000000000000..ec2accc0b0b2 --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/tests.rs @@ -0,0 +1,202 @@ +use crate::tx::primitives::eip712_signature::{ + struct_builder::StructBuilder, + typed_structure::{EIP712TypedStructure, Eip712Domain}, +}; +use crate::tx::primitives::{eip712_signature::utils::get_eip712_json, PackedEthSignature}; +use parity_crypto::Keccak256; +use serde::Serialize; +use std::str::FromStr; +use zksync_basic_types::{Address, H256, U256}; + +#[derive(Clone, Serialize)] +struct Person { + name: String, + wallet: Address, +} + +impl EIP712TypedStructure for Person { + const TYPE_NAME: &'static str = "Person"; + + fn build_structure(&self, builder: &mut BUILDER) { + builder.add_member("name", &self.name); + builder.add_member("wallet", &self.wallet); + } +} + +#[derive(Clone, Serialize)] +struct Mail { + from: Person, + to: Person, + contents: String, +} + +impl EIP712TypedStructure for Mail { + const TYPE_NAME: &'static str = "Mail"; + fn build_structure(&self, builder: &mut BUILDER) { + 
builder.add_member("from", &self.from); + builder.add_member("to", &self.to); + builder.add_member("contents", &self.contents); + } +} + +#[test] +fn test_encode_eip712_typed_struct() { + let domain = Eip712Domain { + name: "Ether Mail".to_owned(), + version: "1".to_owned(), + chain_id: U256::from(1u8), + }; + + let message = Mail { + from: Person { + name: "Cow".to_owned(), + wallet: Address::from_str("CD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826").unwrap(), + }, + to: Person { + name: "Bob".to_owned(), + wallet: Address::from_str("bBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB").unwrap(), + }, + contents: "Hello, Bob!".to_string(), + }; + + assert_eq!( + &message.encode_type(), + "Mail(Person from,Person to,string contents)Person(string name,address wallet)" + ); + + assert_eq!( + &message.encode_data()[..], + [ + H256::from_str("fc71e5fa27ff56c350aa531bc129ebdf613b772b6604664f5d8dbe21b85eb0c8") + .unwrap(), + H256::from_str("cd54f074a4af31b4411ff6a60c9719dbd559c221c8ac3492d9d872b041d703d1") + .unwrap(), + H256::from_str("b5aadf3154a261abdd9086fc627b61efca26ae5702701d05cd2305f7c52a2fc8") + .unwrap() + ] + ); + + assert_eq!( + message.hash_struct(), + H256::from_str("c52c0ee5d84264471806290a3f2c4cecfc5490626bf912d01f240d7a274b371e").unwrap() + ); + + assert_eq!( + &domain.encode_type(), + "EIP712Domain(string name,string version,uint256 chainId)" + ); + + assert_eq!( + &domain.encode_data()[..], + [ + H256::from_str("c70ef06638535b4881fafcac8287e210e3769ff1a8e91f1b95d6246e61e4d3c6") + .unwrap(), + H256::from_str("c89efdaa54c0f20c7adf612882df0950f5a951637e0307cdcb4c672f298b8bc6") + .unwrap(), + H256::from_str("0000000000000000000000000000000000000000000000000000000000000001") + .unwrap(), + ] + ); + + assert_eq!( + domain.hash_struct(), + H256::from_str("3b98b16ad068d9d8854a6a416bd476de44a4933ec5104d7c786a422ab262ed14").unwrap() + ); + + let private_key = b"cow".keccak256().into(); + let address_owner = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + + 
let signature = PackedEthSignature::sign_typed_data(&private_key, &domain, &message).unwrap(); + let signed_bytes = PackedEthSignature::typed_data_to_signed_bytes(&domain, &message); + + assert_eq!( + address_owner, + signature.signature_recover_signer(&signed_bytes).unwrap() + ); +} + +#[test] +fn test_get_eip712_json() { + let domain = Eip712Domain { + name: "Ether Mail".to_owned(), + version: "1".to_owned(), + chain_id: U256::from(1u8), + }; + + let message = Mail { + from: Person { + name: "Cow".to_owned(), + wallet: Address::from_str("d94e3dc39d4cad1dad634e7eb585a57a19dc7efe").unwrap(), + }, + to: Person { + name: "Bob".to_owned(), + wallet: Address::from_str("d94e3dc39d4cad1dad634e7eb585a57a19dc7efe").unwrap(), + }, + contents: "Hello, Bob!".to_string(), + }; + + let expected_value = r#"{ + "domain":{ + "chainId":"0x1", + "name":"Ether Mail", + "version":"1" + }, + "message":{ + "contents":"Hello, Bob!", + "from":{ + "name":"Cow", + "wallet":"0xd94e3dc39d4cad1dad634e7eb585a57a19dc7efe" + }, + "to":{ + "name":"Bob", + "wallet":"0xd94e3dc39d4cad1dad634e7eb585a57a19dc7efe" + } + }, + "primaryType":"Mail", + "types":{ + "EIP712Domain":[ + { + "name":"name", + "type":"string" + }, + { + "name":"version", + "type":"string" + }, + { + "name":"chainId", + "type":"uint256" + } + ], + "Mail":[ + { + "name":"from", + "type":"Person" + }, + { + "name":"to", + "type":"Person" + }, + { + "name":"contents", + "type":"string" + } + ], + "Person":[ + { + "name":"name", + "type":"string" + }, + { + "name":"wallet", + "type":"address" + } + ] + } + }"#; + + assert_eq!( + get_eip712_json(&domain, &message), + serde_json::from_str::(expected_value).unwrap() + ); +} diff --git a/core/lib/types/src/tx/primitives/eip712_signature/typed_structure.rs b/core/lib/types/src/tx/primitives/eip712_signature/typed_structure.rs new file mode 100644 index 000000000000..408d818e343d --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/typed_structure.rs @@ -0,0 +1,186 @@ +use 
parity_crypto::Keccak256; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::tx::primitives::eip712_signature::struct_builder::{ + EncodeBuilder, StructBuilder, TypeBuilder, +}; +use crate::{L2ChainId, H256, U256}; + +#[derive(Debug, Clone)] +pub struct EncodedStructureMember { + /// Type identifier. + pub member_type: String, + /// Name identifier. + pub name: String, + /// Flag denoting structure or elementary type. + pub is_reference_type: bool, + // Members that are inside this member. + pub inner_members: Vec, +} + +impl EncodedStructureMember { + pub fn encode(name: &str, member: &MEMBER) -> Self { + Self { + member_type: member.member_type(), + name: name.to_string(), + is_reference_type: member.is_reference_type(), + inner_members: member.get_inner_members(), + } + } + + /// Encodes the structure as `name ‖ "(" ‖ member₁ ‖ "," ‖ member₂ ‖ "," ‖ … ‖ memberₙ ")". + pub fn get_encoded_type(&self) -> String { + let mut encoded_type = String::new(); + encoded_type.push_str(&self.member_type); + encoded_type.push('('); + + let mut members = self.inner_members.iter(); + + if let Some(member) = members.next() { + encoded_type.push_str(&member.member_type); + encoded_type.push(' '); + encoded_type.push_str(&member.name); + } + for member in members { + encoded_type.push(','); + encoded_type.push_str(&member.member_type); + encoded_type.push(' '); + encoded_type.push_str(&member.name); + } + + encoded_type.push(')'); + + encoded_type + } + + /// Encodes the structure as json according to principle `{ member_type: [{"name": member_name₁, "type": member_type₁}, ...] }`. 
+ pub fn get_json_types(&self) -> Value { + let mut members = Vec::new(); + for member in &self.inner_members { + let member_value = serde_json::json!({ + "name": member.name, + "type": member.member_type, + }); + members.push(member_value); + } + + serde_json::json!({ &self.member_type: members }) + } +} + +pub trait StructMember { + const MEMBER_TYPE: &'static str; + const IS_REFERENCE_TYPE: bool; + + fn member_type(&self) -> String { + Self::MEMBER_TYPE.to_string() + } + + fn is_reference_type(&self) -> bool { + Self::IS_REFERENCE_TYPE + } + + fn get_inner_members(&self) -> Vec; + + fn encode_member_data(&self) -> H256; +} + +impl StructMember for TypedStructure { + const MEMBER_TYPE: &'static str = Self::TYPE_NAME; + const IS_REFERENCE_TYPE: bool = true; + + fn get_inner_members(&self) -> Vec { + let mut builder = TypeBuilder::new(); + self.build_structure(&mut builder); + + builder.get_inner_members() + } + + fn encode_member_data(&self) -> H256 { + self.hash_struct() + } +} + +/// Interface for defining the structure for the EIP712 signature. +pub trait EIP712TypedStructure: Serialize { + const TYPE_NAME: &'static str; + + fn build_structure(&self, builder: &mut BUILDER); + + fn encode_type(&self) -> String { + let mut builder = EncodeBuilder::new(); + self.build_structure(&mut builder); + + builder.encode_type(Self::TYPE_NAME) + } + + fn encode_data(&self) -> Vec { + let mut builder = EncodeBuilder::new(); + self.build_structure(&mut builder); + + builder.encode_data() + } + + fn hash_struct(&self) -> H256 { + // hashStruct(s : 𝕊) = keccak256(keccak256(encodeType(typeOf(s))) ‖ encodeData(s)). 
+ let type_hash = { + let encode_type = self.encode_type(); + encode_type.keccak256() + }; + let encode_data = self.encode_data(); + + let mut bytes = Vec::new(); + bytes.extend_from_slice(&type_hash); + for data in encode_data { + bytes.extend_from_slice(data.as_bytes()); + } + + bytes.keccak256().into() + } + + fn get_json_types(&self) -> Vec { + let mut builder = EncodeBuilder::new(); + self.build_structure(&mut builder); + + builder.get_json_types(Self::TYPE_NAME) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Eip712Domain { + /// The user readable name of signing domain, i.e. the name of the DApp or the protocol. + pub name: String, + /// The current major version of the signing domain. Signatures from different versions are not compatible. + pub version: String, + /// The [EIP-155](https://eips.ethereum.org/EIPS/eip-155) chain id. + pub chain_id: U256, +} + +impl Eip712Domain { + /// Name of the protocol. + pub const NAME: &'static str = "zkSync"; + /// Version of the protocol. While there may be `2.x` releases, the minor release version bump + /// should not be breaking, meaning that clients from the `2.x-1` version should be able to communicate + /// with zkSync server. Thus `VERSION` corresponds to the major version only. 
+ pub const VERSION: &'static str = "2"; + + pub fn new(chain_id: L2ChainId) -> Self { + Self { + name: Self::NAME.to_string(), + version: Self::VERSION.to_string(), + chain_id: U256::from(*chain_id), + } + } +} + +impl EIP712TypedStructure for Eip712Domain { + const TYPE_NAME: &'static str = "EIP712Domain"; + + fn build_structure(&self, builder: &mut BUILDER) { + builder.add_member("name", &self.name); + builder.add_member("version", &self.version); + builder.add_member("chainId", &self.chain_id); + } +} diff --git a/core/lib/types/src/tx/primitives/eip712_signature/utils.rs b/core/lib/types/src/tx/primitives/eip712_signature/utils.rs new file mode 100644 index 000000000000..57db78943212 --- /dev/null +++ b/core/lib/types/src/tx/primitives/eip712_signature/utils.rs @@ -0,0 +1,32 @@ +use crate::tx::primitives::eip712_signature::typed_structure::{ + EIP712TypedStructure, Eip712Domain, +}; +use serde_json::{Map, Value}; + +/// Formats the data that needs to be signed in json according to the standard eip-712. +/// Compatible with `eth_signTypedData` RPC call. 
+pub fn get_eip712_json( + eip712_domain: &Eip712Domain, + typed_struct: &T, +) -> Value { + let types = { + let mut res = Map::new(); + + let mut vec_types = eip712_domain.get_json_types(); + vec_types.append(&mut typed_struct.get_json_types()); + + for mut member_type in vec_types { + if let Some(member_type) = member_type.as_object_mut() { + res.append(member_type); + } + } + res + }; + + serde_json::json!({ + "primaryType": T::TYPE_NAME, + "domain": serde_json::to_value(eip712_domain).expect("serialization fail"), + "message": serde_json::to_value(typed_struct).expect("serialization fail"), + "types": serde_json::to_value(types).expect("serialization fail"), + }) +} diff --git a/core/lib/types/src/tx/primitives/mod.rs b/core/lib/types/src/tx/primitives/mod.rs new file mode 100644 index 000000000000..a26475a03cd0 --- /dev/null +++ b/core/lib/types/src/tx/primitives/mod.rs @@ -0,0 +1,5 @@ +pub mod eip712_signature; +pub mod packed_eth_signature; + +pub use eip712_signature::*; +pub use packed_eth_signature::*; diff --git a/core/lib/types/src/tx/primitives/packed_eth_signature.rs b/core/lib/types/src/tx/primitives/packed_eth_signature.rs new file mode 100644 index 000000000000..15084e765837 --- /dev/null +++ b/core/lib/types/src/tx/primitives/packed_eth_signature.rs @@ -0,0 +1,225 @@ +use crate::tx::primitives::eip712_signature::typed_structure::{ + EIP712TypedStructure, Eip712Domain, +}; +use parity_crypto::{ + publickey::{ + public_to_address, recover, sign, Error as ParityCryptoError, KeyPair, + Signature as ETHSignature, + }, + Keccak256, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use thiserror::Error; +use zksync_basic_types::{Address, H256}; +use zksync_utils::ZeroPrefixHexSerde; + +/// Struct used for working with ethereum signatures created using eth_sign (using geth, ethers.js, etc) +/// message is serialized as 65 bytes long `0x` prefixed string. 
+/// +/// Some notes on implementation of methods of this structure: +/// +/// Ethereum signed message produced by most clients contains v where v = 27 + recovery_id(0,1,2,3), +/// but for some clients v = recovery_id(0,1,2,3). +/// Library that we use for signature verification (written for bitcoin) expects v = recovery_id +/// +/// That is why: +/// 1) when we create this structure by deserialization of message produced by user +/// we subtract 27 from v in `ETHSignature` if necessary and store it in the `ETHSignature` structure this way. +/// 2) When we serialize/create this structure we add 27 to v in `ETHSignature`. +/// +/// This way when we have methods that consumes &self we can be sure that ETHSignature::recover_signer works +/// And we can be sure that we are compatible with Ethereum clients. +/// +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub struct PackedEthSignature(ETHSignature); + +impl PackedEthSignature { + pub fn serialize_packed(&self) -> [u8; 65] { + // adds 27 to v + self.0.clone().into_electrum() + } + + pub fn deserialize_packed(bytes: &[u8]) -> Result { + if bytes.len() != 65 { + return Err(DeserializeError::IncorrectSignatureLength); + } + let mut bytes_array = [0u8; 65]; + bytes_array.copy_from_slice(bytes); + + if bytes_array[64] >= 27 { + bytes_array[64] -= 27; + } + + Ok(PackedEthSignature(ETHSignature::from(bytes_array))) + } + + /// Signs message using ethereum private key, results are identical to signature created + /// using `geth`, `ethers.js`, etc. No hashing and prefixes required. 
+ pub fn sign(private_key: &H256, msg: &[u8]) -> Result { + let signed_bytes = Self::message_to_signed_bytes(msg); + Self::sign_raw(private_key, &signed_bytes) + } + + pub fn sign_raw( + private_key: &H256, + signed_bytes: &H256, + ) -> Result { + let secret_key = (*private_key).into(); + let signature = sign(&secret_key, signed_bytes)?; + Ok(PackedEthSignature(signature)) + } + + /// Signs typed struct using ethereum private key by EIP-712 signature standard. + /// Result of this function is the equivalent of RPC calling `eth_signTypedData`. + pub fn sign_typed_data( + private_key: &H256, + domain: &Eip712Domain, + typed_struct: &impl EIP712TypedStructure, + ) -> Result { + let secret_key = (*private_key).into(); + let signed_bytes = Self::typed_data_to_signed_bytes(domain, typed_struct); + let signature = sign(&secret_key, &signed_bytes)?; + Ok(PackedEthSignature(signature)) + } + + pub fn typed_data_to_signed_bytes( + domain: &Eip712Domain, + typed_struct: &impl EIP712TypedStructure, + ) -> H256 { + let mut bytes = Vec::new(); + bytes.extend_from_slice("\x19\x01".as_bytes()); + bytes.extend_from_slice(domain.hash_struct().as_bytes()); + bytes.extend_from_slice(typed_struct.hash_struct().as_bytes()); + bytes.keccak256().into() + } + + pub fn message_to_signed_bytes(msg: &[u8]) -> H256 { + msg.keccak256().into() + } + + /// Checks signature and returns ethereum address of the signer. + /// message should be the same message that was passed to `eth.sign`(or similar) method + /// as argument. No hashing and prefixes required. + pub fn signature_recover_signer( + &self, + signed_bytes: &H256, + ) -> Result { + let public_key = recover(&self.0, signed_bytes)?; + Ok(public_to_address(&public_key)) + } + + /// Get Ethereum address from private key. 
+ pub fn address_from_private_key(private_key: &H256) -> Result { + Ok(KeyPair::from_secret((*private_key).into())?.address()) + } + + pub fn from_rsv(r: &H256, s: &H256, v: u8) -> Self { + PackedEthSignature(ETHSignature::from_rsv(r, s, v)) + } + + pub fn r(&self) -> &[u8] { + self.0.r() + } + pub fn s(&self) -> &[u8] { + self.0.s() + } + pub fn v(&self) -> u8 { + self.0.v() + } + pub fn v_with_chain_id(&self, chain_id: u16) -> u16 { + self.0.v() as u16 + 35 + chain_id * 2 + } + pub fn unpack_v(v: u64) -> Result<(u8, Option), ParityCryptoError> { + use std::convert::TryInto; + + if v == 27 { + return Ok((0, None)); + } else if v == 28 { + return Ok((1, None)); + } else if v >= 35 { + let chain_id = (v - 35) >> 1; + let v = v - 35 - chain_id * 2; + let chain_id = chain_id + .try_into() + .map_err(|_| ParityCryptoError::Custom("Invalid chain_id".to_string()))?; + if v == 0 { + return Ok((0, Some(chain_id))); + } else if v == 1 { + return Ok((1, Some(chain_id))); + } + } + + Err(ParityCryptoError::Custom("Invalid v".to_string())) + } +} + +#[derive(Debug, Error, PartialEq)] +pub enum DeserializeError { + #[error("Eth signature length should be 65 bytes")] + IncorrectSignatureLength, +} + +impl Serialize for PackedEthSignature { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let packed_signature = self.serialize_packed(); + ZeroPrefixHexSerde::serialize(&packed_signature, serializer) + } +} + +impl<'de> Deserialize<'de> for PackedEthSignature { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let bytes = ZeroPrefixHexSerde::deserialize(deserializer)?; + Self::deserialize_packed(&bytes).map_err(serde::de::Error::custom) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn unpack_v_0() { + assert_eq!(PackedEthSignature::unpack_v(27).unwrap(), (0, None)); + } + + #[test] + fn unpack_v_1() { + assert_eq!(PackedEthSignature::unpack_v(28).unwrap(), (1, None)); + } + + #[test] + fn 
unpack_wrong_v_10_without_chain_id() { + assert!(PackedEthSignature::unpack_v(10).is_err()); + } + + #[test] + fn unpack_wrong_v_30_without_chain_id() { + assert!(PackedEthSignature::unpack_v(30).is_err()); + } + + #[test] + fn unpack_v_0_with_chain_id_0() { + assert_eq!(PackedEthSignature::unpack_v(35).unwrap(), (0, Some(0))); + } + + #[test] + fn unpack_v_1_with_chain_id_0() { + assert_eq!(PackedEthSignature::unpack_v(36).unwrap(), (1, Some(0))); + } + + #[test] + fn unpack_v_1_with_chain_id_11() { + assert_eq!(PackedEthSignature::unpack_v(58).unwrap(), (1, Some(11))); + } + + #[test] + fn unpack_v_1_with_chain_id_270() { + assert_eq!(PackedEthSignature::unpack_v(576).unwrap(), (1, Some(270))); + } +} diff --git a/core/lib/types/src/tx/tx_execution_info.rs b/core/lib/types/src/tx/tx_execution_info.rs new file mode 100644 index 000000000000..91ac40d57554 --- /dev/null +++ b/core/lib/types/src/tx/tx_execution_info.rs @@ -0,0 +1,146 @@ +use crate::commitment::CommitmentSerializable; +use crate::event::{extract_long_l2_to_l1_messages, extract_published_bytecodes}; +use crate::l2_to_l1_log::L2ToL1Log; +use crate::log_query_sorter::sort_storage_access_queries; +use crate::writes::{InitialStorageWrite, RepeatedStorageWrite}; +use crate::{StorageLogQuery, StorageLogQueryType, VmEvent}; +use std::ops::{Add, AddAssign}; +use zksync_utils::bytecode::bytecode_len_in_bytes; + +/// Events/storage logs/l2->l1 logs created within transaction execution. 
+#[derive(Debug, Clone, Default, PartialEq)] +pub struct VmExecutionLogs { + pub storage_logs: Vec, + pub events: Vec, + pub l2_to_l1_logs: Vec, + pub total_log_queries_count: usize, +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum TxExecutionStatus { + Success, + Failure, +} + +impl TxExecutionStatus { + pub fn from_has_failed(has_failed: bool) -> Self { + if has_failed { + Self::Failure + } else { + Self::Success + } + } +} + +#[derive(Debug, Clone, Copy, Default, serde::Serialize, PartialEq)] +pub struct ExecutionMetrics { + pub initial_storage_writes: usize, + pub repeated_storage_writes: usize, + pub gas_used: usize, + pub published_bytecode_bytes: usize, + pub l2_l1_long_messages: usize, + pub l2_l1_logs: usize, + pub contracts_used: usize, + pub contracts_deployed: u16, + pub vm_events: usize, + pub storage_logs: usize, + pub total_log_queries: usize, + pub cycles_used: u32, +} + +impl ExecutionMetrics { + pub fn storage_writes(&self) -> usize { + self.initial_storage_writes + self.repeated_storage_writes + } + + pub fn size(&self) -> usize { + self.initial_storage_writes * InitialStorageWrite::SERIALIZED_SIZE + + self.repeated_storage_writes * RepeatedStorageWrite::SERIALIZED_SIZE + + self.l2_l1_logs * L2ToL1Log::SERIALIZED_SIZE + + self.l2_l1_long_messages + + self.published_bytecode_bytes + } + + pub fn new( + logs: &VmExecutionLogs, + gas_used: usize, + contracts_deployed: u16, + contracts_used: usize, + cycles_used: u32, + ) -> Self { + let (initial_storage_writes, repeated_storage_writes) = + get_initial_and_repeated_storage_writes(logs.storage_logs.as_slice()); + + let l2_l1_long_messages = extract_long_l2_to_l1_messages(&logs.events) + .iter() + .map(|event| event.len()) + .sum(); + + let published_bytecode_bytes = extract_published_bytecodes(&logs.events) + .iter() + .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash)) + .sum(); + + ExecutionMetrics { + initial_storage_writes: initial_storage_writes as usize, + 
repeated_storage_writes: repeated_storage_writes as usize, + gas_used, + published_bytecode_bytes, + l2_l1_long_messages, + l2_l1_logs: logs.l2_to_l1_logs.len(), + contracts_used, + contracts_deployed, + vm_events: logs.events.len(), + storage_logs: logs.storage_logs.len(), + total_log_queries: logs.total_log_queries_count, + cycles_used, + } + } +} + +impl Add for ExecutionMetrics { + type Output = ExecutionMetrics; + + fn add(self, other: ExecutionMetrics) -> ExecutionMetrics { + ExecutionMetrics { + initial_storage_writes: self.initial_storage_writes + other.initial_storage_writes, + repeated_storage_writes: self.repeated_storage_writes + other.repeated_storage_writes, + published_bytecode_bytes: self.published_bytecode_bytes + + other.published_bytecode_bytes, + contracts_deployed: self.contracts_deployed + other.contracts_deployed, + contracts_used: self.contracts_used + other.contracts_used, + l2_l1_long_messages: self.l2_l1_long_messages + other.l2_l1_long_messages, + l2_l1_logs: self.l2_l1_logs + other.l2_l1_logs, + gas_used: self.gas_used + other.gas_used, + vm_events: self.vm_events + other.vm_events, + storage_logs: self.storage_logs + other.storage_logs, + total_log_queries: self.total_log_queries + other.total_log_queries, + cycles_used: self.cycles_used + other.cycles_used, + } + } +} + +impl AddAssign for ExecutionMetrics { + fn add_assign(&mut self, other: Self) { + *self = *self + other; + } +} + +pub fn get_initial_and_repeated_storage_writes( + storage_log_queries: &[StorageLogQuery], +) -> (u32, u32) { + let mut initial_storage_writes = 0; + let mut repeated_storage_writes = 0; + + let (_, deduped_storage_logs) = sort_storage_access_queries(storage_log_queries); + for log in &deduped_storage_logs { + match log.log_type { + StorageLogQueryType::InitialWrite => { + initial_storage_writes += 1; + } + StorageLogQueryType::RepeatedWrite => repeated_storage_writes += 1, + StorageLogQueryType::Read => {} + } + } + (initial_storage_writes, 
repeated_storage_writes) +} diff --git a/core/lib/types/src/utils.rs b/core/lib/types/src/utils.rs new file mode 100644 index 000000000000..76c3e488b95c --- /dev/null +++ b/core/lib/types/src/utils.rs @@ -0,0 +1,137 @@ +use crate::system_contracts::DEPLOYMENT_NONCE_INCREMENT; +use crate::L2_ETH_TOKEN_ADDRESS; +use crate::{web3::signing::keccak256, AccountTreeId, StorageKey, U256}; +use once_cell::sync::Lazy; +use parity_crypto::Keccak256; +use std::collections::HashMap; + +use std::mem; +use std::sync::Mutex; +use std::time::Instant; + +use zksync_basic_types::{Address, H256}; + +use zksync_utils::{address_to_h256, h256_to_u256, u256_to_h256}; + +/// Transforms the *full* account nonce into an *account* nonce. +/// Full nonce is a composite one: it includes both account nonce (number of transactions +/// initiated by the account) and deployer nonce (number of smart contracts deployed by the +/// account). +/// For most public things, we need the account nonce. +pub fn decompose_full_nonce(full_nonce: U256) -> (U256, U256) { + ( + full_nonce % DEPLOYMENT_NONCE_INCREMENT, + full_nonce / DEPLOYMENT_NONCE_INCREMENT, + ) +} + +/// Converts tx nonce + deploy nonce into a full nonce. 
+pub fn nonces_to_full_nonce(tx_nonce: U256, deploy_nonce: U256) -> U256 { + DEPLOYMENT_NONCE_INCREMENT * deploy_nonce + tx_nonce +} + +static CACHE: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); + +fn key_for_eth_balance(address: &Address) -> H256 { + let address_h256 = address_to_h256(address); + + let bytes = [address_h256.as_bytes(), &[0; 32]].concat(); + keccak256(&bytes).into() +} + +/// Create a `key` part of `StorageKey` to access the balance from ERC20 contract balances +fn key_for_erc20_balance(address: &Address) -> H256 { + let started_at = Instant::now(); + let address_h256 = address_to_h256(address); + let address_u256 = h256_to_u256(address_h256); + let mut hash_map = CACHE.lock().unwrap(); + + metrics::gauge!( + "server.compute_storage_key_for_erc20_cache_size", + hash_map.len() as f64 + ); + metrics::gauge!( + "server.compute_storage_key_for_erc20_cache_size_bytes", + mem::size_of_val(&hash_map) as f64 + ); + + let hash = hash_map.entry(address_u256).or_insert_with(|| { + // 20 bytes address first gets aligned to 32 bytes with index of `balanceOf` storage slot + // of default ERC20 contract and to then to 64 bytes. + + let slot_index = H256::from_low_u64_be(51); + + let bytes = [address_h256.as_bytes(), slot_index.as_bytes()].concat(); + keccak256(&bytes).into() + }); + + metrics::histogram!( + "server.compute_storage_key_for_erc20_balance_latency", + started_at.elapsed() + ); + *hash +} + +/// Create a storage key to access the balance from supported token contract balances +pub fn storage_key_for_standard_token_balance( + token_contract: AccountTreeId, + address: &Address, +) -> StorageKey { + // We have different implementation of the standard erc20 contract and native + // eth contract. The key for the balance is different for each. 
+ let key = if token_contract.address() == &L2_ETH_TOKEN_ADDRESS { + key_for_eth_balance(address) + } else { + key_for_erc20_balance(address) + }; + + StorageKey::new(token_contract, key) +} + +pub fn storage_key_for_eth_balance(address: &Address) -> StorageKey { + storage_key_for_standard_token_balance(AccountTreeId::new(L2_ETH_TOKEN_ADDRESS), address) +} + +/// Pre-calculated the address of the to-be-deployed contract (via CREATE, not CREATE2). +pub fn deployed_address_create(sender: Address, deploy_nonce: U256) -> Address { + let prefix_bytes = "zksyncCreate".as_bytes().keccak256(); + let address_bytes = address_to_h256(&sender); + let nonce_bytes = u256_to_h256(deploy_nonce); + + let mut bytes = vec![]; + bytes.extend_from_slice(&prefix_bytes); + bytes.extend_from_slice(address_bytes.as_bytes()); + bytes.extend_from_slice(nonce_bytes.as_bytes()); + + Address::from_slice(&bytes.keccak256()[12..]) +} + +#[cfg(test)] +mod tests { + use crate::{ + utils::storage_key_for_standard_token_balance, AccountTreeId, Address, StorageKey, H256, + }; + use std::str::FromStr; + + #[test] + fn test_storage_key_for_eth_token() { + let contract = AccountTreeId::new(Address::zero()); + let addresses = vec![ + "0x1dfe8ea5e8de74634db78d9f8d41a1c832ab91e8", + "0xde03a0b5963f75f1c8485b355ff6d30f3093bde7", + "0x2c9fc71c164f7332f368da477256e1b049575979", + ]; + let hashes = vec![ + "0xd8f16e1d7fe824994134861c968a8f276930db7daf6ba4dd083567259d3ff857", + "0x4e08bf0f8822508eed9a1fb7d98cf6067ab156c74e9ebdda0924bef229d71995", + "0xb6ef92f5b364b6e13f237aef1213b68f53f91ac35dcea0ad60e103b5245fd85c", + ]; + for (address, hash) in addresses.iter().zip(hashes.iter()) { + let addr = Address::from_str(address).unwrap(); + let user_key = H256::from_str(hash).unwrap(); + let expected_storage_key = StorageKey::new(contract, user_key); + let calculated_storage_key = storage_key_for_standard_token_balance(contract, &addr); + assert_eq!(expected_storage_key, calculated_storage_key); + } + } +} diff --git 
a/core/lib/types/src/vm_trace.rs b/core/lib/types/src/vm_trace.rs new file mode 100644 index 000000000000..5317e2e303bf --- /dev/null +++ b/core/lib/types/src/vm_trace.rs @@ -0,0 +1,50 @@ +use crate::{Address, U256}; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; + +#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq)] +pub struct VmExecutionTrace { + pub steps: Vec, + pub contracts: HashSet
, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct VmExecutionStep { + pub contract_address: Address, + pub memory_page_index: usize, + pub child_memory_index: usize, + pub pc: u16, + pub set_flags: Vec, + pub registers: Vec, + pub register_interactions: HashMap, + pub sp: Option, + pub memory_interactions: Vec, + pub error: Option, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] +pub struct MemoryInteraction { + pub memory_type: String, + pub page: usize, + pub address: u16, + pub value: U256, + pub direction: MemoryDirection, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)] +pub enum MemoryDirection { + Read, + Write, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ContractSourceDebugInfo { + pub assembly_code: String, + pub pc_line_mapping: HashMap, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct VmDebugTrace { + pub steps: Vec, + pub sources: HashMap>, +} diff --git a/core/lib/utils/Cargo.toml b/core/lib/utils/Cargo.toml new file mode 100644 index 000000000000..23a5e5564c1a --- /dev/null +++ b/core/lib/utils/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "zksync_utils" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_basic_types = { path = "../../lib/basic_types", version = "1.0" } +zk_evm = {git = "https://github.com/matter-labs/zk_evm.git", branch = "v1.3.1"} +#zk_evm = { path = "../../../../zk_evm" } + +num = { version = "0.3.1", features = ["serde"] } +bigdecimal = { version = "=0.2.0", features = ["serde"]} +serde = { version = "1.0", features = ["derive"] } +tokio = { version = "1", features = ["time"] } +anyhow = "1.0" +thiserror = "1.0" +futures = "0.3" +hex = "0.4" +envy = "0.4" + +[dev-dependencies] +serde_json = "1.0.0" + diff 
--git a/core/lib/utils/src/bytecode.rs b/core/lib/utils/src/bytecode.rs new file mode 100644 index 000000000000..01802b826b1e --- /dev/null +++ b/core/lib/utils/src/bytecode.rs @@ -0,0 +1,55 @@ +use zksync_basic_types::H256; + +use crate::bytes_to_chunks; + +const MAX_BYTECODE_LENGTH_IN_WORDS: usize = (1 << 16) - 1; +const MAX_BYTECODE_LENGTH_BYTES: usize = MAX_BYTECODE_LENGTH_IN_WORDS * 32; + +#[derive(Debug, thiserror::Error)] +pub enum InvalidBytecodeError { + #[error("Bytecode too long: {0} bytes, while max {1} allowed")] + BytecodeTooLong(usize, usize), + #[error("Bytecode has even number of 32-byte words")] + BytecodeLengthInWordsIsEven, + #[error("Bytecode length is not divisible by 32")] + BytecodeLengthIsNotDivisibleBy32, +} + +pub fn validate_bytecode(code: &[u8]) -> Result<(), InvalidBytecodeError> { + let bytecode_len = code.len(); + + if bytecode_len > MAX_BYTECODE_LENGTH_BYTES { + return Err(InvalidBytecodeError::BytecodeTooLong( + bytecode_len, + MAX_BYTECODE_LENGTH_BYTES, + )); + } + + if bytecode_len % 32 != 0 { + return Err(InvalidBytecodeError::BytecodeLengthIsNotDivisibleBy32); + } + + let bytecode_len_words = bytecode_len / 32; + + if bytecode_len_words % 2 == 0 { + return Err(InvalidBytecodeError::BytecodeLengthInWordsIsEven); + } + + Ok(()) +} + +pub fn hash_bytecode(code: &[u8]) -> H256 { + let chunked_code = bytes_to_chunks(code); + let hash = zk_evm::zkevm_opcode_defs::utils::bytecode_to_code_hash(&chunked_code) + .expect("Invalid bytecode"); + + H256(hash) +} + +pub fn bytecode_len_in_words(bytecodehash: &H256) -> u16 { + u16::from_be_bytes([bytecodehash[2], bytecodehash[3]]) +} + +pub fn bytecode_len_in_bytes(bytecodehash: H256) -> usize { + bytecode_len_in_words(&bytecodehash) as usize * 32 +} diff --git a/core/lib/utils/src/convert.rs b/core/lib/utils/src/convert.rs new file mode 100644 index 000000000000..959f3fd2c8f3 --- /dev/null +++ b/core/lib/utils/src/convert.rs @@ -0,0 +1,261 @@ +use bigdecimal::BigDecimal; +use num::{ + 
bigint::ToBigInt, + rational::Ratio, + traits::{sign::Signed, Pow}, + BigUint, +}; +use std::convert::TryInto; +use zksync_basic_types::{Address, H256, U128, U256}; + +pub fn u256_to_big_decimal(value: U256) -> BigDecimal { + let ratio = Ratio::new_raw(u256_to_biguint(value), BigUint::from(1u8)); + ratio_to_big_decimal(&ratio, 80) +} + +pub fn ratio_to_big_decimal(num: &Ratio, precision: usize) -> BigDecimal { + let bigint = round_precision_raw_no_div(num, precision) + .to_bigint() + .unwrap(); + BigDecimal::new(bigint, precision as i64) +} + +pub fn ratio_to_big_decimal_normalized( + num: &Ratio, + precision: usize, + min_precision: usize, +) -> BigDecimal { + let normalized = ratio_to_big_decimal(num, precision).normalized(); + let min_scaled = normalized.with_scale(min_precision as i64); + normalized.max(min_scaled) +} + +pub fn big_decimal_to_ratio(num: &BigDecimal) -> Result, anyhow::Error> { + let (big_int, exp) = num.as_bigint_and_exponent(); + anyhow::ensure!(!big_int.is_negative(), "BigDecimal should be unsigned"); + let big_uint = big_int.to_biguint().unwrap(); + let ten_pow = BigUint::from(10_u32).pow(exp as u128); + Ok(Ratio::new(big_uint, ten_pow)) +} + +fn round_precision_raw_no_div(num: &Ratio, precision: usize) -> BigUint { + let ten_pow = BigUint::from(10u32).pow(precision); + (num * ten_pow).round().to_integer() +} + +/// Converts `U256` into the corresponding `BigUint` value. +pub fn u256_to_biguint(value: U256) -> BigUint { + let mut bytes = [0u8; 32]; + value.to_little_endian(&mut bytes); + BigUint::from_bytes_le(&bytes) +} + +/// Converts `U128` into the corresponding `BigUint` value. +pub fn u128_to_biguint(value: U128) -> BigUint { + let mut bytes = [0u8; 16]; + value.to_little_endian(&mut bytes); + BigUint::from_bytes_le(&bytes) +} + +/// Converts `BigUint` value into the corresponding `U256` value. 
+pub fn biguint_to_u256(value: BigUint) -> U256 { + let bytes = value.to_bytes_le(); + U256::from_little_endian(&bytes) +} + +/// Converts `BigDecimal` value into the corresponding `U256` value. +pub fn bigdecimal_to_u256(value: BigDecimal) -> U256 { + let bigint = value.with_scale(0).into_bigint_and_exponent().0; + biguint_to_u256(bigint.to_biguint().unwrap()) +} + +fn ensure_chunkable(bytes: &[u8]) { + assert!( + bytes.len() % 32 == 0, + "Bytes must be divisible by 32 to split into chunks" + ); +} + +pub fn h256_to_u256(num: H256) -> U256 { + U256::from_big_endian(num.as_bytes()) +} + +pub fn address_to_h256(address: &Address) -> H256 { + let mut buffer = [0u8; 32]; + buffer[12..].copy_from_slice(address.as_bytes()); + H256(buffer) +} + +pub fn address_to_u256(address: &Address) -> U256 { + h256_to_u256(address_to_h256(address)) +} + +pub fn bytes_to_chunks(bytes: &[u8]) -> Vec<[u8; 32]> { + ensure_chunkable(bytes); + bytes + .chunks(32) + .map(|el| { + let mut chunk = [0u8; 32]; + chunk.copy_from_slice(el); + chunk + }) + .collect() +} + +pub fn le_chunks_to_words(chunks: Vec<[u8; 32]>) -> Vec { + chunks + .into_iter() + .map(|el| U256::from_little_endian(&el)) + .collect() +} + +pub fn be_chunks_to_words(chunks: Vec<[u8; 32]>) -> Vec { + chunks + .into_iter() + .map(|el| U256::from_big_endian(&el)) + .collect() +} + +pub fn bytes_to_le_words(vec: Vec) -> Vec { + ensure_chunkable(&vec); + vec.chunks(32).map(U256::from_little_endian).collect() +} + +pub fn bytes_to_be_words(vec: Vec) -> Vec { + ensure_chunkable(&vec); + vec.chunks(32).map(U256::from_big_endian).collect() +} + +pub fn u256_to_h256(num: U256) -> H256 { + let mut bytes = [0u8; 32]; + num.to_big_endian(&mut bytes); + H256::from_slice(&bytes) +} + +/// Converts `U256` value into the Address +pub fn u256_to_account_address(value: &U256) -> Address { + let mut bytes = [0u8; 32]; + value.to_big_endian(&mut bytes); + + Address::from_slice(&bytes[12..]) +} + +/// Converts `H256` value into the Address +pub 
fn h256_to_account_address(value: &H256) -> Address { + Address::from_slice(&value.as_bytes()[12..]) +} + +pub fn be_bytes_to_safe_address(bytes: &[u8]) -> Option
{ + if bytes.len() < 20 { + return None; + } + + let (zero_bytes, address_bytes) = bytes.split_at(bytes.len() - 20); + + if zero_bytes.iter().any(|b| *b != 0) { + None + } else { + Some(Address::from_slice(address_bytes)) + } +} + +/// Converts `h256` value as BE into the u32 +pub fn h256_to_u32(value: H256) -> u32 { + let be_u32_bytes: [u8; 4] = value[28..].try_into().unwrap(); + u32::from_be_bytes(be_u32_bytes) +} + +/// Converts u32 into the h256 as BE bytes +pub fn u32_to_h256(value: u32) -> H256 { + let mut result = [0u8; 32]; + result[28..].copy_from_slice(&value.to_be_bytes()); + H256(result) +} + +/// Converts `h256` value as BE into the u64 +pub fn h256_to_u64(value: H256) -> u64 { + let be_u64_bytes: [u8; 8] = value[24..].try_into().unwrap(); + u64::from_be_bytes(be_u64_bytes) +} + +/// Converts u64 into the h256 as BE bytes +pub fn u64_to_h256(value: u64) -> H256 { + let mut result = [0u8; 32]; + result[24..].copy_from_slice(&value.to_be_bytes()); + H256(result) +} + +/// Converts `U256` value into bytes array +pub fn u256_to_bytes_be(value: &U256) -> Vec { + let mut bytes = vec![0u8; 32]; + value.to_big_endian(bytes.as_mut_slice()); + bytes +} + +#[cfg(test)] +mod test { + use super::*; + use num::BigInt; + use std::str::FromStr; + + #[test] + fn test_ratio_to_big_decimal() { + let ratio = Ratio::from_integer(BigUint::from(0u32)); + let dec = ratio_to_big_decimal(&ratio, 1); + assert_eq!(dec.to_string(), "0.0"); + let ratio = Ratio::from_integer(BigUint::from(1234u32)); + let dec = ratio_to_big_decimal(&ratio, 7); + assert_eq!(dec.to_string(), "1234.0000000"); + // 4 divided by 9 is 0.(4). + let ratio = Ratio::new(BigUint::from(4u32), BigUint::from(9u32)); + let dec = ratio_to_big_decimal(&ratio, 12); + assert_eq!(dec.to_string(), "0.444444444444"); + // First 7 decimal digits of pi. 
+ let ratio = Ratio::new(BigUint::from(52163u32), BigUint::from(16604u32)); + let dec = ratio_to_big_decimal(&ratio, 6); + assert_eq!(dec.to_string(), "3.141592"); + } + + #[test] + fn test_ratio_to_big_decimal_normalized() { + let ratio = Ratio::from_integer(BigUint::from(10u32)); + let dec = ratio_to_big_decimal_normalized(&ratio, 100, 2); + assert_eq!(dec.to_string(), "10.00"); + + // First 7 decimal digits of pi. + let ratio = Ratio::new(BigUint::from(52163u32), BigUint::from(16604u32)); + let dec = ratio_to_big_decimal_normalized(&ratio, 6, 2); + assert_eq!(dec.to_string(), "3.141592"); + + // 4 divided by 9 is 0.(4). + let ratio = Ratio::new(BigUint::from(4u32), BigUint::from(9u32)); + let dec = ratio_to_big_decimal_normalized(&ratio, 12, 2); + assert_eq!(dec.to_string(), "0.444444444444"); + } + + #[test] + fn test_big_decimal_to_ratio() { + // Expect unsigned number. + let dec = BigDecimal::from(-1); + assert!(big_decimal_to_ratio(&dec).is_err()); + let expected = Ratio::from_integer(BigUint::from(0u32)); + let dec = BigDecimal::from(0); + let ratio = big_decimal_to_ratio(&dec).unwrap(); + assert_eq!(ratio, expected); + let expected = Ratio::new(BigUint::from(1234567u32), BigUint::from(10000u32)); + let dec = BigDecimal::from_str("123.4567").unwrap(); + let ratio = big_decimal_to_ratio(&dec).unwrap(); + assert_eq!(ratio, expected); + } + + #[test] + fn test_bigdecimal_to_u256() { + let value = BigDecimal::from(100u32); + let expected = U256::from(100u32); + assert_eq!(bigdecimal_to_u256(value), expected); + + let value = BigDecimal::new(BigInt::from(100), -2); + let expected = U256::from(10000u32); + assert_eq!(bigdecimal_to_u256(value), expected); + } +} diff --git a/core/lib/utils/src/env_tools.rs b/core/lib/utils/src/env_tools.rs new file mode 100644 index 000000000000..703d9f338f74 --- /dev/null +++ b/core/lib/utils/src/env_tools.rs @@ -0,0 +1,33 @@ +use std::{env, str::FromStr}; + +/// Obtains the environment variable value. 
+/// Panics if there is no environment variable with provided name set. +pub fn get_env(name: &str) -> String { + env::var(name).unwrap_or_else(|e| panic!("Env var {} missing, {}", name, e)) +} + +/// Obtains the environment variable value and parses it using the `FromStr` type implementation. +/// Panics if there is no environment variable with provided name set, or the value cannot be parsed. +pub fn parse_env(name: &str) -> F +where + F: FromStr, + F::Err: std::fmt::Debug, +{ + get_env(name) + .parse() + .unwrap_or_else(|e| panic!("Failed to parse environment variable {}: {:?}", name, e)) +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_env_tools() { + const KEY: &str = "KEY"; + // Our test environment variable. + env::set_var(KEY, "123"); + assert_eq!(get_env(KEY), "123"); + assert_eq!(parse_env::(KEY), 123); + } +} diff --git a/core/lib/utils/src/format.rs b/core/lib/utils/src/format.rs new file mode 100644 index 000000000000..9d15d4c358e7 --- /dev/null +++ b/core/lib/utils/src/format.rs @@ -0,0 +1,78 @@ +// Built-in deps +use std::collections::VecDeque; +use std::string::ToString; +// External deps +// Workspace deps + +/// Formats amount in wei to tokens with precision. +/// Behaves just like ethers.utils.formatUnits +pub fn format_units(wei: impl ToString, units: u8) -> String { + let mut chars: VecDeque = wei.to_string().chars().collect(); + + while chars.len() < units as usize { + chars.push_front('0'); + } + chars.insert(chars.len() - units as usize, '.'); + if *chars.front().unwrap() == '.' { + chars.push_front('0'); + } + while *chars.back().unwrap() == '0' { + chars.pop_back(); + } + if *chars.back().unwrap() == '.' { + chars.push_back('0'); + } + chars.iter().collect() +} + +/// Formats amount in wei to tokens. 
+/// Behaves just like js ethers.utils.formatEther +pub fn format_ether(wei: impl ToString) -> String { + format_units(wei, 18) +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_format_units() { + // Test vector of (decimals, wei input, expected output) + let vals = vec![ + (0, "1000000000000000100000", "1000000000000000100000.0"), + (1, "0", "0.0"), + (1, "11000000000000000000", "1100000000000000000.0"), + (2, "0", "0.0"), + (2, "1000000000000000100000", "10000000000000001000.0"), + (4, "10001000000", "1000100.0"), + (4, "10100000000000000000000", "1010000000000000000.0"), + (4, "110", "0.011"), + (6, "1000000000000000100000", "1000000000000000.1"), + (8, "0", "0.0"), + (8, "10100000000000000000000", "101000000000000.0"), + (8, "110", "0.0000011"), + (9, "10000000000000000001", "10000000000.000000001"), + (9, "11000000", "0.011"), + (9, "11000000000000000000", "11000000000.0"), + (10, "10001000000", "1.0001"), + (10, "20000000000000000000000", "2000000000000.0"), + (11, "0", "0.0"), + (11, "10100000000000000000000", "101000000000.0"), + (12, "1000000000000000100000", "1000000000.0000001"), + (12, "10001000000", "0.010001"), + (12, "10010000000", "0.01001"), + (12, "110", "0.00000000011"), + (13, "10010000000", "0.001001"), + (14, "10010000000", "0.0001001"), + (14, "110", "0.0000000000011"), + (15, "0", "0.0"), + (17, "1000000000000000100000", "10000.000000000001"), + (17, "10001000000", "0.00000010001"), + (18, "1000000000000000100000", "1000.0000000000001"), + ]; + + for (dec, input, output) in vals { + assert_eq!(format_units(&input, dec), output); + } + } +} diff --git a/core/lib/utils/src/lib.rs b/core/lib/utils/src/lib.rs new file mode 100644 index 000000000000..4875d3b7ea3a --- /dev/null +++ b/core/lib/utils/src/lib.rs @@ -0,0 +1,17 @@ +//! Various helpers used in the zkSync stack. 
+ +pub mod bytecode; +mod convert; +mod env_tools; +mod macros; +pub mod misc; +pub mod panic_extractor; +pub mod panic_notify; +mod serde_wrappers; +pub mod test_utils; +pub mod time; +pub use convert::*; +pub use env_tools::*; +pub use macros::*; +pub use misc::*; +pub use serde_wrappers::*; diff --git a/core/lib/utils/src/macros.rs b/core/lib/utils/src/macros.rs new file mode 100644 index 000000000000..c72cab2fae43 --- /dev/null +++ b/core/lib/utils/src/macros.rs @@ -0,0 +1,35 @@ +//! Miscellaneous macros used across project. + +/// Implements `From` trait for given types allowing to convert wrapper to inner and vice versa. +/// If prefix `deref` supplied, also implements `From` trait for references. +#[macro_export] +macro_rules! impl_from_wrapper { + ($wrapper: ty, $inner: ty $(where for $(<$($gen: ident),+>)?: $($where: tt)+)?) => { + impl $($(<$($gen),+>)*)? From<$inner> for $wrapper $(where $($where)+)? { + fn from(inner: $inner) -> Self { + Self(inner) + } + } + + impl $($(<$($gen),+>)*)? From<$wrapper> for $inner $(where $($where)+)? { + fn from(wrapper: $wrapper) -> Self { + wrapper.0 + } + } + }; + (deref $wrapper: ty, $inner: ty $(where for $(<$($gen: ident),+>)?: $($where: tt)+)?) => { + $crate::impl_from_wrapper!($wrapper, $inner $(where for $(<$($gen),+>)*: $($where)+)?); + + impl $($(<$($gen),+>)*)? From<&$inner> for $wrapper $(where $($where)+)? { + fn from(inner: &$inner) -> Self { + Self(*inner) + } + } + + impl $($(<$($gen),+>)*)? From<&$wrapper> for $inner $(where $($where)+)? 
{ + fn from(wrapper: &$wrapper) -> Self { + (*wrapper).0 + } + } + }; +} diff --git a/core/lib/utils/src/misc.rs b/core/lib/utils/src/misc.rs new file mode 100644 index 000000000000..baac59b11ba7 --- /dev/null +++ b/core/lib/utils/src/misc.rs @@ -0,0 +1,14 @@ +use zksync_basic_types::web3::signing::keccak256; +use zksync_basic_types::{MiniblockNumber, H256, U256}; + +pub fn miniblock_hash(miniblock_number: MiniblockNumber) -> H256 { + H256(keccak256(&miniblock_number.0.to_be_bytes())) +} + +pub const fn ceil_div(a: u64, b: u64) -> u64 { + (a + b - 1) / b +} + +pub fn ceil_div_u256(a: U256, b: U256) -> U256 { + (a + b - U256::from(1)) / b +} diff --git a/core/lib/utils/src/panic_extractor.rs b/core/lib/utils/src/panic_extractor.rs new file mode 100644 index 000000000000..a6fb7b69c7d8 --- /dev/null +++ b/core/lib/utils/src/panic_extractor.rs @@ -0,0 +1,16 @@ +use tokio::task::JoinError; + +pub fn try_extract_panic_message(err: JoinError) -> String { + if err.is_panic() { + let panic = err.into_panic(); + if let Some(panic_string) = panic.downcast_ref::<&'static str>() { + panic_string.to_string() + } else if let Some(panic_string) = panic.downcast_ref::() { + panic_string.to_string() + } else { + "Unknown panic".to_string() + } + } else { + "Cancelled task".to_string() + } +} diff --git a/core/lib/utils/src/panic_notify.rs b/core/lib/utils/src/panic_notify.rs new file mode 100644 index 000000000000..a782e48c9928 --- /dev/null +++ b/core/lib/utils/src/panic_notify.rs @@ -0,0 +1,25 @@ +// Built-in deps +// External uses +use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt}; +use tokio::task::JoinHandle; +// Local uses + +/// If its placed inside thread::spawn closure it will notify channel when this thread panics. 
+pub struct ThreadPanicNotify(pub mpsc::Sender); + +impl Drop for ThreadPanicNotify { + fn drop(&mut self) { + if std::thread::panicking() { + block_on(self.0.send(true)).unwrap(); + } + } +} + +pub fn spawn_panic_handler() -> (JoinHandle<()>, mpsc::Sender) { + let (panic_sender, mut panic_receiver) = mpsc::channel(1); + + let handler = tokio::spawn(async move { + let _ = panic_receiver.next().await; + }); + (handler, panic_sender) +} diff --git a/core/lib/utils/src/serde_wrappers.rs b/core/lib/utils/src/serde_wrappers.rs new file mode 100644 index 000000000000..16215b7087e0 --- /dev/null +++ b/core/lib/utils/src/serde_wrappers.rs @@ -0,0 +1,242 @@ +use std::str::FromStr; + +use bigdecimal::BigDecimal; +use num::{bigint::ToBigInt, rational::Ratio, BigUint}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; + +use crate::convert::*; + +#[derive(Clone, Debug)] +pub struct UnsignedRatioSerializeAsDecimal; +impl UnsignedRatioSerializeAsDecimal { + pub fn serialize(value: &Ratio, serializer: S) -> Result + where + S: Serializer, + { + if serializer.is_human_readable() { + BigDecimal::serialize(&ratio_to_big_decimal(value, 18), serializer) + } else { + value.serialize(serializer) + } + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + if deserializer.is_human_readable() { + // First, deserialize a string value. It is expected to be a + // hexadecimal representation of `BigDecimal`. + let big_decimal_string = BigDecimal::deserialize(deserializer)?; + + big_decimal_to_ratio(&big_decimal_string).map_err(de::Error::custom) + } else { + Ratio::::deserialize(deserializer) + } + } + + pub fn deserialize_from_str_with_dot(input: &str) -> Result, anyhow::Error> { + big_decimal_to_ratio(&BigDecimal::from_str(input)?) 
+ } + + pub fn serialize_to_str_with_dot(num: &Ratio, precision: usize) -> String { + ratio_to_big_decimal(num, precision) + .to_string() + .trim_end_matches('0') + .to_string() + } +} + +/// Used to serialize BigUint as radix 10 string. +#[derive(Clone, Debug)] +pub struct BigUintSerdeAsRadix10Str; + +impl BigUintSerdeAsRadix10Str { + pub fn serialize(val: &BigUint, serializer: S) -> Result + where + S: Serializer, + { + let big_dec = BigDecimal::from(val.to_bigint().unwrap()); + BigDecimal::serialize(&big_dec, serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + use serde::de::Error; + BigDecimal::deserialize(deserializer).and_then(|bigdecimal| { + let big_int = bigdecimal + .to_bigint() + .ok_or_else(|| Error::custom("Expected integer value"))?; + big_int + .to_biguint() + .ok_or_else(|| Error::custom("Expected positive value")) + }) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Hash, Default)] +pub struct BigUintSerdeWrapper(#[serde(with = "BigUintSerdeAsRadix10Str")] pub BigUint); + +impl From for BigUintSerdeWrapper { + fn from(uint: BigUint) -> BigUintSerdeWrapper { + BigUintSerdeWrapper(uint) + } +} + +/// Trait for specifying prefix for bytes to hex serialization +pub trait Prefix { + fn prefix() -> &'static str; +} + +/// "sync-bl:" hex prefix +pub struct SyncBlockPrefix; +impl Prefix for SyncBlockPrefix { + fn prefix() -> &'static str { + "sync-bl:" + } +} + +/// "0x" hex prefix +pub struct ZeroxPrefix; +impl Prefix for ZeroxPrefix { + fn prefix() -> &'static str { + "0x" + } +} + +/// "sync-tx:" hex prefix +pub struct SyncTxPrefix; +impl Prefix for SyncTxPrefix { + fn prefix() -> &'static str { + "sync-tx:" + } +} + +/// Used to annotate `Vec` fields that you want to serialize like hex-encoded string with prefix +/// Use this struct in annotation like that `[serde(with = "BytesToHexSerde::"]` +/// where T is concrete prefix type (e.g. 
`SyncBlockPrefix`) +pub struct BytesToHexSerde

{ + _marker: std::marker::PhantomData

, +} + +impl BytesToHexSerde

{ + pub fn serialize(value: &[u8], serializer: S) -> Result + where + S: Serializer, + { + // First, serialize to hexadecimal string. + let hex_value = format!("{}{}", P::prefix(), hex::encode(value)); + + // Then, serialize it using `Serialize` trait implementation for `String`. + String::serialize(&hex_value, serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + let deserialized_string = String::deserialize(deserializer)?; + + if let Some(deserialized_string) = deserialized_string.strip_prefix(P::prefix()) { + hex::decode(deserialized_string).map_err(de::Error::custom) + } else { + Err(de::Error::custom(format!( + "string value missing prefix: {:?}", + P::prefix() + ))) + } + } +} + +pub type ZeroPrefixHexSerde = BytesToHexSerde; + +/// Used to annotate `Option>` fields that you want to serialize like hex-encoded string with prefix +/// Use this struct in annotation like that `[serde(with = "OptionBytesToHexSerde::"]` +/// where T is concrete prefix type (e.g. `SyncBlockPrefix`) +pub struct OptionBytesToHexSerde

{ + _marker: std::marker::PhantomData

, +} + +impl OptionBytesToHexSerde

{ + pub fn serialize(value: &Option>, serializer: S) -> Result + where + S: Serializer, + { + // First, serialize to hexadecimal string. + let hex_value = value + .as_ref() + .map(|val| format!("{}{}", P::prefix(), hex::encode(val))); + + // Then, serialize it using `Serialize` trait implementation for `String`. + Option::serialize(&hex_value, serializer) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result>, D::Error> + where + D: Deserializer<'de>, + { + // First, deserialize a string value. It is expected to be a + // hexadecimal representation of `Vec`. + let optional_deserialized_string: Option = Option::deserialize(deserializer)?; + + optional_deserialized_string + .map(|s| { + if let Some(hex_str) = s.strip_prefix(P::prefix()) { + hex::decode(hex_str).map_err(de::Error::custom) + } else { + Err(de::Error::custom(format!( + "string value missing prefix: {:?}", + P::prefix() + ))) + } + }) + .transpose() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize, Default, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct ZeroPrefixSerdeWrapper(#[serde(with = "ZeroPrefixHexSerde")] pub Vec); + +impl From> for ZeroPrefixSerdeWrapper { + fn from(bytes: Vec) -> ZeroPrefixSerdeWrapper { + ZeroPrefixSerdeWrapper(bytes) + } +} + +#[cfg(test)] +mod test { + use super::*; + + /// Tests that `Ratio` serializer works correctly. + #[test] + fn test_ratio_serialize_as_decimal() { + #[derive(Clone, Serialize, Deserialize)] + struct RatioSerdeWrapper( + #[serde(with = "UnsignedRatioSerializeAsDecimal")] pub Ratio, + ); + // It's essential that this number is a finite decimal, otherwise the precision will be lost + // and the assertion will fail. 
+ let expected = RatioSerdeWrapper(Ratio::new( + BigUint::from(120315391195132u64), + BigUint::from(1250000000u64), + )); + let value = + serde_json::to_value(expected.clone()).expect("cannot serialize Ratio as Decimal"); + let ratio: RatioSerdeWrapper = + serde_json::from_value(value).expect("cannot deserialize Ratio from Decimal"); + assert_eq!(expected.0, ratio.0); + } + + /// Tests that `BigUint` serializer works correctly. + #[test] + fn test_serde_big_uint_wrapper() { + let expected = BigUint::from(u64::MAX); + let wrapper = BigUintSerdeWrapper::from(expected.clone()); + let value = serde_json::to_value(wrapper).expect("cannot serialize BigUintSerdeWrapper"); + let uint: BigUintSerdeWrapper = + serde_json::from_value(value).expect("cannot deserialize BigUintSerdeWrapper"); + assert_eq!(uint.0, expected); + } +} diff --git a/core/lib/utils/src/test_utils.rs b/core/lib/utils/src/test_utils.rs new file mode 100644 index 000000000000..00f57491b7eb --- /dev/null +++ b/core/lib/utils/src/test_utils.rs @@ -0,0 +1,41 @@ +use serde::Deserialize; + +#[derive(Debug, Clone, Deserialize)] +pub struct LoadnextContractExecutionParams { + pub reads: usize, + pub writes: usize, + pub events: usize, + pub hashes: usize, + pub recursive_calls: usize, + pub deploys: usize, +} + +impl LoadnextContractExecutionParams { + pub fn from_env() -> Option { + envy::prefixed("CONTRACT_EXECUTION_PARAMS_").from_env().ok() + } + + pub fn empty() -> Self { + Self { + reads: 0, + writes: 0, + events: 0, + hashes: 0, + recursive_calls: 0, + deploys: 0, + } + } +} + +impl Default for LoadnextContractExecutionParams { + fn default() -> Self { + Self { + reads: 10, + writes: 10, + events: 10, + hashes: 10, + recursive_calls: 1, + deploys: 1, + } + } +} diff --git a/core/lib/utils/src/time.rs b/core/lib/utils/src/time.rs new file mode 100644 index 000000000000..70372db34f49 --- /dev/null +++ b/core/lib/utils/src/time.rs @@ -0,0 +1,19 @@ +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +pub fn 
seconds_since_epoch() -> u64 { + duration_since_epoch().as_secs() +} + +pub fn millis_since(since: u64) -> u64 { + (millis_since_epoch() - since as u128 * 1000) as u64 +} + +pub fn millis_since_epoch() -> u128 { + duration_since_epoch().as_millis() +} + +fn duration_since_epoch() -> Duration { + SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Incorrect system time") +} diff --git a/core/lib/vlog/Cargo.toml b/core/lib/vlog/Cargo.toml new file mode 100644 index 000000000000..f1ffe013fc12 --- /dev/null +++ b/core/lib/vlog/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "vlog" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # This is an utility crate, not to be used by libraries. + +[dependencies] +chrono = "0.4" +tracing = { version = "0.1.26", features = ["log"] } +tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter", "time", "json"] } +sentry = "0" +opentelemetry = { version = "0.17" } +opentelemetry-otlp = { version = "0.10", features = ["http-proto", "reqwest-blocking-client"] } +opentelemetry-semantic-conventions = "0.9" +tracing-opentelemetry = "0.17.2" +serde_json = "1.0" diff --git a/core/lib/vlog/src/lib.rs b/core/lib/vlog/src/lib.rs new file mode 100644 index 000000000000..89f08a781400 --- /dev/null +++ b/core/lib/vlog/src/lib.rs @@ -0,0 +1,317 @@ +//! A set of logging macros that print not only timestamp and log level, +//! but also filename, line and column. +//! +//! They behave just like usual tracing::warn, tracing::info, etc. +//! For warn and error macros we are adding file line and column to tracing variables +//! +//! The format of the logs in stdout can be `plain` or` json` and is set by the `MISC_LOG_FORMAT` env variable. +//! +//! 
Full documentation for the `tracing` crate here https://docs.rs/tracing/ +//! +//! Integration with sentry for catching errors and react on them immediately +//! https://docs.sentry.io/platforms/rust/ +//! + +use std::{borrow::Cow, str::FromStr}; + +use opentelemetry::sdk::{resource::Resource, trace::Sampler}; +use opentelemetry::trace::{TraceContextExt, TraceId}; +use opentelemetry::KeyValue; +use opentelemetry_otlp::WithExportConfig; +use sentry::protocol::Event; +use sentry::{types::Dsn, ClientInitGuard, ClientOptions}; +use std::backtrace::Backtrace; +use tracing_opentelemetry::OpenTelemetrySpanExt; +use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt}; + +pub use chrono as __chrono; +pub use sentry as __sentry; +pub use tracing as __tracing; +pub use tracing::{debug, info, log, trace}; + +fn get_trace_id() -> TraceId { + let span = tracing::span::Span::current(); + span.context().span().span_context().trace_id() +} + +#[macro_export] +macro_rules! warn { + ($fmt:expr) => {{ + $crate::__tracing::warn!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + ); + // $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + // fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + // message: Some(format!($fmt)), + // level: $crate::__sentry::Level::Warning, + // ..Default::default() + // }); + }}; + + ($fmt:expr, $($args:tt)*) => { + { + $crate::__tracing::warn!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + $($args)* + ); + // $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + // fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + // message: Some(format!($fmt, $($args)*)), + // level: $crate::__sentry::Level::Warning, + // ..Default::default() + // }); + } + }; +} + +#[macro_export] +macro_rules! 
panic { + ($fmt:expr) => {{ + $crate::__tracing::error!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + ); + $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + message: Some(format!($fmt)), + level: $crate::__sentry::Level::Fatal, + ..Default::default() + }); + }}; + ($fmt:expr, $($args:tt)*) => { + { + $crate::__tracing::error!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + $($args)* + ); + $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + message: Some(format!($fmt, $($args)*)), + level: $crate::__sentry::Level::Fatal, + ..Default::default() + }); + } + }; +} + +#[macro_export] +macro_rules! error { + ($fmt:expr) => {{ + $crate::__tracing::error!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + ); + $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + message: Some(format!($fmt)), + level: $crate::__sentry::Level::Error, + ..Default::default() + }); + }}; + ($fmt:expr, $($args:tt)*) => { + { + $crate::__tracing::error!( + file=file!(), + line=line!(), + column=column!(), + $fmt, + $($args)* + ); + $crate::__sentry::capture_event($crate::__sentry::protocol::Event { + fingerprint: ::std::borrow::Cow::Borrowed(&[::std::borrow::Cow::Borrowed($fmt)]), + message: Some(format!($fmt, $($args)*)), + level: $crate::__sentry::Level::Error, + ..Default::default() + }); + } + }; +} + +fn get_sentry_url() -> Option { + if let Ok(sentry_url) = std::env::var("MISC_SENTRY_URL") { + if let Ok(sentry_url) = Dsn::from_str(sentry_url.as_str()) { + return Some(sentry_url); + } + } + None +} + +fn get_otlp_url() -> Option { + std::env::var("MISC_OTLP_URL").ok().and_then(|url| { + if url.to_lowercase() == "unset" { + None + } else { + 
Some(url) + } + }) +} + +pub const DEFAULT_SAMPLING_RATIO: f64 = 0.1; + +fn get_sampling_ratio() -> f64 { + std::env::var("MISC_SAMPLING_RATIO") + .map(|x| x.as_str().parse::().unwrap()) + .unwrap_or(DEFAULT_SAMPLING_RATIO) +} + +/// Initialize logging with tracing and set up log format +/// +/// If the sentry URL is provided via an environment variable, this function will also initialize sentry. +/// Returns a sentry client guard. The full description can be found in the official documentation: +/// https://docs.sentry.io/platforms/rust/#configure +pub fn init() -> Option { + let log_format = std::env::var("MISC_LOG_FORMAT").unwrap_or_else(|_| "plain".to_string()); + let service_name = + std::env::var("SERVICE_NAME").unwrap_or_else(|_| "UNKNOWN_SERVICE".to_string()); + let namespace_name = + std::env::var("POD_NAMESPACE").unwrap_or_else(|_| "UNKNOWN_NAMESPACE".to_string()); + let pod_name = std::env::var("POD_NAME").unwrap_or_else(|_| "UNKNOWN_POD".to_string()); + let opentelemetry = get_otlp_url().map(|url| { + let otlp_exporter = opentelemetry_otlp::new_exporter().http().with_endpoint(url); + let sampler = Sampler::TraceIdRatioBased(get_sampling_ratio()); + + let tracer = opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(otlp_exporter) + .with_trace_config( + opentelemetry::sdk::trace::config() + .with_resource(Resource::new(vec![ + KeyValue::new( + opentelemetry_semantic_conventions::resource::SERVICE_NAME, + service_name, + ), + KeyValue::new( + opentelemetry_semantic_conventions::resource::K8S_NAMESPACE_NAME, + namespace_name, + ), + KeyValue::new( + opentelemetry_semantic_conventions::resource::K8S_POD_NAME, + pod_name, + ), + ])) + .with_sampler(sampler), + ) + .install_simple() + .unwrap(); + tracing_opentelemetry::layer().with_tracer(tracer) + }); + match log_format.as_str() { + "plain" => { + if let Some(opentelemetry) = opentelemetry { + tracing_subscriber::registry() + .with(opentelemetry) + .with(fmt::Layer::default()) + 
.with(tracing_subscriber::EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::registry() + .with(fmt::Layer::default()) + .with(tracing_subscriber::EnvFilter::from_default_env()) + .init(); + } + } + "json" => { + let timer = tracing_subscriber::fmt::time::UtcTime::rfc_3339(); + // must be set before sentry hook for sentry to function + install_pretty_panic_hook(); + if let Some(opentelemetry) = opentelemetry { + tracing_subscriber::registry() + .with(opentelemetry) + .with(fmt::Layer::default().with_timer(timer).json()) + .with(tracing_subscriber::EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::registry() + .with(fmt::Layer::default().with_timer(timer).json()) + .with(tracing_subscriber::EnvFilter::from_default_env()) + .init(); + } + } + _ => panic!("MISC_LOG_FORMAT has an unexpected value {}", log_format), + }; + + get_sentry_url().map(|sentry_url| { + let l1_network = std::env::var("CHAIN_ETH_NETWORK").expect("Must be set"); + let l2_network = std::env::var("CHAIN_ETH_ZKSYNC_NETWORK").expect("Must be set"); + + let options = sentry::ClientOptions { + release: sentry::release_name!(), + environment: Some(Cow::from(format!("{} - {}", l1_network, l2_network))), + attach_stacktrace: true, + ..Default::default() + } + .add_integration(TraceIdToSentry); + + sentry::init((sentry_url, options)) + }) +} + +/// Format panics like vlog::error +fn install_pretty_panic_hook() { + // This hook does not use the previous one set because it leads to 2 logs: + // the first is the default panic log and the second is from this code. 
To avoid this situation, + // hook must be installed first + std::panic::set_hook(Box::new(move |panic_info| { + let backtrace = Backtrace::capture(); + let timestamp = chrono::Utc::now(); + let panic_message = if let Some(s) = panic_info.payload().downcast_ref::() { + s.as_str() + } else if let Some(s) = panic_info.payload().downcast_ref::<&str>() { + s + } else { + "Panic occurred without additional info" + }; + + let panic_location = panic_info + .location() + .map(|val| val.to_string()) + .unwrap_or_else(|| "Unknown location".to_owned()); + + let backtrace_str = format!("{}", backtrace); + let timestamp_str = format!("{}", timestamp.format("%Y-%m-%dT%H:%M:%S%.fZ")); + + let trace_id = get_trace_id(); + println!( + "{}", + serde_json::json!({ + "timestamp": timestamp_str, + "trace_id": trace_id.to_string(), + "level": "CRITICAL", + "fields": { + "message": panic_message, + "location": panic_location, + "backtrace": backtrace_str, + } + }) + ); + })); +} + +struct TraceIdToSentry; + +impl sentry::Integration for TraceIdToSentry { + fn process_event( + &self, + mut event: Event<'static>, + _options: &ClientOptions, + ) -> Option> { + let trace_id = get_trace_id(); + event + .extra + .insert("trace_id".to_string(), trace_id.to_string().into()); + Some(event) + } +} diff --git a/core/lib/vm/Cargo.toml b/core/lib/vm/Cargo.toml new file mode 100644 index 000000000000..0c2e66d16761 --- /dev/null +++ b/core/lib/vm/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "vm" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zkevm-assembly = { git = "https://github.com/matter-labs/zkEVM-assembly.git", branch = "v1.3.1" } +zksync_crypto = { path = "../crypto", version = "1.0" } +zksync_types = { path = "../types", version = "1.0" } +zksync_utils = { path = 
"../utils", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } +zksync_state = {path = "../state", version = "1.0" } +zksync_storage = {path = "../storage", version = "1.0" } + +zk_evm = {git = "https://github.com/matter-labs/zk_evm.git", branch = "v1.3.1"} +zksync_contracts = { path = "../contracts" } + +hex = "0.4" +thiserror = "1.0" +itertools = "0.10" +once_cell = "1.7" +vlog = { path = "../../lib/vlog", version = "1.0" } +metrics = "0.20" + +tracing = "0.1" + +[dev-dependencies] +tempfile = "3.0.2" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" + diff --git a/core/lib/vm/src/bootloader_state.rs b/core/lib/vm/src/bootloader_state.rs new file mode 100644 index 000000000000..e6c572d6eb7f --- /dev/null +++ b/core/lib/vm/src/bootloader_state.rs @@ -0,0 +1,103 @@ +/// Intermediate bootloader-related VM state. +/// +/// Required to process transactions one by one (since we intercept the VM execution to execute +/// transactions and add new ones to the memory on the fly). +/// Think about it like a two-pointer scheme: one pointer (`free_tx_index`) tracks the end of the +/// initialized memory; while another (`tx_to_execute`) tracks our progess in this initialized memory. +/// This is required since it's possible to push several transactions to the bootloader memory and then +/// execute it one by one. +/// +/// Serves two purposes: +/// - Tracks where next tx should be pushed to in the bootloader memory. +/// - Tracks which transaction should be executed next. +#[derive(Debug, Default, Clone)] +pub(crate) struct BootloaderState { + /// Memory offset (in words) for the next transaction data. + free_tx_offset: usize, + /// ID of the next transaction to be executed. + /// See the structure doc-comment for a better explanation of purpose. + tx_to_execute: usize, + /// Vector that contains sizes of all pushed transactions. + tx_sizes: Vec, +} + +impl BootloaderState { + /// Creates an empty bootloader state. 
+ pub(crate) fn new() -> Self { + Self::default() + } + + /// Notifies the state about the fact that new transaction was pushed into the memory. + pub(crate) fn add_tx_data(&mut self, tx_size: usize) { + self.free_tx_offset += tx_size; + self.tx_sizes.push(tx_size); + } + + /// Returns the next "free" transaction index. + pub(crate) fn free_tx_index(&self) -> usize { + self.tx_sizes.len() + } + + /// Returns the next index of transaction to execute. + pub(crate) fn tx_to_execute(&self) -> usize { + self.tx_to_execute + } + + /// Returns the memory offset for the new transaction. + pub(crate) fn free_tx_offset(&self) -> usize { + self.free_tx_offset + } + + /// Returns the ID of the next transaction to be executed and increments the local transaction counter. + pub(crate) fn next_unexecuted_tx(&mut self) -> usize { + assert!( + self.tx_to_execute < self.tx_sizes.len(), + "Attempt to execute tx that was not pushed to memory. Tx ID: {}, txs in bootloader: {}", + self.tx_to_execute, + self.tx_sizes.len() + ); + + let old = self.tx_to_execute; + self.tx_to_execute += 1; + old + } + + /// Returns the size of the transaction with given index. + /// Panics if there is no such transaction. + #[allow(dead_code)] + pub(crate) fn get_tx_size(&self, tx_index: usize) -> usize { + self.tx_sizes[tx_index] + } +} + +#[cfg(test)] +mod tests { + use super::BootloaderState; + + #[test] + fn workflow() { + let mut state = BootloaderState::new(); + assert_eq!(state.free_tx_index(), 0); + assert_eq!(state.free_tx_offset(), 0); + + state.add_tx_data(2); + assert_eq!(state.free_tx_index(), 1); + assert_eq!(state.free_tx_offset(), 2); + + state.add_tx_data(4); + assert_eq!(state.free_tx_index(), 2); + assert_eq!(state.free_tx_offset(), 6); + + assert_eq!(state.next_unexecuted_tx(), 0); + assert_eq!(state.next_unexecuted_tx(), 1); + } + + #[test] + #[should_panic( + expected = "Attempt to execute tx that was not pushed to memory. 
Tx ID: 0, txs in bootloader: 0" + )] + fn get_not_pushed_tx() { + let mut state = BootloaderState::new(); + state.next_unexecuted_tx(); + } +} diff --git a/core/lib/vm/src/errors/bootloader_error.rs b/core/lib/vm/src/errors/bootloader_error.rs new file mode 100644 index 000000000000..bfbef44a42bd --- /dev/null +++ b/core/lib/vm/src/errors/bootloader_error.rs @@ -0,0 +1,58 @@ +#[derive(Debug)] +pub(crate) enum BootloaderErrorCode { + EthCall, + AccountTxValidationFailed, + FailedToChargeFee, + FromIsNotAnAccount, + FailedToCheckAccount, + UnacceptableGasPrice, + PayForTxFailed, + PrePaymasterPreparationFailed, + PaymasterValidationFailed, + FailedToSendFeesToTheOperator, + FailedToSetPrevBlockHash, + UnacceptablePubdataPrice, + TxValidationError, + MaxPriorityFeeGreaterThanMaxFee, + BaseFeeGreaterThanMaxFeePerGas, + PaymasterReturnedInvalidContext, + PaymasterContextIsTooLong, + AssertionError, + FailedToMarkFactoryDeps, + TxValidationOutOfGas, + NotEnoughGasProvided, + AccountReturnedInvalidMagic, + PaymasterReturnedInvalidMagic, + Unknown, +} + +impl From for BootloaderErrorCode { + fn from(code: u8) -> BootloaderErrorCode { + match code { + 0 => BootloaderErrorCode::EthCall, + 1 => BootloaderErrorCode::AccountTxValidationFailed, + 2 => BootloaderErrorCode::FailedToChargeFee, + 3 => BootloaderErrorCode::FromIsNotAnAccount, + 4 => BootloaderErrorCode::FailedToCheckAccount, + 5 => BootloaderErrorCode::UnacceptableGasPrice, + 6 => BootloaderErrorCode::FailedToSetPrevBlockHash, + 7 => BootloaderErrorCode::PayForTxFailed, + 8 => BootloaderErrorCode::PrePaymasterPreparationFailed, + 9 => BootloaderErrorCode::PaymasterValidationFailed, + 10 => BootloaderErrorCode::FailedToSendFeesToTheOperator, + 11 => BootloaderErrorCode::UnacceptablePubdataPrice, + 12 => BootloaderErrorCode::TxValidationError, + 13 => BootloaderErrorCode::MaxPriorityFeeGreaterThanMaxFee, + 14 => BootloaderErrorCode::BaseFeeGreaterThanMaxFeePerGas, + 15 => 
BootloaderErrorCode::PaymasterReturnedInvalidContext, + 16 => BootloaderErrorCode::PaymasterContextIsTooLong, + 17 => BootloaderErrorCode::AssertionError, + 18 => BootloaderErrorCode::FailedToMarkFactoryDeps, + 19 => BootloaderErrorCode::TxValidationOutOfGas, + 20 => BootloaderErrorCode::NotEnoughGasProvided, + 21 => BootloaderErrorCode::AccountReturnedInvalidMagic, + 22 => BootloaderErrorCode::PaymasterReturnedInvalidMagic, + _ => BootloaderErrorCode::Unknown, + } + } +} diff --git a/core/lib/vm/src/errors/mod.rs b/core/lib/vm/src/errors/mod.rs new file mode 100644 index 000000000000..462330b41f98 --- /dev/null +++ b/core/lib/vm/src/errors/mod.rs @@ -0,0 +1,9 @@ +mod bootloader_error; +mod tx_revert_reason; +mod vm_revert_reason; + +pub(crate) use bootloader_error::BootloaderErrorCode; +pub use tx_revert_reason::TxRevertReason; +pub use vm_revert_reason::{ + VmRevertReason, VmRevertReasonParsingError, VmRevertReasonParsingResult, +}; diff --git a/core/lib/vm/src/errors/tx_revert_reason.rs b/core/lib/vm/src/errors/tx_revert_reason.rs new file mode 100644 index 000000000000..ec3242d7954f --- /dev/null +++ b/core/lib/vm/src/errors/tx_revert_reason.rs @@ -0,0 +1,206 @@ +use std::{convert::TryFrom, fmt::Display}; + +use super::{BootloaderErrorCode, VmRevertReason}; + +// Note that currently only EthCall transactions have valid Revert Reason. +// Same transaction executed in bootloader will just have `InnerTxError`. +// Reasons why the transaction executed inside the bootloader could fail. 
+#[derive(Debug, Clone, PartialEq)] +pub enum TxRevertReason { + // Can only be returned in EthCall execution mode (=ExecuteOnly) + EthCall(VmRevertReason), + TxOutOfGas, + // Can only be returned in VerifyAndExecute + ValidationFailed(VmRevertReason), + PaymasterValidationFailed(VmRevertReason), + PrePaymasterPreparationFailed(VmRevertReason), + PayForTxFailed(VmRevertReason), + FailedToMarkFactoryDependencies(VmRevertReason), + FailedToChargeFee(VmRevertReason), + // Emitted when trying to call a transaction from an account that has not + // been deployed as an account (i.e. the `from` is just a contract). + // Can only be returned in VerifyAndExecute + FromIsNotAnAccount, + // Currently cannot be returned. Should be removed when refactoring errors. + InnerTxError, + Unknown(VmRevertReason), + // Temporarily used instead of panics to provide better experience for developers: + // their transaction would simply be rejected and they'll be able to provide + // information about the cause to us. + UnexpectedVMBehavior(String), + // Bootloader is out of gas. + BootloaderOutOfGas, + // Transaction has a too big gas limit and will not be executed by the server. + TooBigGasLimit, + // The bootloader did not have enough gas to start the transaction in the first place + NotEnoughGasProvided, +} + +impl TxRevertReason { + pub fn parse_error(bytes: &[u8]) -> Self { + // The first 32 bytes should correspond with error code. + // If the error is smaller than that, we will use a standardized bootloader error. 
+ if bytes.is_empty() { + return Self::UnexpectedVMBehavior("Bootloader returned an empty error".to_string()); + } + + let (error_code, error_msg) = bytes.split_at(1); + let revert_reason = match VmRevertReason::try_from(error_msg) { + Ok(reason) => reason, + Err(_) => { + let function_selector = if error_msg.len() >= 4 { + error_msg[0..4].to_vec() + } else { + error_msg.to_vec() + }; + + let data = if error_msg.len() > 4 { + error_msg[4..].to_vec() + } else { + vec![] + }; + + VmRevertReason::Unknown { + function_selector, + data, + } + } + }; + + // `error_code` is a big-endian number, so we can safely take the first byte of it. + match BootloaderErrorCode::from(error_code[0]) { + BootloaderErrorCode::EthCall => Self::EthCall(revert_reason), + BootloaderErrorCode::AccountTxValidationFailed => Self::ValidationFailed(revert_reason), + BootloaderErrorCode::FailedToChargeFee => Self::FailedToChargeFee(revert_reason), + BootloaderErrorCode::FromIsNotAnAccount => Self::FromIsNotAnAccount, + BootloaderErrorCode::FailedToCheckAccount => Self::ValidationFailed(VmRevertReason::General { + msg: "Failed to check if `from` is an account. Most likely not enough gas provided".to_string() + }), + BootloaderErrorCode::UnacceptableGasPrice => Self::UnexpectedVMBehavior( + "The operator included transaction with an unacceptable gas price".to_owned(), + ), + BootloaderErrorCode::PrePaymasterPreparationFailed => { + Self::PrePaymasterPreparationFailed(revert_reason) + } + BootloaderErrorCode::PaymasterValidationFailed => { + Self::PaymasterValidationFailed(revert_reason) + } + BootloaderErrorCode::FailedToSendFeesToTheOperator => { + Self::UnexpectedVMBehavior("FailedToSendFeesToTheOperator".to_owned()) + } + BootloaderErrorCode::FailedToSetPrevBlockHash => { + panic!( + "The bootloader failed to set previous block hash. 
Reason: {}", + revert_reason + ) + } + BootloaderErrorCode::UnacceptablePubdataPrice => { + Self::UnexpectedVMBehavior("UnacceptablePubdataPrice".to_owned()) + } + // This is different from AccountTxValidationFailed error in a way that it means that + // the error was not produced by the account itself, but for some other unknown reason (most likely not enough gas) + BootloaderErrorCode::TxValidationError => Self::ValidationFailed(revert_reason), + // Note, that `InnerTxError` is derived only after the actual tx execution, so + // it is not parsed here. Unknown error means that bootloader failed by a reason + // that was not specified by the protocol: + BootloaderErrorCode::MaxPriorityFeeGreaterThanMaxFee => { + Self::UnexpectedVMBehavior("Max priority fee greater than max fee".to_owned()) + } + BootloaderErrorCode::PaymasterReturnedInvalidContext => { + Self::PaymasterValidationFailed(VmRevertReason::General { + msg: String::from("Paymaster returned invalid context"), + }) + } + BootloaderErrorCode::PaymasterContextIsTooLong => { + Self::PaymasterValidationFailed(VmRevertReason::General { + msg: String::from("Paymaster returned context that is too long"), + }) + } + BootloaderErrorCode::AssertionError => { + Self::UnexpectedVMBehavior(format!("Assertion error: {}", revert_reason)) + } + BootloaderErrorCode::BaseFeeGreaterThanMaxFeePerGas => Self::UnexpectedVMBehavior( + "Block.basefee is greater than max fee per gas".to_owned(), + ), + BootloaderErrorCode::PayForTxFailed => { + Self::PayForTxFailed(revert_reason) + }, + BootloaderErrorCode::FailedToMarkFactoryDeps => { + let msg = if let VmRevertReason::General { msg } = revert_reason { + msg + } else { + String::from("Most likely not enough gas provided") + }; + Self::FailedToMarkFactoryDependencies(VmRevertReason::General { + msg + }) + }, + BootloaderErrorCode::TxValidationOutOfGas => { + Self::ValidationFailed(VmRevertReason::General { msg: String::from("Not enough gas for transaction validation") }) + }, + 
BootloaderErrorCode::NotEnoughGasProvided => { + Self::NotEnoughGasProvided + }, + BootloaderErrorCode::AccountReturnedInvalidMagic => { + Self::ValidationFailed(VmRevertReason::General { msg: String::from("Account validation returned invalid magic value. Most often this means that the signature is incorrect") }) + }, + BootloaderErrorCode::PaymasterReturnedInvalidMagic => { + Self::ValidationFailed(VmRevertReason::General { msg: String::from("Paymaster validation returned invalid magic value. Please refer to the documentation of the paymaster for more details") }) + } + BootloaderErrorCode::Unknown => Self::UnexpectedVMBehavior(format!( + "Unsupported error code: {}. Revert reason: {}", + error_code[0], revert_reason + )), + } + } +} + +impl Display for TxRevertReason { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match &self { + // EthCall reason is usually returned unchanged. + TxRevertReason::EthCall(reason) => write!(f, "{}", reason), + TxRevertReason::TxOutOfGas => write!(f, "out of gas"), + TxRevertReason::ValidationFailed(reason) => { + write!(f, "Account validation error: {}", reason) + } + TxRevertReason::FailedToChargeFee(reason) => { + write!(f, "Failed to charge fee: {}", reason) + } + // Emitted when trying to call a transaction from an account that has no + // been deployed as an account (i.e. the `from` is just a contract). + TxRevertReason::FromIsNotAnAccount => write!(f, "Sender is not an account"), + TxRevertReason::InnerTxError => write!(f, "Bootloader-based tx failed"), + TxRevertReason::PaymasterValidationFailed(reason) => { + write!(f, "Paymaster validation error: {}", reason) + } + TxRevertReason::PrePaymasterPreparationFailed(reason) => { + write!(f, "Pre-paymaster preparation error: {}", reason) + } + TxRevertReason::Unknown(reason) => write!(f, "Unknown reason: {}", reason), + TxRevertReason::UnexpectedVMBehavior(problem) => { + write!(f, + "virtual machine entered unexpected state. 
Please contact developers and provide transaction details \ + that caused this error. Error description: {problem}" + ) + } + TxRevertReason::BootloaderOutOfGas => write!(f, "Bootloader out of gas"), + TxRevertReason::NotEnoughGasProvided => write!( + f, + "Bootloader did not have enough gas to start the transaction" + ), + TxRevertReason::FailedToMarkFactoryDependencies(reason) => { + write!(f, "Failed to mark factory dependencies: {}", reason) + } + TxRevertReason::PayForTxFailed(reason) => { + write!(f, "Failed to pay for the transaction: {}", reason) + } + TxRevertReason::TooBigGasLimit => { + write!( + f, + "Transaction has a too big ergs limit and will not be executed by the server" + ) + } + } + } +} diff --git a/core/lib/vm/src/errors/vm_revert_reason.rs b/core/lib/vm/src/errors/vm_revert_reason.rs new file mode 100644 index 000000000000..a38b99935e92 --- /dev/null +++ b/core/lib/vm/src/errors/vm_revert_reason.rs @@ -0,0 +1,230 @@ +use std::convert::TryFrom; +use std::fmt::{Debug, Display}; + +use zksync_types::U256; + +use crate::TxRevertReason; + +#[derive(Debug, thiserror::Error)] +pub enum VmRevertReasonParsingError { + #[error("Incorrect data offset. Data: {0:?}")] + IncorrectDataOffset(Vec), + #[error("Input is too short. Data: {0:?}")] + InputIsTooShort(Vec), + #[error("Incorrect string length. 
Data: {0:?}")] + IncorrectStringLength(Vec), +} + +/// Rich Revert Reasons https://github.com/0xProject/ZEIPs/issues/32 +#[derive(Debug, Clone, PartialEq)] +pub enum VmRevertReason { + General { + msg: String, + }, + InnerTxError, + VmError, + Unknown { + function_selector: Vec, + data: Vec, + }, +} + +impl VmRevertReason { + const GENERAL_ERROR_SELECTOR: &'static [u8] = &[0x08, 0xc3, 0x79, 0xa0]; + + fn parse_general_error(bytes: &[u8]) -> Result { + if bytes.len() < 32 { + return Err(VmRevertReasonParsingError::InputIsTooShort(bytes.to_vec())); + } + let data_offset = U256::from_big_endian(&bytes[0..32]).as_usize(); + + // Data offset couldn't be less than 32 because data offset size is 32 bytes + // and data offset bytes are part of the offset. Also data offset couldn't be greater than + // data length + if data_offset > bytes.len() || data_offset < 32 { + return Err(VmRevertReasonParsingError::IncorrectDataOffset( + bytes.to_vec(), + )); + }; + + let data = &bytes[data_offset..]; + + if data.len() < 32 { + return Err(VmRevertReasonParsingError::InputIsTooShort(bytes.to_vec())); + }; + + let string_length = U256::from_big_endian(&data[0..32]).as_usize(); + + if string_length + 32 > data.len() { + return Err(VmRevertReasonParsingError::IncorrectStringLength( + bytes.to_vec(), + )); + }; + + Ok(Self::General { + msg: String::from_utf8_lossy(&data[32..32 + string_length]).to_string(), + }) + } +} + +impl TryFrom<&[u8]> for VmRevertReason { + type Error = VmRevertReasonParsingError; + + fn try_from(bytes: &[u8]) -> Result { + if bytes.len() < 4 { + // Note, that when the method reverts with no data + // the selector is empty as well. + // For now, we only accept errors with either no data or + // the data with complete selectors. 
+ if !bytes.is_empty() { + return Err(VmRevertReasonParsingError::IncorrectStringLength( + bytes.to_owned(), + )); + } + + let result = VmRevertReason::Unknown { + function_selector: vec![], + data: bytes.to_vec(), + }; + + return Ok(result); + } + + let function_selector = &bytes[0..4]; + let error_data = &bytes[4..]; + match function_selector { + VmRevertReason::GENERAL_ERROR_SELECTOR => Self::parse_general_error(error_data), + _ => { + let result = VmRevertReason::Unknown { + function_selector: function_selector.to_vec(), + data: error_data.to_vec(), + }; + vlog::warn!("Unsupported error type: {}", result); + Ok(result) + } + } + } +} + +impl Display for VmRevertReason { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use VmRevertReason::{General, InnerTxError, Unknown, VmError}; + + match self { + General { msg } => write!(f, "{}", msg), + VmError => write!(f, "VM Error",), + InnerTxError => write!(f, "Bootloader-based tx failed"), + Unknown { + function_selector, + data, + } => write!( + f, + "Error function_selector = 0x{}, data = 0x{}", + hex::encode(function_selector), + hex::encode(data) + ), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct VmRevertReasonParsingResult { + pub revert_reason: TxRevertReason, + pub original_data: Vec, +} + +impl VmRevertReasonParsingResult { + pub fn new(revert_reason: TxRevertReason, original_data: Vec) -> Self { + Self { + revert_reason, + original_data, + } + } +} + +#[cfg(test)] +mod tests { + use std::convert::TryFrom; + + use super::VmRevertReason; + + #[test] + fn revert_reason_parsing() { + let msg = vec![ + 8, 195, 121, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 69, 82, 67, 50, 48, 58, 32, 116, 114, 97, 110, + 115, 102, 101, 114, 32, 97, 109, 111, 117, 110, 116, 32, 101, 120, 99, 101, 101, 100, + 115, 32, 98, 97, 108, 97, 
110, 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let reason = VmRevertReason::try_from(msg.as_slice()).expect("Shouldn't be error"); + assert_eq!( + reason, + VmRevertReason::General { + msg: "ERC20: transfer amount exceeds balance".to_string() + } + ); + } + + #[test] + fn revert_reason_with_wrong_function_selector() { + let msg = vec![ + 8, 195, 121, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 69, 82, 67, 50, 48, 58, 32, 116, 114, 97, 110, + 115, 102, 101, 114, 32, 97, 109, 111, 117, 110, 116, 32, 101, 120, 99, 101, 101, 100, + 115, 32, 98, 97, 108, 97, 110, 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let reason = VmRevertReason::try_from(msg.as_slice()).expect("Shouldn't be error"); + assert!(matches!(reason, VmRevertReason::Unknown { .. 
})); + } + + #[test] + fn revert_reason_with_wrong_data_offset() { + let msg = vec![ + 8, 195, 121, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 69, 82, 67, 50, 48, 58, 32, 116, 114, 97, 110, + 115, 102, 101, 114, 32, 97, 109, 111, 117, 110, 116, 32, 101, 120, 99, 101, 101, 100, + 115, 32, 98, 97, 108, 97, 110, 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let reason = VmRevertReason::try_from(msg.as_slice()); + assert!(reason.is_err()); + } + + #[test] + fn revert_reason_with_big_data_offset() { + let msg = vec![ + 8, 195, 121, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 69, 82, 67, 50, 48, 58, 32, 116, 114, 97, 110, + 115, 102, 101, 114, 32, 97, 109, 111, 117, 110, 116, 32, 101, 120, 99, 101, 101, 100, + 115, 32, 98, 97, 108, 97, 110, 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let reason = VmRevertReason::try_from(msg.as_slice()); + assert!(reason.is_err()); + } + + #[test] + fn revert_reason_with_wrong_string_length() { + let msg = vec![ + 8, 195, 121, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 158, 69, 82, 67, 50, 48, 58, 32, 116, 114, 97, 110, + 115, 102, 101, 114, 32, 97, 109, 111, 117, 110, 116, 32, 101, 120, 99, 101, 101, 100, + 115, 32, 98, 97, 108, 97, 110, 99, 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let reason = VmRevertReason::try_from(msg.as_slice()); + assert!(reason.is_err()); + } +} diff --git a/core/lib/vm/src/event_sink.rs b/core/lib/vm/src/event_sink.rs new file mode 100644 index 000000000000..e850fb4e2bd4 --- /dev/null +++ b/core/lib/vm/src/event_sink.rs @@ -0,0 +1,170 @@ +use crate::{oracles::OracleWithHistory, utils::collect_log_queries_after_timestamp}; +use std::collections::HashMap; +use zk_evm::{ + abstractions::EventSink, + aux_structures::{LogQuery, Timestamp}, + reference_impls::event_sink::{ApplicationData, EventMessage}, + zkevm_opcode_defs::system_params::{ + BOOTLOADER_FORMAL_ADDRESS, EVENT_AUX_BYTE, L1_MESSAGE_AUX_BYTE, + }, +}; + +use crate::history_recorder::AppDataFrameManagerWithHistory; + +#[derive(Debug, Default, Clone, PartialEq)] +pub struct InMemoryEventSink { + pub frames_stack: AppDataFrameManagerWithHistory, +} + +impl OracleWithHistory for InMemoryEventSink { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + self.frames_stack.rollback_to_timestamp(timestamp); + } + + fn delete_history(&mut self) { + self.frames_stack.delete_history(); + } +} + +// as usual, if we rollback the current frame then we apply changes to storage immediately, +// otherwise we carry rollbacks to the parent's frames + +impl InMemoryEventSink { + pub fn flatten(&self) -> (Vec, Vec, Vec) { + assert_eq!( + self.frames_stack.inner().len(), + 1, + "there must exist an initial keeper frame" + ); + let full_history = self.frames_stack.inner().current_frame().clone(); + // we forget rollbacks as we have finished the execution and can just apply them + let ApplicationData { + forward, + rollbacks: _, + } = full_history; + let history = forward.clone(); + let (events, l1_messages) = Self::events_and_l1_messages_from_history(forward); + (history, events, l1_messages) + } + + pub fn get_log_queries(&self) -> usize { + let history = 
&self.frames_stack.inner().current_frame().forward; + history.len() + } + + pub fn get_events_and_l2_l1_logs_after_timestamp( + &self, + from_timestamp: Timestamp, + ) -> (Vec, Vec) { + let history = collect_log_queries_after_timestamp( + &self.frames_stack.inner().current_frame().forward, + from_timestamp, + ); + Self::events_and_l1_messages_from_history(history) + } + + fn events_and_l1_messages_from_history( + history: Vec, + ) -> (Vec, Vec) { + let mut tmp = HashMap::::with_capacity(history.len()); + + // note that we only use "forward" part and discard the rollbacks at the end, + // since if rollbacks of parents were not appended anywhere we just still keep them + for el in history.into_iter() { + // we are time ordered here in terms of rollbacks + if tmp.get(&el.timestamp.0).is_some() { + assert!(el.rollback); + tmp.remove(&el.timestamp.0); + } else { + assert!(!el.rollback); + tmp.insert(el.timestamp.0, el); + } + } + + // naturally sorted by timestamp + let mut keys: Vec<_> = tmp.keys().into_iter().cloned().collect(); + keys.sort_unstable(); + + let mut events = vec![]; + let mut l1_messages = vec![]; + + for k in keys.into_iter() { + let el = tmp.remove(&k).unwrap(); + let LogQuery { + shard_id, + is_service, + tx_number_in_block, + address, + key, + written_value, + aux_byte, + .. 
+ } = el; + + let event = EventMessage { + shard_id, + is_first: is_service, + tx_number_in_block, + address, + key, + value: written_value, + }; + + if aux_byte == EVENT_AUX_BYTE { + events.push(event); + } else { + l1_messages.push(event); + } + } + + (events, l1_messages) + } +} + +impl EventSink for InMemoryEventSink { + // when we enter a new frame we should remember all our current applications and rollbacks + // when we exit the current frame then if we did panic we should concatenate all current + // forward and rollback cases + + fn add_partial_query(&mut self, _monotonic_cycle_counter: u32, mut query: LogQuery) { + assert!(query.rw_flag); + assert!(query.aux_byte == EVENT_AUX_BYTE || query.aux_byte == L1_MESSAGE_AUX_BYTE); + assert!(!query.rollback); + // just append to rollbacks and a full history + + self.frames_stack.push_forward(query, query.timestamp); + // we do not need it explicitly here, but let's be consistent with circuit counterpart + query.rollback = true; + self.frames_stack.push_rollback(query, query.timestamp); + } + + fn start_frame(&mut self, timestamp: Timestamp) { + self.frames_stack.push_frame(timestamp) + } + + fn finish_frame(&mut self, panicked: bool, timestamp: Timestamp) { + // if we panic then we append forward and rollbacks to the forward of parent, + // otherwise we place rollbacks of child before rollbacks of the parent + let ApplicationData { forward, rollbacks } = self.frames_stack.drain_frame(timestamp); + if panicked { + for query in forward { + self.frames_stack.push_forward(query, timestamp); + } + for query in rollbacks.into_iter().rev().into_iter().filter(|q| { + // As of now, the bootloader only emits debug logs + // for events, so we keep them here for now. + // They will be cleared on the server level. 
+ q.address != *BOOTLOADER_FORMAL_ADDRESS || q.aux_byte != EVENT_AUX_BYTE + }) { + self.frames_stack.push_forward(query, timestamp); + } + } else { + for query in forward { + self.frames_stack.push_forward(query, timestamp); + } // we need to prepend rollbacks. No reverse here, as we do not care yet! + for query in rollbacks { + self.frames_stack.push_rollback(query, timestamp); + } + } + } +} diff --git a/core/lib/vm/src/events.rs b/core/lib/vm/src/events.rs new file mode 100644 index 000000000000..d9a2a10406bc --- /dev/null +++ b/core/lib/vm/src/events.rs @@ -0,0 +1,149 @@ +use zk_evm::{ethereum_types::Address, reference_impls::event_sink::EventMessage}; +use zksync_types::{L1BatchNumber, VmEvent, EVENT_WRITER_ADDRESS, H256}; +use zksync_utils::{be_chunks_to_words, h256_to_account_address, u256_to_h256}; + +#[derive(Clone)] +pub struct SolidityLikeEvent { + pub shard_id: u8, + pub tx_number_in_block: u16, + pub address: Address, + pub topics: Vec<[u8; 32]>, + pub data: Vec, +} + +impl SolidityLikeEvent { + pub fn into_vm_event(self, block_number: L1BatchNumber) -> VmEvent { + VmEvent { + location: (block_number, self.tx_number_in_block as u32), + address: self.address, + indexed_topics: be_chunks_to_words(self.topics) + .into_iter() + .map(u256_to_h256) + .collect(), + value: self.data, + } + } +} + +fn merge_events_inner(events: Vec) -> Vec { + let mut result = vec![]; + let mut current: Option<(usize, u32, SolidityLikeEvent)> = None; + + for message in events.into_iter() { + if !message.is_first { + let EventMessage { + shard_id, + is_first: _, + tx_number_in_block, + address, + key, + value, + } = message; + + if let Some((mut remaining_data_length, mut remaining_topics, mut event)) = + current.take() + { + if event.address != address + || event.shard_id != shard_id + || event.tx_number_in_block != tx_number_in_block + { + continue; + } + let mut data_0 = [0u8; 32]; + let mut data_1 = [0u8; 32]; + key.to_big_endian(&mut data_0); + value.to_big_endian(&mut 
data_1); + for el in [data_0, data_1].iter() { + if remaining_topics != 0 { + event.topics.push(*el); + remaining_topics -= 1; + } else if remaining_data_length != 0 { + if remaining_data_length >= 32 { + event.data.extend_from_slice(el); + remaining_data_length -= 32; + } else { + event.data.extend_from_slice(&el[..remaining_data_length]); + remaining_data_length = 0; + } + } + } + + if remaining_data_length != 0 || remaining_topics != 0 { + current = Some((remaining_data_length, remaining_topics, event)) + } else { + result.push(event); + } + } + } else { + // start new one. First take the old one only if it's well formed + if let Some((remaining_data_length, remaining_topics, event)) = current.take() { + if remaining_data_length == 0 && remaining_topics == 0 { + result.push(event); + } + } + + let EventMessage { + shard_id, + is_first: _, + tx_number_in_block, + address, + key, + value, + } = message; + // split key as our internal marker. Ignore higher bits + let mut num_topics = key.0[0] as u32; + let mut data_length = (key.0[0] >> 32) as usize; + let mut buffer = [0u8; 32]; + value.to_big_endian(&mut buffer); + + let (topics, data) = if num_topics == 0 && data_length == 0 { + (vec![], vec![]) + } else if num_topics == 0 { + data_length -= 32; + (vec![], buffer.to_vec()) + } else { + num_topics -= 1; + (vec![buffer], vec![]) + }; + + let new_event = SolidityLikeEvent { + shard_id, + tx_number_in_block, + address, + topics, + data, + }; + + current = Some((data_length, num_topics, new_event)) + } + } + + // add the last one + if let Some((remaining_data_length, remaining_topics, event)) = current.take() { + if remaining_data_length == 0 && remaining_topics == 0 { + result.push(event); + } + } + + result +} + +pub fn merge_events(events: Vec) -> Vec { + let raw_events = merge_events_inner(events); + + raw_events + .into_iter() + .filter(|e| e.address == EVENT_WRITER_ADDRESS) + .map(|event| { + // The events writer events where the first topic is the actual 
address of the event and the rest of the topics are real topics + let address = h256_to_account_address(&H256(event.topics[0])); + let topics = event.topics.into_iter().skip(1).collect(); + + SolidityLikeEvent { + topics, + address, + ..event + } + }) + .collect() +} diff --git a/core/lib/vm/src/history_recorder.rs b/core/lib/vm/src/history_recorder.rs new file mode 100644 index 000000000000..1f673675957f --- /dev/null +++ b/core/lib/vm/src/history_recorder.rs @@ -0,0 +1,635 @@ +use std::{ + collections::HashMap, + hash::{BuildHasherDefault, Hash, Hasher}, +}; + +use crate::storage::StoragePtr; + +use zk_evm::{ + aux_structures::Timestamp, + reference_impls::event_sink::ApplicationData, + vm_state::PrimitiveValue, + zkevm_opcode_defs::{self}, +}; + +use zksync_types::{StorageKey, U256}; +use zksync_utils::{h256_to_u256, u256_to_h256}; + +pub type AppDataFrameManagerWithHistory = FrameManagerWithHistory>; +pub type MemoryWithHistory = HistoryRecorder; +pub type FrameManagerWithHistory = HistoryRecorder>; +pub type IntFrameManagerWithHistory = FrameManagerWithHistory>; + +// Within the same cycle, timestamps in range timestamp..timestamp+TIME_DELTA_PER_CYCLE-1 +// can be used. This can sometimes vioalate monotonicity of the timestamp within the +// same cycle, so it should be normalized. +fn normalize_timestamp(timestamp: Timestamp) -> Timestamp { + let timestamp = timestamp.0; + + // Making sure it is divisible by TIME_DELTA_PER_CYCLE + Timestamp(timestamp - timestamp % zkevm_opcode_defs::TIME_DELTA_PER_CYCLE) +} + +/// Accepts history item as its parameter and applies it. 
+pub trait WithHistory { + type HistoryRecord; + type ReturnValue; + + // Applies an action and returns the action that would + // rollback its effect as well as some returned value + fn apply_historic_record( + &mut self, + item: Self::HistoryRecord, + ) -> (Self::HistoryRecord, Self::ReturnValue); +} + +/// A struct responsible for tracking history for +/// a component that is passed as a generic parameter to it (`inner`). +#[derive(Debug, PartialEq)] +pub struct HistoryRecorder { + inner: T, + history: Vec<(Timestamp, T::HistoryRecord)>, +} + +impl Clone for HistoryRecorder +where + T::HistoryRecord: Clone, +{ + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + history: self.history.clone(), + } + } +} + +impl HistoryRecorder { + pub fn from_inner(inner: T) -> Self { + Self { + inner, + history: vec![], + } + } + + pub fn inner(&self) -> &T { + &self.inner + } + + pub fn history(&self) -> &Vec<(Timestamp, T::HistoryRecord)> { + &self.history + } + + pub fn apply_historic_record( + &mut self, + item: T::HistoryRecord, + timestamp: Timestamp, + ) -> T::ReturnValue { + let timestamp = normalize_timestamp(timestamp); + let last_recorded_timestamp = self.history.last().map(|(t, _)| *t).unwrap_or(Timestamp(0)); + assert!( + last_recorded_timestamp <= timestamp, + "Timestamps are not monotonic" + ); + + let (reversed_item, return_value) = self.inner.apply_historic_record(item); + self.history.push((timestamp, reversed_item)); + + return_value + } + + pub fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + loop { + let should_undo = self + .history + .last() + .map(|(item_timestamp, _)| *item_timestamp >= timestamp) + .unwrap_or(false); + if !should_undo { + break; + } + + let (_, item_to_apply) = self.history.pop().unwrap(); + self.inner.apply_historic_record(item_to_apply); + } + } + + /// Deletes all the history for its component, making + /// its current state irreversible + pub fn delete_history(&mut self) { + self.history.clear(); + } +} 
+ +impl Default for HistoryRecorder { + fn default() -> Self { + Self::from_inner(T::default()) + } +} + +/// Frame manager is basically a wrapper +/// over a stack of items, which typically constitute +/// frames in oracles like StorageOracle, Memory, etc. +#[derive(Debug, PartialEq, Clone)] +pub struct FrameManager { + frame_stack: Vec, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum FrameManagerHistoryRecord { + PushFrame, + PopFrame, + /// The operation should be handled by the current frame itself + InnerOperation(V), +} + +impl Default for FrameManager { + fn default() -> Self { + Self { + // We typically require at least the first frame to be there + // since the last user-provided frame might be reverted + frame_stack: vec![T::default()], + } + } +} + +impl WithHistory for FrameManager { + type HistoryRecord = FrameManagerHistoryRecord; + type ReturnValue = Option; + + fn apply_historic_record( + &mut self, + item: FrameManagerHistoryRecord, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + match item { + FrameManagerHistoryRecord::PopFrame => { + self.frame_stack.pop().unwrap(); + (FrameManagerHistoryRecord::PushFrame, None) + } + FrameManagerHistoryRecord::PushFrame => { + self.frame_stack.push(T::default()); + (FrameManagerHistoryRecord::PopFrame, None) + } + FrameManagerHistoryRecord::InnerOperation(record) => { + let (resulting_op, return_value) = self + .frame_stack + .last_mut() + .unwrap() + .apply_historic_record(record); + ( + FrameManagerHistoryRecord::InnerOperation(resulting_op), + Some(return_value), + ) + } + } + } +} + +impl FrameManager +where + T: WithHistory + Default, +{ + pub fn current_frame(&self) -> &T { + self.frame_stack + .last() + .expect("Frame stack should never be empty") + } + + pub fn len(&self) -> usize { + self.frame_stack.len() + } +} + +impl HistoryRecorder> { + /// Add a new frame. 
+ pub fn push_frame(&mut self, timestamp: Timestamp) { + self.apply_historic_record(FrameManagerHistoryRecord::PushFrame, timestamp); + } + + /// Remove the current frame. + pub fn pop_frame(&mut self, timestamp: Timestamp) { + self.apply_historic_record(FrameManagerHistoryRecord::PopFrame, timestamp); + } +} + +impl HistoryRecorder>> { + /// Push an element to the forward queue + pub fn push_forward(&mut self, elem: T, timestamp: Timestamp) { + let forward_event = + ApplicationDataHistoryEvent::ForwardEvent(VectorHistoryEvent::Push(elem)); + let event = FrameManagerHistoryRecord::InnerOperation(forward_event); + + self.apply_historic_record(event, timestamp); + } + + /// Pop an element from the forward queue + pub fn pop_forward(&mut self, timestamp: Timestamp) -> T { + let forward_event = ApplicationDataHistoryEvent::ForwardEvent(VectorHistoryEvent::Pop); + let event = FrameManagerHistoryRecord::InnerOperation(forward_event); + + self.apply_historic_record(event, timestamp) + .flatten() + .unwrap() + } + + /// Push an element to the rollback queue + pub fn push_rollback(&mut self, elem: T, timestamp: Timestamp) { + let rollback_event = + ApplicationDataHistoryEvent::RollbacksEvent(VectorHistoryEvent::Push(elem)); + let event = FrameManagerHistoryRecord::InnerOperation(rollback_event); + + self.apply_historic_record(event, timestamp); + } + + /// Pop an element from the rollback queue + pub fn pop_rollback(&mut self, timestamp: Timestamp) -> T { + let rollback_event = ApplicationDataHistoryEvent::RollbacksEvent(VectorHistoryEvent::Pop); + let event = FrameManagerHistoryRecord::InnerOperation(rollback_event); + + self.apply_historic_record(event, timestamp) + .flatten() + .unwrap() + } + + /// Pops the current frame and returns its value + pub fn drain_frame(&mut self, timestamp: Timestamp) -> ApplicationData { + let mut forward = vec![]; + while !self.inner.current_frame().forward.is_empty() { + let popped_item = self.pop_forward(timestamp); + 
forward.push(popped_item); + } + + let mut rollbacks = vec![]; + while !self.inner.current_frame().rollbacks.is_empty() { + let popped_item = self.pop_rollback(timestamp); + rollbacks.push(popped_item); + } + + self.pop_frame(timestamp); + + // items are in reversed order: + ApplicationData { + forward: forward.into_iter().rev().collect(), + rollbacks: rollbacks.into_iter().rev().collect(), + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub enum VectorHistoryEvent { + Push(X), + Pop, +} + +impl WithHistory for Vec { + type HistoryRecord = VectorHistoryEvent; + type ReturnValue = Option; + fn apply_historic_record( + &mut self, + item: VectorHistoryEvent, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + match item { + VectorHistoryEvent::Pop => { + // Note, that here we assume that the users + // will check themselves whether this vector is empty + // prior to popping from it. + let poped_item = self.pop().unwrap(); + + (VectorHistoryEvent::Push(poped_item), Some(poped_item)) + } + VectorHistoryEvent::Push(x) => { + self.push(x); + + (VectorHistoryEvent::Pop, None) + } + } + } +} + +impl HistoryRecorder> { + pub fn push(&mut self, elem: T, timestamp: Timestamp) { + self.apply_historic_record(VectorHistoryEvent::Push(elem), timestamp); + } + + pub fn pop(&mut self, timestamp: Timestamp) -> T { + self.apply_historic_record(VectorHistoryEvent::Pop, timestamp) + .unwrap() + } + + pub fn len(&self) -> usize { + self.inner.len() + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } +} + +impl HistoryRecorder>> { + /// Push an element to the current frame + pub fn push_to_frame(&mut self, elem: T, timestamp: Timestamp) { + self.apply_historic_record( + FrameManagerHistoryRecord::InnerOperation(VectorHistoryEvent::Push(elem)), + timestamp, + ); + } + + /// Pop an element from the current frame + pub fn pop_from_frame(&mut self, timestamp: Timestamp) -> T { + self.apply_historic_record( + FrameManagerHistoryRecord::InnerOperation(VectorHistoryEvent::Pop), + 
timestamp, + ) + .flatten() + .unwrap() + } + + /// Drains the top frame and returns its value + pub fn drain_frame(&mut self, timestamp: Timestamp) -> Vec { + let mut items = vec![]; + while !self.inner.current_frame().is_empty() { + let popped_item = self.pop_from_frame(timestamp); + items.push(popped_item); + } + + self.pop_frame(timestamp); + + // items are in reversed order: + items.into_iter().rev().collect() + } + + /// Extends the top frame with a vector of items + pub fn extend_frame(&mut self, items: Vec, timestamp: Timestamp) { + for item in items { + self.push_to_frame(item, timestamp); + } + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct HashMapHistoryEvent { + pub key: K, + pub value: Option, +} + +impl WithHistory for HashMap { + type HistoryRecord = HashMapHistoryEvent; + type ReturnValue = Option; + fn apply_historic_record( + &mut self, + item: Self::HistoryRecord, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + let HashMapHistoryEvent { key, value } = item; + + let prev_value = match value { + Some(x) => self.insert(key, x), + None => self.remove(&key), + }; + + ( + HashMapHistoryEvent { + key, + value: prev_value.clone(), + }, + prev_value, + ) + } +} + +impl HistoryRecorder> { + pub fn insert(&mut self, key: K, value: V, timestamp: Timestamp) -> Option { + self.apply_historic_record( + HashMapHistoryEvent { + key, + value: Some(value), + }, + timestamp, + ) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub enum ApplicationDataHistoryEvent { + // The event about the forward queue + ForwardEvent(VectorHistoryEvent), + // The event about the rollbacks queue + RollbacksEvent(VectorHistoryEvent), +} + +impl WithHistory for ApplicationData { + type HistoryRecord = ApplicationDataHistoryEvent; + type ReturnValue = Option; + + fn apply_historic_record( + &mut self, + item: ApplicationDataHistoryEvent, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + match item { + ApplicationDataHistoryEvent::ForwardEvent(e) => { + let (vec_event, 
result) = self.forward.apply_historic_record(e); + (ApplicationDataHistoryEvent::ForwardEvent(vec_event), result) + } + ApplicationDataHistoryEvent::RollbacksEvent(e) => { + let (vec_event, result) = self.rollbacks.apply_historic_record(e); + ( + ApplicationDataHistoryEvent::RollbacksEvent(vec_event), + result, + ) + } + } + } +} + +#[derive(Default)] +pub struct NoopHasher(u64); + +impl Hasher for NoopHasher { + fn write_usize(&mut self, value: usize) { + self.0 = value as u64; + } + + fn write(&mut self, _bytes: &[u8]) { + unreachable!("internal hasher only handles usize type"); + } + + fn finish(&self) -> u64 { + self.0 + } +} + +#[derive(Debug, Default, Clone, PartialEq)] +pub struct MemoryWrapper { + pub memory: Vec>>, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct MemoryHistoryRecord { + pub page: usize, + pub slot: usize, + pub set_value: Option, +} + +impl MemoryWrapper { + pub fn shrink_pages(&mut self) { + while self.memory.last().map(|h| h.is_empty()).unwrap_or(false) { + self.memory.pop(); + } + } + + pub fn ensure_page_exists(&mut self, page: usize) { + if self.memory.len() <= page { + // We don't need to record such events in history + // because all these vectors will be empty + self.memory.resize_with(page + 1, HashMap::default); + } + } + + pub fn dump_page_content_as_u256_words( + &self, + page_number: u32, + range: std::ops::Range, + ) -> Vec { + if let Some(page) = self.memory.get(page_number as usize) { + let mut result = vec![]; + for i in range { + if let Some(word) = page.get(&(i as usize)) { + result.push(*word); + } else { + result.push(PrimitiveValue::empty()); + } + } + + result + } else { + vec![PrimitiveValue::empty(); range.len()] + } + } +} + +impl WithHistory for MemoryWrapper { + type HistoryRecord = MemoryHistoryRecord; + type ReturnValue = Option; + + fn apply_historic_record( + &mut self, + item: MemoryHistoryRecord, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + let MemoryHistoryRecord { + page, + slot, + set_value, 
+ } = item; + + self.ensure_page_exists(page); + let page_handle = self.memory.get_mut(page).unwrap(); + let prev_value = match set_value { + Some(x) => page_handle.insert(slot, x), + None => page_handle.remove(&slot), + }; + self.shrink_pages(); + + let reserved_item = MemoryHistoryRecord { + page, + slot, + set_value: prev_value, + }; + + (reserved_item, prev_value) + } +} + +impl HistoryRecorder { + pub fn write_to_memory( + &mut self, + page: usize, + slot: usize, + value: Option, + timestamp: Timestamp, + ) -> Option { + self.apply_historic_record( + MemoryHistoryRecord { + page, + slot, + set_value: value, + }, + timestamp, + ) + } + + pub fn clear_page(&mut self, page: usize, timestamp: Timestamp) { + let slots_to_clear: Vec<_> = match self.inner.memory.get(page) { + None => return, + Some(x) => x.keys().copied().collect(), + }; + + // We manually clear the page to preserve correct history + for slot in slots_to_clear { + self.write_to_memory(page, slot, None, timestamp); + } + } +} + +#[derive(Debug)] + +pub struct StorageWrapper<'a> { + storage_ptr: StoragePtr<'a>, +} + +impl<'a> StorageWrapper<'a> { + pub fn new(storage_ptr: StoragePtr<'a>) -> Self { + Self { storage_ptr } + } + + pub fn get_ptr(&self) -> StoragePtr<'a> { + self.storage_ptr.clone() + } + + pub fn read_from_storage(&self, key: &StorageKey) -> U256 { + h256_to_u256(self.storage_ptr.borrow_mut().get_value(key)) + } +} + +#[derive(Debug, Clone)] +pub struct StorageHistoryRecord { + pub key: StorageKey, + pub value: U256, +} + +impl<'a> WithHistory for StorageWrapper<'a> { + type HistoryRecord = StorageHistoryRecord; + type ReturnValue = U256; + + fn apply_historic_record( + &mut self, + item: Self::HistoryRecord, + ) -> (Self::HistoryRecord, Self::ReturnValue) { + let prev_value = h256_to_u256( + self.storage_ptr + .borrow_mut() + .set_value(&item.key, u256_to_h256(item.value)), + ); + + let reverse_item = StorageHistoryRecord { + key: item.key, + value: prev_value, + }; + + (reverse_item, 
prev_value) + } +} + +impl<'a> HistoryRecorder> { + pub fn read_from_storage(&self, key: &StorageKey) -> U256 { + self.inner.read_from_storage(key) + } + + pub fn write_to_storage(&mut self, key: StorageKey, value: U256, timestamp: Timestamp) -> U256 { + self.apply_historic_record(StorageHistoryRecord { key, value }, timestamp) + } + + /// Returns a pointer to the storage. + /// Note, that any changes done to the storage via this pointer + /// will NOT be recorded as its history. + pub fn get_ptr(&self) -> StoragePtr<'a> { + self.inner.get_ptr() + } +} diff --git a/core/lib/vm/src/lib.rs b/core/lib/vm/src/lib.rs new file mode 100644 index 000000000000..fcfb5eda9814 --- /dev/null +++ b/core/lib/vm/src/lib.rs @@ -0,0 +1,37 @@ +#![allow(clippy::derive_partial_eq_without_eq)] + +mod bootloader_state; +mod errors; +pub mod event_sink; +mod events; +mod history_recorder; +mod memory; +mod oracle_tools; +pub mod oracles; +mod pubdata_utils; +pub mod storage; +pub mod test_utils; +pub mod transaction_data; +pub mod utils; +pub mod vm; +pub mod vm_with_bootloader; + +#[cfg(test)] +mod tests; + +pub use crate::errors::TxRevertReason; +pub use crate::oracle_tools::OracleTools; +pub use crate::oracles::storage::StorageOracle; +pub use crate::vm::VmBlockResult; +pub use crate::vm::VmExecutionResult; +pub use crate::vm::VmInstance; +pub use zk_evm; +pub use zksync_types::vm_trace::VmExecutionTrace; + +pub type Word = zksync_types::U256; + +pub const MEMORY_SIZE: usize = 1 << 16; +pub const MAX_CALLS: usize = 65536; +pub const REGISTERS_COUNT: usize = 16; +pub const MAX_STACK_SIZE: usize = 256; +pub const MAX_CYCLES_FOR_TX: u32 = u32::MAX; diff --git a/core/lib/vm/src/memory.rs b/core/lib/vm/src/memory.rs new file mode 100644 index 000000000000..88582aa14e83 --- /dev/null +++ b/core/lib/vm/src/memory.rs @@ -0,0 +1,285 @@ +use zk_evm::abstractions::{Memory, MemoryType, MEMORY_CELLS_OTHER_PAGES}; +use zk_evm::aux_structures::{MemoryPage, MemoryQuery, Timestamp}; +use 
zk_evm::vm_state::PrimitiveValue; +use zk_evm::zkevm_opcode_defs::FatPointer; +use zksync_types::U256; + +use crate::history_recorder::{IntFrameManagerWithHistory, MemoryWithHistory}; +use crate::oracles::OracleWithHistory; +use crate::utils::{aux_heap_page_from_base, heap_page_from_base, stack_page_from_base}; + +#[derive(Debug, Default, Clone, PartialEq)] +pub struct SimpleMemory { + pub memory: MemoryWithHistory, + + pub observable_pages: IntFrameManagerWithHistory, +} + +impl OracleWithHistory for SimpleMemory { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + self.memory.rollback_to_timestamp(timestamp); + self.observable_pages.rollback_to_timestamp(timestamp); + } + + fn delete_history(&mut self) { + self.memory.delete_history(); + self.observable_pages.delete_history(); + } +} + +impl SimpleMemory { + pub fn populate(&mut self, elements: Vec<(u32, Vec)>, timestamp: Timestamp) { + for (page, values) in elements.into_iter() { + // Resizing the pages array to fit the page. 
+ let len = values.len(); + assert!(len <= MEMORY_CELLS_OTHER_PAGES); + + for (i, value) in values.into_iter().enumerate() { + let value = PrimitiveValue { + value, + is_pointer: false, + }; + self.memory + .write_to_memory(page as usize, i, Some(value), timestamp); + } + } + } + + pub fn populate_page( + &mut self, + page: usize, + elements: Vec<(usize, U256)>, + timestamp: Timestamp, + ) { + elements.into_iter().for_each(|(offset, value)| { + let value = PrimitiveValue { + value, + is_pointer: false, + }; + + self.memory + .write_to_memory(page, offset, Some(value), timestamp); + }); + } + + pub fn dump_page_content_as_u256_words( + &self, + page: u32, + range: std::ops::Range, + ) -> Vec { + self.memory + .inner() + .dump_page_content_as_u256_words(page, range) + .into_iter() + .map(|v| v.value) + .collect() + } + + pub fn read_slot(&self, page: usize, slot: usize) -> PrimitiveValue { + let slot = slot as u32; + let page = page as u32; + self.memory + .inner() + .dump_page_content_as_u256_words(page, slot..slot + 1)[0] + } + + // This method should be used with relatively small lengths, since + // we don't heavily optimize here for cases with long lengths + pub fn read_unaligned_bytes(&self, page: usize, start: usize, length: usize) -> Vec { + if length == 0 { + return vec![]; + } + + let end = start + length - 1; + + let mut current_word = start / 32; + let mut result = vec![]; + while current_word * 32 <= end { + let word_value = self.read_slot(page, current_word).value; + let word_value = { + let mut bytes: Vec = vec![0u8; 32]; + word_value.to_big_endian(&mut bytes); + bytes + }; + + result.extend(extract_needed_bytes_from_word( + word_value, + current_word, + start, + end, + )); + + current_word += 1; + } + + assert_eq!(result.len(), length); + + result + } +} + +impl Memory for SimpleMemory { + fn execute_partial_query( + &mut self, + _monotonic_cycle_counter: u32, + mut query: MemoryQuery, + ) -> MemoryQuery { + // The following assertion works fine even 
when doing a read + // from heap through pointer, since `value_is_pointer` can only be set to + // `true` during memory writes. + if query.location.memory_type != MemoryType::Stack { + assert!( + !query.value_is_pointer, + "Pointers can only be stored on stack" + ); + } + + let page = query.location.page.0 as usize; + let slot = query.location.index.0 as usize; + + if query.rw_flag { + self.memory.write_to_memory( + page, + slot, + Some(PrimitiveValue { + value: query.value, + is_pointer: query.value_is_pointer, + }), + query.timestamp, + ); + } else { + let current_value = self.read_slot(page, slot); + query.value = current_value.value; + query.value_is_pointer = current_value.is_pointer; + } + + query + } + + fn specialized_code_query( + &mut self, + _monotonic_cycle_counter: u32, + mut query: MemoryQuery, + ) -> MemoryQuery { + assert!( + !query.value_is_pointer, + "Pointers are not used for decommmits" + ); + + let page = query.location.page.0 as usize; + let slot = query.location.index.0 as usize; + + if query.rw_flag { + self.memory.write_to_memory( + page, + slot, + Some(PrimitiveValue { + value: query.value, + is_pointer: query.value_is_pointer, + }), + query.timestamp, + ); + } else { + let current_value = self.read_slot(page, slot); + query.value = current_value.value; + query.value_is_pointer = current_value.is_pointer; + } + + query + } + + fn read_code_query( + &self, + _monotonic_cycle_counter: u32, + mut query: MemoryQuery, + ) -> MemoryQuery { + assert!( + !query.value_is_pointer, + "Pointers are not used for decommmits" + ); + assert!(!query.rw_flag, "Only read queries can be processed"); + + let page = query.location.page.0 as usize; + let slot = query.location.index.0 as usize; + + let current_value = self.read_slot(page, slot); + query.value = current_value.value; + query.value_is_pointer = current_value.is_pointer; + + query + } + + fn start_global_frame( + &mut self, + _current_base_page: MemoryPage, + new_base_page: MemoryPage, + 
calldata_fat_pointer: FatPointer, + timestamp: Timestamp, + ) { + // Besides the calldata page, we also formally include the current stack + // page, heap page and aux heap page. + // The code page will be always left observable, so we don't include it here. + self.observable_pages.push_frame(timestamp); + self.observable_pages.extend_frame( + vec![ + calldata_fat_pointer.memory_page, + stack_page_from_base(new_base_page).0, + heap_page_from_base(new_base_page).0, + aux_heap_page_from_base(new_base_page).0, + ], + timestamp, + ); + } + + fn finish_global_frame( + &mut self, + base_page: MemoryPage, + returndata_fat_pointer: FatPointer, + timestamp: Timestamp, + ) { + // Safe to unwrap here, since `finish_global_frame` is never called with empty stack + let current_observable_pages = self.observable_pages.drain_frame(timestamp); + let returndata_page = returndata_fat_pointer.memory_page; + + for page in current_observable_pages { + // If the page's number is greater than or equal to the base_page, + // it means that it was created by the internal calls of this contract. + // We need to add this check as the calldata pointer is also part of the + // observable pages. 
+ if page >= base_page.0 && page != returndata_page { + self.memory.clear_page(page as usize, timestamp); + } + } + + // Push to the parent's frame + self.observable_pages + .push_to_frame(returndata_page, timestamp); + } +} + +// It is expected that there is some intersection between [word_number*32..word_number*32+31] and [start, end] +fn extract_needed_bytes_from_word( + word_value: Vec, + word_number: usize, + start: usize, + end: usize, +) -> Vec { + let word_start = word_number * 32; + let word_end = word_start + 31; // Note, that at word_start + 32 a new word already starts + + let intersection_left = std::cmp::max(word_start, start); + let intersection_right = std::cmp::min(word_end, end); + + if intersection_right < intersection_left { + vec![] + } else { + let start_bytes = intersection_left - word_start; + let to_take = intersection_right - intersection_left + 1; + + word_value + .into_iter() + .skip(start_bytes) + .take(to_take) + .collect() + } +} diff --git a/core/lib/vm/src/oracle_tools.rs b/core/lib/vm/src/oracle_tools.rs new file mode 100644 index 000000000000..ea28c2491e9f --- /dev/null +++ b/core/lib/vm/src/oracle_tools.rs @@ -0,0 +1,39 @@ +use crate::memory::SimpleMemory; +use std::cell::RefCell; + +use std::fmt::Debug; +use std::rc::Rc; + +use crate::event_sink::InMemoryEventSink; +use crate::oracles::decommitter::DecommitterOracle; +use crate::oracles::precompile::PrecompilesProcessorWithHistory; +use crate::oracles::storage::StorageOracle; +use crate::storage::{Storage, StoragePtr}; +use zk_evm::witness_trace::DummyTracer; + +#[derive(Debug)] +pub struct OracleTools<'a, const B: bool> { + pub storage: StorageOracle<'a>, + pub memory: SimpleMemory, + pub event_sink: InMemoryEventSink, + pub precompiles_processor: PrecompilesProcessorWithHistory, + pub decommittment_processor: DecommitterOracle<'a, B>, + pub witness_tracer: DummyTracer, + pub storage_view: StoragePtr<'a>, +} + +impl<'a> OracleTools<'a, false> { + pub fn new(storage_view: &'a 
mut dyn Storage) -> Self { + let pointer: Rc> = Rc::new(RefCell::new(storage_view)); + + Self { + storage: StorageOracle::new(pointer.clone()), + memory: SimpleMemory::default(), + event_sink: InMemoryEventSink::default(), + precompiles_processor: PrecompilesProcessorWithHistory::default(), + decommittment_processor: DecommitterOracle::new(pointer.clone()), + witness_tracer: DummyTracer {}, + storage_view: pointer, + } + } +} diff --git a/core/lib/vm/src/oracles/decommitter.rs b/core/lib/vm/src/oracles/decommitter.rs new file mode 100644 index 000000000000..e0c795879146 --- /dev/null +++ b/core/lib/vm/src/oracles/decommitter.rs @@ -0,0 +1,186 @@ +use std::collections::HashMap; + +use crate::history_recorder::HistoryRecorder; +use crate::storage::StoragePtr; + +use zk_evm::abstractions::MemoryType; +use zk_evm::aux_structures::Timestamp; +use zk_evm::{ + abstractions::{DecommittmentProcessor, Memory}, + aux_structures::{DecommittmentQuery, MemoryIndex, MemoryLocation, MemoryPage, MemoryQuery}, +}; +use zksync_types::U256; +use zksync_utils::bytecode::bytecode_len_in_words; +use zksync_utils::{bytes_to_be_words, u256_to_h256}; + +use super::OracleWithHistory; + +#[derive(Debug)] +pub struct DecommitterOracle<'a, const B: bool> { + /// Pointer that enables to read contract bytecodes from the database. + storage: StoragePtr<'a>, + /// The cache of bytecodes that the bootloader "knows", but that are not necessarily in the database. + pub known_bytecodes: HistoryRecorder>>, + /// Stores pages of memory where certain code hashes have already been decommitted. + decommitted_code_hashes: HistoryRecorder>, + /// Stores history of decommitment requests. 
+ decommitment_requests: HistoryRecorder>, +} + +impl<'a, const B: bool> DecommitterOracle<'a, B> { + pub fn new(storage: StoragePtr<'a>) -> Self { + Self { + storage, + known_bytecodes: Default::default(), + decommitted_code_hashes: Default::default(), + decommitment_requests: Default::default(), + } + } + + pub fn get_bytecode(&mut self, hash: U256, timestamp: Timestamp) -> Vec { + let entry = self.known_bytecodes.inner().get(&hash); + + match entry { + Some(x) => x.clone(), + None => { + // It is ok to panic here, since the decommitter is never called directly by + // the users and always called by the VM. VM will never let decommit the + // code hash which we didn't previously claim to know the preimage of. + let value = self + .storage + .borrow_mut() + .load_factory_dep(u256_to_h256(hash)) + .expect("Trying to decode unexisting hash"); + + let value = bytes_to_be_words(value); + self.known_bytecodes.insert(hash, value.clone(), timestamp); + value + } + } + } + + pub fn populate(&mut self, bytecodes: Vec<(U256, Vec)>, timestamp: Timestamp) { + for (hash, bytecode) in bytecodes { + self.known_bytecodes.insert(hash, bytecode, timestamp); + } + } + + pub fn get_used_bytecode_hashes(&self) -> Vec { + self.decommitted_code_hashes + .inner() + .iter() + .map(|item| *item.0) + .collect() + } + + pub fn get_decommitted_bytes_after_timestamp(&self, timestamp: Timestamp) -> usize { + // Note, that here we rely on the fact that for each used bytecode + // there is one and only one corresponding event in the history of it. 
+ self.decommitted_code_hashes + .history() + .iter() + .rev() + .take_while(|(t, _)| *t >= timestamp) + .count() + } + + pub fn get_number_of_decommitment_requests_after_timestamp( + &self, + timestamp: Timestamp, + ) -> usize { + self.decommitment_requests + .history() + .iter() + .rev() + .take_while(|(t, _)| *t >= timestamp) + .count() + } + + pub fn get_decommitted_code_hashes_with_history(&self) -> &HistoryRecorder> { + &self.decommitted_code_hashes + } + + pub fn get_storage(&self) -> StoragePtr<'a> { + self.storage.clone() + } +} + +impl<'a, const B: bool> OracleWithHistory for DecommitterOracle<'a, B> { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + self.decommitted_code_hashes + .rollback_to_timestamp(timestamp); + self.known_bytecodes.rollback_to_timestamp(timestamp); + self.decommitment_requests.rollback_to_timestamp(timestamp); + } + + fn delete_history(&mut self) { + self.decommitted_code_hashes.delete_history(); + self.known_bytecodes.delete_history(); + self.decommitment_requests.delete_history(); + } +} + +impl<'a, const B: bool> DecommittmentProcessor for DecommitterOracle<'a, B> { + fn decommit_into_memory( + &mut self, + monotonic_cycle_counter: u32, + mut partial_query: DecommittmentQuery, + memory: &mut M, + ) -> (DecommittmentQuery, Option>) { + self.decommitment_requests.push((), partial_query.timestamp); + if let Some(memory_page) = self + .decommitted_code_hashes + .inner() + .get(&partial_query.hash) + .copied() + { + partial_query.is_fresh = false; + partial_query.memory_page = MemoryPage(memory_page); + partial_query.decommitted_length = + bytecode_len_in_words(&u256_to_h256(partial_query.hash)); + + (partial_query, None) + } else { + // fresh one + let values = self.get_bytecode(partial_query.hash, partial_query.timestamp); + let page_to_use = partial_query.memory_page; + let timestamp = partial_query.timestamp; + partial_query.decommitted_length = values.len() as u16; + partial_query.is_fresh = true; + + // write into 
memory + let mut tmp_q = MemoryQuery { + timestamp, + location: MemoryLocation { + memory_type: MemoryType::Code, + page: page_to_use, + index: MemoryIndex(0), + }, + value: U256::zero(), + value_is_pointer: false, + rw_flag: true, + is_pended: false, + }; + self.decommitted_code_hashes + .insert(partial_query.hash, page_to_use.0, timestamp); + + if B { + for (i, value) in values.iter().enumerate() { + tmp_q.location.index = MemoryIndex(i as u32); + tmp_q.value = *value; + memory.execute_partial_query(monotonic_cycle_counter, tmp_q); + } + + (partial_query, Some(values)) + } else { + for (i, value) in values.into_iter().enumerate() { + tmp_q.location.index = MemoryIndex(i as u32); + tmp_q.value = value; + memory.execute_partial_query(monotonic_cycle_counter, tmp_q); + } + + (partial_query, None) + } + } + } +} diff --git a/core/lib/vm/src/oracles/mod.rs b/core/lib/vm/src/oracles/mod.rs new file mode 100644 index 000000000000..5b9378729ed8 --- /dev/null +++ b/core/lib/vm/src/oracles/mod.rs @@ -0,0 +1,19 @@ +use zk_evm::aux_structures::Timestamp; +// We will discard RAM as soon as the execution of a tx ends, so +// it is ok for now to use SimpleMemory +pub use zk_evm::reference_impls::memory::SimpleMemory as RamOracle; +// All the changes to the events in the DB will be applied after the tx is executed, +// so fow now it is fine. 
+pub use zk_evm::reference_impls::event_sink::InMemoryEventSink as EventSinkOracle; + +pub use zk_evm::testing::simple_tracer::NoopTracer; + +pub mod decommitter; +pub mod precompile; +pub mod storage; +pub mod tracer; + +pub trait OracleWithHistory { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp); + fn delete_history(&mut self); +} diff --git a/core/lib/vm/src/oracles/precompile.rs b/core/lib/vm/src/oracles/precompile.rs new file mode 100644 index 000000000000..3374be5caa96 --- /dev/null +++ b/core/lib/vm/src/oracles/precompile.rs @@ -0,0 +1,78 @@ +use zk_evm::{ + abstractions::Memory, + abstractions::PrecompileCyclesWitness, + abstractions::PrecompilesProcessor, + aux_structures::{LogQuery, MemoryQuery, Timestamp}, + precompiles::DefaultPrecompilesProcessor, +}; + +use crate::history_recorder::HistoryRecorder; + +use super::OracleWithHistory; + +/// Wrap of DefaultPrecompilesProcessor that store queue +/// of timestamp when precompiles are called to be executed. +/// Number of precompiles per block is strictly limited, +/// saving timestamps allows us to check the exact number +/// of log queries, that were used during the tx execution. 
+#[derive(Debug, Clone)] +pub struct PrecompilesProcessorWithHistory { + pub timestamp_history: HistoryRecorder>, + pub default_precompiles_processor: DefaultPrecompilesProcessor, +} + +impl OracleWithHistory for PrecompilesProcessorWithHistory { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + self.timestamp_history.rollback_to_timestamp(timestamp); + } + + fn delete_history(&mut self) { + self.timestamp_history.delete_history(); + } +} + +impl PrecompilesProcessorWithHistory { + pub fn new() -> Self { + Self { + timestamp_history: Default::default(), + default_precompiles_processor: DefaultPrecompilesProcessor {}, + } + } + pub fn get_timestamp_history(&self) -> &Vec { + self.timestamp_history.inner() + } +} + +impl Default for PrecompilesProcessorWithHistory { + fn default() -> Self { + Self::new() + } +} + +impl PrecompilesProcessor for PrecompilesProcessorWithHistory { + fn start_frame(&mut self) { + self.default_precompiles_processor.start_frame(); + } + fn execute_precompile( + &mut self, + monotonic_cycle_counter: u32, + query: LogQuery, + memory: &mut M, + ) -> Option<(Vec, Vec, PrecompileCyclesWitness)> { + // In the next line we same `query.timestamp` as both + // an operation in the history of precompiles processor and + // the time when this operation occured. + // While slightly weird, it is done for consistency with other oracles + // where operations and timestamp have different types. 
+ self.timestamp_history + .push(query.timestamp, query.timestamp); + self.default_precompiles_processor.execute_precompile( + monotonic_cycle_counter, + query, + memory, + ) + } + fn finish_frame(&mut self, _panicked: bool) { + self.default_precompiles_processor.finish_frame(_panicked); + } +} diff --git a/core/lib/vm/src/oracles/storage.rs b/core/lib/vm/src/oracles/storage.rs new file mode 100644 index 000000000000..9f0c23cfbf69 --- /dev/null +++ b/core/lib/vm/src/oracles/storage.rs @@ -0,0 +1,290 @@ +use std::collections::HashMap; + +use crate::storage::StoragePtr; + +use crate::history_recorder::{ + AppDataFrameManagerWithHistory, HashMapHistoryEvent, HistoryRecorder, StorageWrapper, +}; + +use zk_evm::abstractions::RefundedAmounts; +use zk_evm::zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES; +use zk_evm::{ + abstractions::{RefundType, Storage as VmStorageOracle}, + aux_structures::{LogQuery, Timestamp}, + reference_impls::event_sink::ApplicationData, +}; +use zksync_types::{ + AccountTreeId, Address, StorageKey, StorageLogQuery, StorageLogQueryType, U256, +}; +use zksync_utils::u256_to_h256; + +use super::OracleWithHistory; + +// While the storage does not support different shards, it was decided to write the +// code of the StorageOracle with the shard parameters in mind. +pub fn triplet_to_storage_key(_shard_id: u8, address: Address, key: U256) -> StorageKey { + StorageKey::new(AccountTreeId::new(address), u256_to_h256(key)) +} + +pub fn storage_key_of_log(query: &LogQuery) -> StorageKey { + triplet_to_storage_key(query.shard_id, query.address, query.key) +} + +#[derive(Debug)] +pub struct StorageOracle<'a> { + // Access to the persistent storage. Please note that it + // is used only for read access. All the actual writes happen + // after the execution ended. + pub storage: HistoryRecorder>, + + pub frames_stack: AppDataFrameManagerWithHistory, + + // The changes that have been paid for in previous transactions. 
+ // It is a mapping from storage key to the number of *bytes* that was paid by the user + // to cover this slot. + pub paid_changes: HistoryRecorder>, +} + +impl<'a> OracleWithHistory for StorageOracle<'a> { + fn rollback_to_timestamp(&mut self, timestamp: Timestamp) { + self.frames_stack.rollback_to_timestamp(timestamp); + self.storage.rollback_to_timestamp(timestamp); + self.paid_changes.rollback_to_timestamp(timestamp); + } + + fn delete_history(&mut self) { + self.frames_stack.delete_history(); + self.storage.delete_history(); + self.paid_changes.delete_history(); + } +} + +impl<'a> StorageOracle<'a> { + pub fn new(storage: StoragePtr<'a>) -> Self { + Self { + storage: HistoryRecorder::from_inner(StorageWrapper::new(storage)), + frames_stack: Default::default(), + paid_changes: Default::default(), + } + } + + fn is_storage_key_free(&self, key: &StorageKey) -> bool { + key.address() == &zksync_config::constants::SYSTEM_CONTEXT_ADDRESS + } + + pub fn read_value(&mut self, mut query: LogQuery) -> LogQuery { + let key = triplet_to_storage_key(query.shard_id, query.address, query.key); + let current_value = self.storage.read_from_storage(&key); + + query.read_value = current_value; + + self.frames_stack.push_forward( + StorageLogQuery { + log_query: query, + log_type: StorageLogQueryType::Read, + }, + query.timestamp, + ); + + query + } + + pub fn write_value(&mut self, mut query: LogQuery) -> LogQuery { + let key = triplet_to_storage_key(query.shard_id, query.address, query.key); + let current_value = + self.storage + .write_to_storage(key, query.written_value, query.timestamp); + + let log_query_type = if self.storage.get_ptr().borrow_mut().is_write_initial(&key) { + StorageLogQueryType::InitialWrite + } else { + StorageLogQueryType::RepeatedWrite + }; + + query.read_value = current_value; + + let mut storage_log_query = StorageLogQuery { + log_query: query, + log_type: log_query_type, + }; + self.frames_stack + .push_forward(storage_log_query, query.timestamp); 
+ storage_log_query.log_query.rollback = true; + self.frames_stack + .push_rollback(storage_log_query, query.timestamp); + storage_log_query.log_query.rollback = false; + + query + } + + // Returns the amount of funds that has been already paid for writes into the storage slot + fn prepaid_for_write(&self, storage_key: &StorageKey) -> u32 { + self.paid_changes + .inner() + .get(storage_key) + .copied() + .unwrap_or_default() + } + + pub(crate) fn base_price_for_write(&self, query: &LogQuery) -> u32 { + let storage_key = storage_key_of_log(query); + + if self.is_storage_key_free(&storage_key) { + return 0; + } + + let is_initial = self + .storage + .get_ptr() + .borrow_mut() + .is_write_initial(&storage_key); + + get_pubdata_price_bytes(query, is_initial) + } + + // Returns the price of the update in terms of pubdata bytes. + fn value_update_price(&self, query: &LogQuery) -> u32 { + let storage_key = storage_key_of_log(query); + + let base_cost = self.base_price_for_write(query); + + let already_paid = self.prepaid_for_write(&storage_key); + + if base_cost <= already_paid { + // Some other transaction has already paid for this slot, no need to pay anything + 0u32 + } else { + base_cost - already_paid + } + } +} + +impl<'a> VmStorageOracle for StorageOracle<'a> { + // Perform a storage read/write access by taking an partially filled query + // and returning filled query and cold/warm marker for pricing purposes + fn execute_partial_query( + &mut self, + _monotonic_cycle_counter: u32, + query: LogQuery, + ) -> LogQuery { + // vlog::trace!( + // "execute partial query cyc {:?} addr {:?} key {:?}, rw {:?}, wr {:?}, tx {:?}", + // _monotonic_cycle_counter, + // query.address, + // query.key, + // query.rw_flag, + // query.written_value, + // query.tx_number_in_block + // ); + assert!(!query.rollback); + if query.rw_flag { + // The number of bytes that have been compensated by the user to perform this write + let storage_key = storage_key_of_log(&query); + + // It is 
considered that the user has paid for the whole base price for the writes + let to_pay_by_user = self.base_price_for_write(&query); + let prepaid = self.prepaid_for_write(&storage_key); + + if to_pay_by_user > prepaid { + self.paid_changes.apply_historic_record( + HashMapHistoryEvent { + key: storage_key, + value: Some(to_pay_by_user), + }, + query.timestamp, + ); + } + self.write_value(query) + } else { + self.read_value(query) + } + } + + // We can return the size of the refund before each storage query. + // Note, that while the `RefundType` allows to provide refunds both in + // `ergs` and `pubdata`, only refunds in pubdata will be compensated for the users + fn estimate_refunds_for_write( + &mut self, // to avoid any hacks inside, like prefetch + _monotonic_cycle_counter: u32, + partial_query: &LogQuery, + ) -> RefundType { + let price_to_pay = self.value_update_price(partial_query); + + RefundType::RepeatedWrite(RefundedAmounts { + ergs: 0, + // `INITIAL_STORAGE_WRITE_PUBDATA_BYTES` is the default amount of pubdata bytes the user pays for. 
+ pubdata_bytes: (INITIAL_STORAGE_WRITE_PUBDATA_BYTES as u32) - price_to_pay, + }) + } + + // Indicate a start of execution frame for rollback purposes + fn start_frame(&mut self, timestamp: Timestamp) { + self.frames_stack.push_frame(timestamp); + } + + // Indicate that execution frame went out from the scope, so we can + // log the history and either rollback immediately or keep records to rollback later + fn finish_frame(&mut self, timestamp: Timestamp, panicked: bool) { + // If we panic then we append forward and rollbacks to the forward of parent, + // otherwise we place rollbacks of child before rollbacks of the parent + let current_frame = self.frames_stack.drain_frame(timestamp); + let ApplicationData { forward, rollbacks } = current_frame; + + if panicked { + // perform actual rollback + for query in rollbacks.iter().rev() { + let read_value = match query.log_type { + StorageLogQueryType::Read => { + // Having Read logs in rollback is not possible + vlog::warn!("Read log in rollback queue {:?}", query); + continue; + } + StorageLogQueryType::InitialWrite | StorageLogQueryType::RepeatedWrite => { + query.log_query.read_value + } + }; + + let LogQuery { written_value, .. } = query.log_query; + let key = triplet_to_storage_key( + query.log_query.shard_id, + query.log_query.address, + query.log_query.key, + ); + let current_value = self.storage.write_to_storage( + key, + // NOTE, that since it is a rollback query, + // the `read_value` is being set + read_value, timestamp, + ); + + // Additional validation that the current value was correct + // Unwrap is safe because the return value from write_inner is the previous value in this leaf. 
+ // It is impossible to set leaf value to `None` + assert_eq!(current_value, written_value); + } + + for query in forward { + self.frames_stack.push_forward(query, timestamp) + } + for query in rollbacks.into_iter().rev() { + self.frames_stack.push_forward(query, timestamp) + } + } else { + for query in forward { + self.frames_stack.push_forward(query, timestamp) + } + for query in rollbacks { + self.frames_stack.push_rollback(query, timestamp) + } + } + } +} + +fn get_pubdata_price_bytes(_query: &LogQuery, is_initial: bool) -> u32 { + // should cost less. + if is_initial { + zk_evm::zkevm_opcode_defs::system_params::INITIAL_STORAGE_WRITE_PUBDATA_BYTES as u32 + } else { + zk_evm::zkevm_opcode_defs::system_params::REPEATED_STORAGE_WRITE_PUBDATA_BYTES as u32 + } +} diff --git a/core/lib/vm/src/oracles/tracer.rs b/core/lib/vm/src/oracles/tracer.rs new file mode 100644 index 000000000000..4fe7c798c643 --- /dev/null +++ b/core/lib/vm/src/oracles/tracer.rs @@ -0,0 +1,808 @@ +use std::{ + collections::HashSet, + fmt::{self, Display}, +}; + +use crate::{ + errors::VmRevertReasonParsingResult, + memory::SimpleMemory, + storage::StoragePtr, + utils::{aux_heap_page_from_base, heap_page_from_base}, + vm::{get_vm_hook_params, VM_HOOK_POSITION}, + vm_with_bootloader::BOOTLOADER_HEAP_PAGE, +}; +// use zk_evm::testing::memory::SimpleMemory; +use zk_evm::{ + abstractions::{ + AfterDecodingData, AfterExecutionData, BeforeExecutionData, Tracer, VmLocalStateData, + }, + aux_structures::MemoryPage, + vm_state::{ErrorFlags, VmLocalState}, + witness_trace::{DummyTracer, VmWitnessTracer}, + zkevm_opcode_defs::{ + decoding::VmEncodingMode, ContextOpcode, FarCallABI, FarCallForwardPageType, FatPointer, + LogOpcode, Opcode, RetOpcode, UMAOpcode, + }, +}; +use zksync_types::{ + get_code_key, web3::signing::keccak256, AccountTreeId, Address, StorageKey, + ACCOUNT_CODE_STORAGE_ADDRESS, BOOTLOADER_ADDRESS, CONTRACT_DEPLOYER_ADDRESS, H256, + KECCAK256_PRECOMPILE_ADDRESS, L2_ETH_TOKEN_ADDRESS, 
MSG_VALUE_SIMULATOR_ADDRESS, + SYSTEM_CONTEXT_ADDRESS, U256, +}; +use zksync_utils::{ + be_bytes_to_safe_address, h256_to_account_address, u256_to_account_address, u256_to_h256, +}; + +pub trait ExecutionEndTracer: Tracer { + // Returns whether the vm execution should stop. + fn should_stop_execution(&self) -> bool; +} + +pub trait PendingRefundTracer: Tracer { + // Some(x) means that the bootloader has asked the operator to provide the refund for the + // transaction, where `x` is the refund that the bootloader has suggested on its own. + fn requested_refund(&self) -> Option { + None + } + + // Set the current request for refund as fulfilled + fn set_refund_as_done(&mut self) {} +} + +#[derive(Debug, Clone)] +pub struct NoopMemoryTracer; + +impl> VmWitnessTracer for NoopMemoryTracer {} + +impl Tracer for NoopMemoryTracer { + type SupportedMemory = SimpleMemory; + const CALL_BEFORE_EXECUTION: bool = true; + + fn before_decoding(&mut self, _state: VmLocalStateData<'_>, _memory: &Self::SupportedMemory) {} + fn after_decoding( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterDecodingData, + _memory: &Self::SupportedMemory, + ) { + } + fn before_execution( + &mut self, + state: VmLocalStateData<'_>, + data: BeforeExecutionData, + memory: &Self::SupportedMemory, + ) { + let hook = VmHook::from_opcode_memory(&state, &data); + print_debug_if_needed(&hook, &state, memory); + } + fn after_execution( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterExecutionData, + _memory: &Self::SupportedMemory, + ) { + } +} + +impl ExecutionEndTracer for NoopMemoryTracer { + fn should_stop_execution(&self) -> bool { + // This tracer will not prevent the execution from going forward + // until the end of the block. + false + } +} + +impl PendingRefundTracer for NoopMemoryTracer {} + +#[derive(Debug, Clone, Eq, PartialEq, Copy)] +pub enum ValidationTracerMode { + // Should be activated when the transaction is being validated by user. 
+ UserTxValidation, + // Should be activated when the transaction is being validated by the paymaster. + PaymasterTxValidation, + // Is a state when there are no restrictions on the execution. + NoValidation, +} + +#[derive(Debug, Clone)] +pub enum ViolatedValidationRule { + TouchedUnallowedStorageSlots(Address, U256), + CalledContractWithNoCode(Address), + TouchedUnallowedContext, +} + +pub enum ValidationError { + FailedTx(VmRevertReasonParsingResult), + VioalatedRule(ViolatedValidationRule), +} + +impl Display for ViolatedValidationRule { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ViolatedValidationRule::TouchedUnallowedStorageSlots(contract, key) => write!( + f, + "Touched unallowed storage slots: address {}, key: {}", + hex::encode(contract), + hex::encode(u256_to_h256(*key)) + ), + ViolatedValidationRule::CalledContractWithNoCode(contract) => { + write!(f, "Called contract with no code: {}", hex::encode(contract)) + } + ViolatedValidationRule::TouchedUnallowedContext => { + write!(f, "Touched unallowed context") + } + } + } +} + +impl Display for ValidationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::FailedTx(revert_reason) => { + write!(f, "Validation revert: {}", revert_reason.revert_reason) + } + Self::VioalatedRule(rule) => { + write!(f, "Violated validation rules: {}", rule) + } + } + } +} + +fn touches_allowed_context(address: Address, key: U256) -> bool { + // Context is not touched at all + if address != SYSTEM_CONTEXT_ADDRESS { + return false; + } + + // Only chain_id is allowed to be touched. 
+ key == U256::from(0u32) +} + +fn is_constant_code_hash(address: Address, key: U256, storage: StoragePtr<'_>) -> bool { + if address != ACCOUNT_CODE_STORAGE_ADDRESS { + // Not a code hash + return false; + } + + let value = storage.borrow_mut().get_value(&StorageKey::new( + AccountTreeId::new(address), + u256_to_h256(key), + )); + + value != H256::zero() +} + +fn valid_eth_token_call(address: Address, msg_sender: Address) -> bool { + let is_valid_caller = msg_sender == MSG_VALUE_SIMULATOR_ADDRESS + || msg_sender == CONTRACT_DEPLOYER_ADDRESS + || msg_sender == BOOTLOADER_ADDRESS; + address == L2_ETH_TOKEN_ADDRESS && is_valid_caller +} + +/// Tracer that is used to ensure that the validation adheres to all the rules +/// to prevent DDoS attacks on the server. +#[derive(Clone)] +pub struct ValidationTracer<'a> { + // A copy of it should be used in the Storage oracle + pub storage: StoragePtr<'a>, + pub validation_mode: ValidationTracerMode, + pub auxilary_allowed_slots: HashSet, + pub validation_error: Option, + + user_address: Address, + paymaster_address: Address, + should_stop_execution: bool, + trusted_slots: HashSet<(Address, U256)>, + trusted_addresses: HashSet

, + trusted_address_slots: HashSet<(Address, U256)>, +} + +impl fmt::Debug for ValidationTracer<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ValidationTracer") + .field("storage", &"StoragePtr") + .field("validation_mode", &self.validation_mode) + .field("auxilary_allowed_slots", &self.auxilary_allowed_slots) + .field("validation_error", &self.validation_error) + .field("user_address", &self.user_address) + .field("paymaster_address", &self.paymaster_address) + .field("should_stop_execution", &self.should_stop_execution) + .field("trusted_slots", &self.trusted_slots) + .field("trusted_addresses", &self.trusted_addresses) + .field("trusted_address_slots", &self.trusted_address_slots) + .finish() + } +} + +#[derive(Debug, Clone)] +pub struct ValidationTracerParams { + pub user_address: Address, + pub paymaster_address: Address, + /// Slots that are trusted (i.e. the user can access them). + pub trusted_slots: HashSet<(Address, U256)>, + /// Trusted addresses (the user can access any slots on these addresses). + pub trusted_addresses: HashSet
, + /// Slots, that are trusted and the value of them is the new trusted address. + /// They are needed to work correctly with beacon proxy, where the address of the implementation is + /// stored in the beacon. + pub trusted_address_slots: HashSet<(Address, U256)>, +} + +#[derive(Debug, Clone, Default)] +pub struct NewTrustedValidationItems { + pub new_allowed_slots: Vec, + pub new_trusted_addresses: Vec
, +} + +type ValidationRoundResult = Result; + +impl<'a> ValidationTracer<'a> { + pub fn new(storage: StoragePtr<'a>, params: ValidationTracerParams) -> Self { + ValidationTracer { + storage, + validation_error: None, + validation_mode: ValidationTracerMode::NoValidation, + auxilary_allowed_slots: Default::default(), + + should_stop_execution: false, + user_address: params.user_address, + paymaster_address: params.paymaster_address, + trusted_slots: params.trusted_slots, + trusted_addresses: params.trusted_addresses, + trusted_address_slots: params.trusted_address_slots, + } + } + + fn process_validation_round_result(&mut self, result: ValidationRoundResult) { + match result { + Ok(NewTrustedValidationItems { + new_allowed_slots, + new_trusted_addresses, + }) => { + self.auxilary_allowed_slots.extend(new_allowed_slots); + self.trusted_addresses.extend(new_trusted_addresses); + } + Err(err) => { + self.validation_error = Some(err); + } + } + } + + // Checks whether such storage access is acceptable. + fn is_allowed_storage_read(&self, address: Address, key: U256, msg_sender: Address) -> bool { + // If there are no restrictions, all storage reads are valid. + // We also don't support the paymaster validation for now. + if matches!( + self.validation_mode, + ValidationTracerMode::NoValidation | ValidationTracerMode::PaymasterTxValidation + ) { + return true; + } + + // The pair of MSG_VALUE_SIMULATOR_ADDRESS & L2_ETH_TOKEN_ADDRESS simulates the behavior of transfering ETH + // that is safe for the DDoS protection rules. + if valid_eth_token_call(address, msg_sender) { + return true; + } + + if self.trusted_slots.contains(&(address, key)) + || self.trusted_addresses.contains(&address) + || self.trusted_address_slots.contains(&(address, key)) + { + return true; + } + + if touches_allowed_context(address, key) { + return true; + } + + // The user is allowed to touch its own slots or slots semantically related to him. 
+ let valid_users_slot = address == self.user_address + || u256_to_account_address(&key) == self.user_address + || self.auxilary_allowed_slots.contains(&u256_to_h256(key)); + if valid_users_slot { + return true; + } + + if is_constant_code_hash(address, key, self.storage.clone()) { + return true; + } + + false + } + + // Used to remember user-related fields (its balance/allowance/etc). + // Note that it assumes that the length of the calldata is 64 bytes. + fn slot_to_add_from_keccak_call( + &self, + calldata: &[u8], + validated_address: Address, + ) -> Option { + assert_eq!(calldata.len(), 64); + + let (potential_address_bytes, potential_position_bytes) = calldata.split_at(32); + let potential_address = be_bytes_to_safe_address(potential_address_bytes); + + // If the validation_address is equal to the potential_address, + // then it is a request that could be used for mapping of kind mapping(address => ...). + // + // If the potential_position_bytes were already allowed before, then this keccak might be used + // for ERC-20 allowance or any other of mapping(address => mapping(...)) + if potential_address == Some(validated_address) + || self + .auxilary_allowed_slots + .contains(&H256::from_slice(potential_position_bytes)) + { + // This is request that could be used for mapping of kind mapping(address => ...) + + // We could theoretically wait for the slot number to be returned by the + // keccak256 precompile itself, but this would complicate the code even further + // so let's calculate it here. 
+ let slot = keccak256(calldata); + + // Adding this slot to the allowed ones + Some(H256(slot)) + } else { + None + } + } + + pub fn check_user_restrictions( + &mut self, + state: VmLocalStateData<'_>, + data: BeforeExecutionData, + memory: &SimpleMemory, + ) -> ValidationRoundResult { + let opcode_variant = data.opcode.variant; + match opcode_variant.opcode { + Opcode::FarCall(_) => { + let packed_abi = data.src0_value.value; + let call_destination_value = data.src1_value.value; + + let called_address = u256_to_account_address(&call_destination_value); + let far_call_abi = FarCallABI::from_u256(packed_abi); + + if called_address == KECCAK256_PRECOMPILE_ADDRESS + && far_call_abi.memory_quasi_fat_pointer.length == 64 + { + let calldata_page = get_calldata_page_via_abi( + &far_call_abi, + state.vm_local_state.callstack.current.base_memory_page, + ); + let calldata = memory.read_unaligned_bytes( + calldata_page as usize, + far_call_abi.memory_quasi_fat_pointer.start as usize, + 64, + ); + + let slot_to_add = + self.slot_to_add_from_keccak_call(&calldata, self.user_address); + + if let Some(slot) = slot_to_add { + return Ok(NewTrustedValidationItems { + new_allowed_slots: vec![slot], + ..Default::default() + }); + } + } else if called_address != self.user_address { + let code_key = get_code_key(&called_address); + let code = self.storage.borrow_mut().get_value(&code_key); + + if code == H256::zero() { + // The users are not allowed to call contracts with no code + return Err(ViolatedValidationRule::CalledContractWithNoCode( + called_address, + )); + } + } + } + Opcode::Context(context) => { + match context { + ContextOpcode::Meta => { + return Err(ViolatedValidationRule::TouchedUnallowedContext); + } + ContextOpcode::ErgsLeft => { + } + _ => {} + } + } + Opcode::Log(LogOpcode::StorageRead) => { + let key = data.src0_value.value; + let this_address = state.vm_local_state.callstack.current.this_address; + let msg_sender = 
state.vm_local_state.callstack.current.msg_sender; + + if !self.is_allowed_storage_read(this_address, key, msg_sender) { + return Err(ViolatedValidationRule::TouchedUnallowedStorageSlots( + this_address, + key, + )); + } + + if self.trusted_address_slots.contains(&(this_address, key)) { + let storage_key = + StorageKey::new(AccountTreeId::new(this_address), u256_to_h256(key)); + + let value = self.storage.borrow_mut().get_value(&storage_key); + + return Ok(NewTrustedValidationItems { + new_trusted_addresses: vec![h256_to_account_address(&value)], + ..Default::default() + }); + } + } + _ => {} + } + + Ok(Default::default()) + } +} + +impl Tracer for ValidationTracer<'_> { + const CALL_BEFORE_EXECUTION: bool = true; + + type SupportedMemory = SimpleMemory; + fn before_decoding(&mut self, _state: VmLocalStateData<'_>, _memory: &Self::SupportedMemory) {} + fn after_decoding( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterDecodingData, + _memory: &Self::SupportedMemory, + ) { + } + fn before_execution( + &mut self, + state: VmLocalStateData<'_>, + data: BeforeExecutionData, + memory: &Self::SupportedMemory, + ) { + // For now, we support only validations for users. + if let ValidationTracerMode::UserTxValidation = self.validation_mode { + let validation_round_result = self.check_user_restrictions(state, data, memory); + self.process_validation_round_result(validation_round_result); + } + + let hook = VmHook::from_opcode_memory(&state, &data); + print_debug_if_needed(&hook, &state, memory); + + let current_mode = self.validation_mode; + match (current_mode, hook) { + (ValidationTracerMode::NoValidation, VmHook::AccountValidationEntered) => { + // Account validation can be entered when there is no prior validation (i.e. 
"nested" validations are not allowed) + self.validation_mode = ValidationTracerMode::UserTxValidation; + } + (ValidationTracerMode::NoValidation, VmHook::PaymasterValidationEntered) => { + // Paymaster validation can be entered when there is no prior validation (i.e. "nested" validations are not allowed) + self.validation_mode = ValidationTracerMode::PaymasterTxValidation; + } + (_, VmHook::AccountValidationEntered | VmHook::PaymasterValidationEntered) => { + panic!( + "Unallowed transition inside the validation tracer. Mode: {:#?}, hook: {:#?}", + self.validation_mode, hook + ); + } + (_, VmHook::NoValidationEntered) => { + // Validation can be always turned off + self.validation_mode = ValidationTracerMode::NoValidation; + } + (_, VmHook::ValidationStepEndeded) => { + // The validation step has ended. + self.should_stop_execution = true; + } + (_, _) => { + // The hook is not relevant to the validation tracer. Ignore. + } + } + } + fn after_execution( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterExecutionData, + _memory: &Self::SupportedMemory, + ) { + } +} + +fn get_calldata_page_via_abi(far_call_abi: &FarCallABI, base_page: MemoryPage) -> u32 { + match far_call_abi.forwarding_mode { + FarCallForwardPageType::ForwardFatPointer => { + far_call_abi.memory_quasi_fat_pointer.memory_page + } + FarCallForwardPageType::UseAuxHeap => aux_heap_page_from_base(base_page).0, + FarCallForwardPageType::UseHeap => heap_page_from_base(base_page).0, + } +} + +impl ExecutionEndTracer for ValidationTracer<'_> { + fn should_stop_execution(&self) -> bool { + self.should_stop_execution || self.validation_error.is_some() + } +} + +impl PendingRefundTracer for ValidationTracer<'_> {} + +/// Allows any opcodes, but tells the VM to end the execution once the tx is over. 
+#[derive(Debug, Clone, Default)] +pub struct OneTxTracer { + tx_has_been_processed: bool, + + // Some(x) means that the bootloader has asked the operator + // to provide the refund the user, where `x` is the refund proposed + // by the bootloader itself. + pending_operator_refund: Option, + + pub operator_suggested_refund_gas: u32, + + pub refund_gas: u32, + bootloader_tracer: BootloaderTracer, +} + +impl Tracer for OneTxTracer { + const CALL_BEFORE_EXECUTION: bool = true; + const CALL_AFTER_EXECUTION: bool = true; + type SupportedMemory = SimpleMemory; + + fn before_decoding(&mut self, _state: VmLocalStateData<'_>, _memory: &Self::SupportedMemory) {} + fn after_decoding( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterDecodingData, + _memory: &Self::SupportedMemory, + ) { + } + + fn before_execution( + &mut self, + state: VmLocalStateData<'_>, + data: BeforeExecutionData, + memory: &Self::SupportedMemory, + ) { + let hook = VmHook::from_opcode_memory(&state, &data); + print_debug_if_needed(&hook, &state, memory); + + match hook { + VmHook::TxHasEnded => self.tx_has_been_processed = true, + VmHook::NotifyAboutRefund => self.refund_gas = get_vm_hook_params(memory)[0].as_u32(), + VmHook::AskOperatorForRefund => { + self.pending_operator_refund = Some(get_vm_hook_params(memory)[0].as_u32()) + } + _ => {} + } + } + + fn after_execution( + &mut self, + state: VmLocalStateData<'_>, + data: AfterExecutionData, + memory: &Self::SupportedMemory, + ) { + self.bootloader_tracer.after_execution(state, data, memory) + } +} + +impl ExecutionEndTracer for OneTxTracer { + fn should_stop_execution(&self) -> bool { + self.tx_has_been_processed || self.bootloader_tracer.should_stop_execution() + } +} + +impl PendingRefundTracer for OneTxTracer { + fn requested_refund(&self) -> Option { + self.pending_operator_refund + } + + fn set_refund_as_done(&mut self) { + self.pending_operator_refund = None; + } +} + +impl OneTxTracer { + pub fn is_bootloader_out_of_gas(&self) -> 
bool { + self.bootloader_tracer.is_bootloader_out_of_gas() + } + + pub fn tx_has_been_processed(&self) -> bool { + self.tx_has_been_processed + } +} + +/// Tells the VM to end the execution before `ret` from the booloader if there is no panic or revert. +/// Also, saves the information if this `ret` was caused by "out of gas" panic. +#[derive(Debug, Clone, Default)] +pub struct BootloaderTracer { + is_bootloader_out_of_gas: bool, + ret_from_the_bootloader: Option, +} + +impl Tracer for BootloaderTracer { + const CALL_AFTER_DECODING: bool = true; + const CALL_AFTER_EXECUTION: bool = true; + type SupportedMemory = SimpleMemory; + + fn before_decoding(&mut self, _state: VmLocalStateData<'_>, _memory: &Self::SupportedMemory) {} + fn after_decoding( + &mut self, + state: VmLocalStateData<'_>, + data: AfterDecodingData, + _memory: &Self::SupportedMemory, + ) { + // We should check not only for the `NOT_ENOUGH_ERGS` flag but if the current frame is bootloader too. + if Self::current_frame_is_bootloader(state.vm_local_state) + && data + .error_flags_accumulated + .contains(ErrorFlags::NOT_ENOUGH_ERGS) + { + self.is_bootloader_out_of_gas = true; + } + } + + fn before_execution( + &mut self, + _state: VmLocalStateData<'_>, + _data: BeforeExecutionData, + _memory: &Self::SupportedMemory, + ) { + } + + fn after_execution( + &mut self, + state: VmLocalStateData<'_>, + _data: AfterExecutionData, + memory: &Self::SupportedMemory, + ) { + // Decodes next opcode. + // `self` is passed as `tracer`, so `self.after_decoding` will be called and it will catch "out of gas". 
+ let (next_opcode, _, _) = + zk_evm::vm_state::read_and_decode(state.vm_local_state, memory, &mut DummyTracer, self); + if Self::current_frame_is_bootloader(state.vm_local_state) { + if let Opcode::Ret(ret) = next_opcode.inner.variant.opcode { + self.ret_from_the_bootloader = Some(ret); + } + } + } +} + +impl ExecutionEndTracer for BootloaderTracer { + fn should_stop_execution(&self) -> bool { + self.ret_from_the_bootloader == Some(RetOpcode::Ok) + } +} + +impl PendingRefundTracer for BootloaderTracer {} + +impl BootloaderTracer { + fn current_frame_is_bootloader(local_state: &VmLocalState) -> bool { + // The current frame is bootloader if the callstack depth is 1. + // Some of the near calls inside the bootloader can be out of gas, which is totally normal behavior + // and it shouldn't result in `is_bootloader_out_of_gas` becoming true. + local_state.callstack.inner.len() == 1 + } + + pub fn is_bootloader_out_of_gas(&self) -> bool { + self.is_bootloader_out_of_gas + } + + pub fn bootloader_panicked(&self) -> bool { + self.ret_from_the_bootloader == Some(RetOpcode::Panic) + } +} + +#[derive(Clone, Debug, Copy)] +pub(crate) enum VmHook { + AccountValidationEntered, + PaymasterValidationEntered, + NoValidationEntered, + ValidationStepEndeded, + TxHasEnded, + DebugLog, + DebugReturnData, + NoHook, + NearCallCatch, + AskOperatorForRefund, + NotifyAboutRefund, + ExecutionResult, +} + +impl VmHook { + pub fn from_opcode_memory(state: &VmLocalStateData<'_>, data: &BeforeExecutionData) -> Self { + let opcode_variant = data.opcode.variant; + let heap_page = + heap_page_from_base(state.vm_local_state.callstack.current.base_memory_page).0; + + let src0_value = data.src0_value.value; + + let fat_ptr = FatPointer::from_u256(src0_value); + + let value = data.src1_value.value; + + // Only UMA opcodes in the bootloader serve for vm hooks + if !matches!(opcode_variant.opcode, Opcode::UMA(UMAOpcode::HeapWrite)) + || heap_page != BOOTLOADER_HEAP_PAGE + || fat_ptr.offset != 
VM_HOOK_POSITION * 32 + { + return Self::NoHook; + } + + match value.as_u32() { + 0 => Self::AccountValidationEntered, + 1 => Self::PaymasterValidationEntered, + 2 => Self::NoValidationEntered, + 3 => Self::ValidationStepEndeded, + 4 => Self::TxHasEnded, + 5 => Self::DebugLog, + 6 => Self::DebugReturnData, + 7 => Self::NearCallCatch, + 8 => Self::AskOperatorForRefund, + 9 => Self::NotifyAboutRefund, + 10 => Self::ExecutionResult, + _ => panic!("Unkown hook"), + } + } +} + +fn get_debug_log(state: &VmLocalStateData<'_>, memory: &SimpleMemory) -> String { + let vm_hook_params: Vec<_> = get_vm_hook_params(memory) + .into_iter() + .map(u256_to_h256) + .collect(); + let msg = vm_hook_params[0].as_bytes().to_vec(); + let data = vm_hook_params[1].as_bytes().to_vec(); + + let msg = String::from_utf8(msg).expect("Invalid debug message"); + let data = U256::from_big_endian(&data); + + // For long data, it is better to use hex-encoding for greater readibility + let data_str = if data > U256::from(u64::max_value()) { + let mut bytes = [0u8; 32]; + data.to_big_endian(&mut bytes); + format!("0x{}", hex::encode(bytes)) + } else { + data.to_string() + }; + + let tx_id = state.vm_local_state.tx_number_in_block; + + format!("Bootloader transaction {}: {} {}", tx_id, msg, data_str) +} + +/// Reads the memory slice represented by the fat pointer. +/// Note, that the fat pointer must point to the accesible memory (i.e. not cleared up yet). +pub(crate) fn read_pointer(memory: &SimpleMemory, pointer: FatPointer) -> Vec { + let FatPointer { + offset, + length, + start, + memory_page, + } = pointer; + + // The actual bounds of the returndata ptr is [start+offset..start+length] + let mem_region_start = start + offset; + let mem_region_length = length - offset; + + memory.read_unaligned_bytes( + memory_page as usize, + mem_region_start as usize, + mem_region_length as usize, + ) +} + +/// Outputs the returndata for the latest call. +/// This is usually used to output the revert reason. 
+fn get_debug_returndata(memory: &SimpleMemory) -> String { + let vm_hook_params: Vec<_> = get_vm_hook_params(memory); + let returndata_ptr = FatPointer::from_u256(vm_hook_params[0]); + let returndata = read_pointer(memory, returndata_ptr); + + format!("0x{}", hex::encode(returndata)) +} + +/// Accepts a vm hook and, if it requires to output some debug log, outputs it. +fn print_debug_if_needed(hook: &VmHook, state: &VmLocalStateData<'_>, memory: &SimpleMemory) { + let log = match hook { + VmHook::DebugLog => get_debug_log(state, memory), + VmHook::DebugReturnData => get_debug_returndata(memory), + _ => return, + }; + + vlog::trace!("{}", log); +} diff --git a/core/lib/vm/src/pubdata_utils.rs b/core/lib/vm/src/pubdata_utils.rs new file mode 100644 index 000000000000..6051c03686f6 --- /dev/null +++ b/core/lib/vm/src/pubdata_utils.rs @@ -0,0 +1,94 @@ +use crate::oracles::storage::storage_key_of_log; +use crate::utils::collect_storage_log_queries_after_timestamp; +use crate::VmInstance; +use std::collections::HashMap; +use zk_evm::aux_structures::Timestamp; +use zksync_types::event::{extract_long_l2_to_l1_messages, extract_published_bytecodes}; +use zksync_types::StorageKey; +use zksync_utils::bytecode::bytecode_len_in_bytes; + +impl<'a> VmInstance<'a> { + pub fn pubdata_used(&self, from_timestamp: Timestamp) -> u32 { + let storage_writes_pubdata_used = self.pubdata_used_for_writes(from_timestamp); + + let (events, l2_to_l1_logs) = + self.collect_events_and_l1_logs_after_timestamp(from_timestamp); + let l2_l1_logs_bytes = (l2_to_l1_logs.len() as u32) + * zk_evm::zkevm_opcode_defs::system_params::L1_MESSAGE_PUBDATA_BYTES; + let l2_l1_long_messages_bytes: u32 = extract_long_l2_to_l1_messages(&events) + .iter() + .map(|event| event.len() as u32) + .sum(); + + let published_bytecode_bytes: u32 = extract_published_bytecodes(&events) + .iter() + .map(|bytecodehash| bytecode_len_in_bytes(*bytecodehash) as u32) + .sum(); + + storage_writes_pubdata_used + + l2_l1_logs_bytes + 
+ l2_l1_long_messages_bytes + + published_bytecode_bytes + } + + fn pubdata_used_for_writes(&self, from_timestamp: Timestamp) -> u32 { + // This `HashMap` contains how much was already paid for every slot that was paid during the last tx execution. + // For the slots that weren't paid during the last tx execution we can just use + // `self.state.storage.paid_changes.inner().get(&key)` to get how much it was paid before. + let pre_paid_before_tx_map: HashMap = self + .state + .storage + .paid_changes + .history() + .iter() + .rev() + .take_while(|history_elem| history_elem.0 >= from_timestamp) + .map(|history_elem| (history_elem.1.key, history_elem.1.value.unwrap_or(0))) + .collect(); + let pre_paid_before_tx = |key: &StorageKey| -> u32 { + if let Some(pre_paid) = pre_paid_before_tx_map.get(key) { + *pre_paid + } else { + self.state + .storage + .paid_changes + .inner() + .get(key) + .copied() + .unwrap_or(0) + } + }; + + let storage_logs = collect_storage_log_queries_after_timestamp( + &self + .state + .storage + .frames_stack + .inner() + .current_frame() + .forward, + from_timestamp, + ); + let (_, deduplicated_logs) = + zksync_types::log_query_sorter::sort_storage_access_queries(&storage_logs); + + deduplicated_logs + .into_iter() + .filter_map(|log| { + if log.log_query.rw_flag { + let key = storage_key_of_log(&log.log_query); + let pre_paid = pre_paid_before_tx(&key); + let to_pay_by_user = self.state.storage.base_price_for_write(&log.log_query); + + if to_pay_by_user > pre_paid { + Some(to_pay_by_user - pre_paid) + } else { + None + } + } else { + None + } + }) + .sum() + } +} diff --git a/core/lib/vm/src/storage.rs b/core/lib/vm/src/storage.rs new file mode 100644 index 000000000000..3156f58bbbca --- /dev/null +++ b/core/lib/vm/src/storage.rs @@ -0,0 +1,63 @@ +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt::Debug; +use std::rc::Rc; + +use zksync_state::storage_view::StorageView; +use zksync_types::{Address, StorageKey, StorageValue, 
ZkSyncReadStorage, H256}; + +pub trait Storage: Debug + Sync + Send { + fn get_value(&mut self, key: &StorageKey) -> StorageValue; + // Returns the original value. + fn set_value(&mut self, key: &StorageKey, value: StorageValue) -> StorageValue; + fn is_write_initial(&mut self, key: &StorageKey) -> bool; + fn load_contract(&mut self, address: Address) -> Option>; + fn save_contract(&mut self, address: Address, bytecode: Vec); + fn load_factory_dep(&mut self, hash: H256) -> Option>; + fn save_factory_dep(&mut self, hash: H256, bytecode: Vec); + + fn number_of_updated_storage_slots(&self) -> usize; + + fn get_modified_storage_keys(&self) -> &HashMap; +} + +impl Storage for StorageView { + fn get_value(&mut self, key: &StorageKey) -> StorageValue { + self.get_value(key) + } + + /// Returns the original value. + fn set_value(&mut self, key: &StorageKey, value: StorageValue) -> StorageValue { + self.set_value(key, value) + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + self.is_write_initial(key) + } + + fn load_contract(&mut self, address: Address) -> Option> { + self.load_contract(address) + } + + fn save_contract(&mut self, address: Address, bytecode: Vec) { + self.save_contract(address, bytecode); + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + self.load_factory_dep(hash) + } + + fn save_factory_dep(&mut self, hash: H256, bytecode: Vec) { + self.save_factory_dep(hash, bytecode); + } + + fn number_of_updated_storage_slots(&self) -> usize { + self.get_modified_storage_keys().len() + } + + fn get_modified_storage_keys(&self) -> &HashMap { + self.get_modified_storage_keys() + } +} + +pub type StoragePtr<'a> = Rc>; diff --git a/core/lib/vm/src/test_utils.rs b/core/lib/vm/src/test_utils.rs new file mode 100644 index 000000000000..907dcae8f0fc --- /dev/null +++ b/core/lib/vm/src/test_utils.rs @@ -0,0 +1,331 @@ +//! +//! This file contains various utilities +//! that could be used for testing, but are not needed anywhere else. +//! +//! 
They are not put into the `cfg(test)` folder to allow easy sharing of the content +//! of this file with other crates. +//! + +use std::collections::HashMap; + +use itertools::Itertools; +use zk_evm::{ + aux_structures::Timestamp, reference_impls::event_sink::ApplicationData, vm_state::VmLocalState, +}; +use zksync_contracts::{deployer_contract, get_loadnext_contract, load_contract}; +use zksync_types::{ + ethabi::{Address, Token}, + fee::Fee, + l2::L2Tx, + web3::signing::keccak256, + Execute, L2ChainId, Nonce, StorageKey, StorageLogQuery, StorageValue, + CONTRACT_DEPLOYER_ADDRESS, H256, U256, +}; +use zksync_utils::{ + address_to_h256, bytecode::hash_bytecode, h256_to_account_address, + test_utils::LoadnextContractExecutionParams, u256_to_h256, +}; + +/// The tests here help us with the testing the VM +use crate::{ + event_sink::InMemoryEventSink, + history_recorder::{FrameManager, HistoryRecorder}, + memory::SimpleMemory, + VmInstance, +}; + +#[derive(Clone, Debug)] +pub struct ModifiedKeysMap(HashMap); + +// We consider hashmaps to be equal even if there is a key +// that is not present in one but has zero value in another. +impl PartialEq for ModifiedKeysMap { + fn eq(&self, other: &Self) -> bool { + for (key, value) in self.0.iter() { + if *value != other.0.get(key).cloned().unwrap_or_default() { + return false; + } + } + for (key, value) in other.0.iter() { + if *value != self.0.get(key).cloned().unwrap_or_default() { + return false; + } + } + true + } +} + +#[derive(Clone, PartialEq, Debug)] +pub struct DecommitterTestInnerState { + /// There is no way to "trully" compare the storage pointer, + /// so we just compare the modified keys. This is reasonable enough. 
+ pub modified_storage_keys: ModifiedKeysMap, + pub known_bytecodes: HistoryRecorder>>, + pub decommitted_code_hashes: HistoryRecorder>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct StorageOracleInnerState { + /// There is no way to "trully" compare the storage pointer, + /// so we just compare the modified keys. This is reasonable enough. + pub modified_storage_keys: ModifiedKeysMap, + + pub frames_stack: HistoryRecorder>>, +} + +#[derive(Clone, PartialEq, Debug)] +pub struct PrecompileProcessorTestInnerState { + pub timestamp_history: HistoryRecorder>, +} + +/// A struct that encapsulates the state of the VM's oracles +/// The state is to be used in tests. +#[derive(Clone, PartialEq, Debug)] +pub struct VmInstanceInnerState { + event_sink: InMemoryEventSink, + precompile_processor_state: PrecompileProcessorTestInnerState, + memory: SimpleMemory, + decommitter_state: DecommitterTestInnerState, + storage_oracle_state: StorageOracleInnerState, + local_state: VmLocalState, +} + +impl<'a> VmInstance<'a> { + /// This method is mostly to be used in tests. It dumps the inner state of all the oracles and the VM itself. 
+ pub fn dump_inner_state(&self) -> VmInstanceInnerState { + let event_sink = self.state.event_sink.clone(); + let precompile_processor_state = PrecompileProcessorTestInnerState { + timestamp_history: self.state.precompiles_processor.timestamp_history.clone(), + }; + let memory = self.state.memory.clone(); + let decommitter_state = DecommitterTestInnerState { + modified_storage_keys: ModifiedKeysMap( + self.state + .decommittment_processor + .get_storage() + .borrow() + .get_modified_storage_keys() + .clone(), + ), + known_bytecodes: self.state.decommittment_processor.known_bytecodes.clone(), + decommitted_code_hashes: self + .state + .decommittment_processor + .get_decommitted_code_hashes_with_history() + .clone(), + }; + let storage_oracle_state = StorageOracleInnerState { + modified_storage_keys: ModifiedKeysMap( + self.state + .storage + .storage + .get_ptr() + .borrow() + .get_modified_storage_keys() + .clone(), + ), + frames_stack: self.state.storage.frames_stack.clone(), + }; + let local_state = self.state.local_state.clone(); + + VmInstanceInnerState { + event_sink, + precompile_processor_state, + memory, + decommitter_state, + storage_oracle_state, + local_state, + } + } +} + +// This one is used only for tests, but it is in this folder to +// be able to share it among crates +pub fn mock_loadnext_test_call( + eth_private_key: H256, + nonce: Nonce, + contract_address: Address, + fee: Fee, + execution_params: LoadnextContractExecutionParams, +) -> L2Tx { + let loadnext_contract = get_loadnext_contract(); + + let contract_function = loadnext_contract.contract.function("execute").unwrap(); + + let params = vec![ + Token::Uint(U256::from(execution_params.reads)), + Token::Uint(U256::from(execution_params.writes)), + Token::Uint(U256::from(execution_params.hashes)), + Token::Uint(U256::from(execution_params.events)), + Token::Uint(U256::from(execution_params.recursive_calls)), + Token::Uint(U256::from(execution_params.deploys)), + ]; + let calldata = 
contract_function + .encode_input(¶ms) + .expect("failed to encode parameters"); + + let mut l2_tx = L2Tx::new_signed( + contract_address, + calldata, + nonce, + fee, + Default::default(), + L2ChainId(270), + ð_private_key, + None, + Default::default(), + ) + .unwrap(); + // Input means all transaction data (NOT calldata, but all tx fields) that came from the API. + // This input will be used for the derivation of the tx hash, so put some random to it to be sure + // that the transaction hash is unique. + l2_tx.set_input(H256::random().0.to_vec(), H256::random()); + l2_tx +} + +// This one is used only for tests, but it is in this folder to +// be able to share it among crates +pub fn mock_loadnext_gas_burn_call( + eth_private_key: H256, + nonce: Nonce, + contract_address: Address, + fee: Fee, + gas: u32, +) -> L2Tx { + let loadnext_contract = get_loadnext_contract(); + + let contract_function = loadnext_contract.contract.function("burnGas").unwrap(); + + let params = vec![Token::Uint(U256::from(gas))]; + let calldata = contract_function + .encode_input(¶ms) + .expect("failed to encode parameters"); + + let mut l2_tx = L2Tx::new_signed( + contract_address, + calldata, + nonce, + fee, + Default::default(), + L2ChainId(270), + ð_private_key, + None, + Default::default(), + ) + .unwrap(); + // Input means all transaction data (NOT calldata, but all tx fields) that came from the API. + // This input will be used for the derivation of the tx hash, so put some random to it to be sure + // that the transaction hash is unique. 
+ l2_tx.set_input(H256::random().0.to_vec(), H256::random()); + l2_tx +} + +pub fn get_create_execute(code: &[u8], calldata: &[u8]) -> Execute { + let deployer = deployer_contract(); + + let contract_function = deployer.function("create").unwrap(); + + let params = [ + Token::FixedBytes(vec![0u8; 32]), + Token::FixedBytes(hash_bytecode(code).0.to_vec()), + Token::Bytes(calldata.to_vec()), + ]; + let calldata = contract_function + .encode_input(¶ms) + .expect("failed to encode parameters"); + + Execute { + contract_address: CONTRACT_DEPLOYER_ADDRESS, + calldata, + factory_deps: Some(vec![code.to_vec()]), + value: U256::zero(), + } +} + +fn get_execute_error_calldata() -> Vec { + let test_contract = load_contract( + "etc/contracts-test-data/artifacts-zk/contracts/error/error.sol/SimpleRequire.json", + ); + + let function = test_contract.function("require_short").unwrap(); + + function + .encode_input(&[]) + .expect("failed to encode parameters") +} + +pub fn get_deploy_tx( + account_private_key: H256, + nonce: Nonce, + code: &[u8], + factory_deps: Vec>, + calldata: &[u8], + fee: Fee, +) -> L2Tx { + let factory_deps = factory_deps + .into_iter() + .chain(vec![code.to_vec()]) + .collect(); + let execute = get_create_execute(code, calldata); + + let mut signed = L2Tx::new_signed( + CONTRACT_DEPLOYER_ADDRESS, + execute.calldata, + nonce, + fee, + U256::zero(), + L2ChainId(270), + &account_private_key, + Some(factory_deps), + Default::default(), + ) + .expect("should create a signed execute transaction"); + + signed.set_input(H256::random().as_bytes().to_vec(), H256::random()); + + signed +} + +pub fn get_error_tx( + account_private_key: H256, + nonce: Nonce, + contract_address: Address, + fee: Fee, +) -> L2Tx { + let factory_deps = vec![]; + let calldata = get_execute_error_calldata(); + + let mut signed = L2Tx::new_signed( + contract_address, + calldata, + nonce, + fee, + U256::zero(), + L2ChainId(270), + &account_private_key, + Some(factory_deps), + Default::default(), 
+ ) + .expect("should create a signed execute transaction"); + + signed.set_input(H256::random().as_bytes().to_vec(), H256::random()); + + signed +} + +pub fn get_create_zksync_address(sender_address: Address, sender_nonce: Nonce) -> Address { + let prefix = keccak256("zksyncCreate".as_bytes()); + let address = address_to_h256(&sender_address); + let nonce = u256_to_h256(U256::from(sender_nonce.0)); + + let digest = prefix + .iter() + .chain(address.0.iter()) + .chain(nonce.0.iter()) + .copied() + .collect_vec(); + + let hash = keccak256(&digest); + + h256_to_account_address(&H256(hash)) +} diff --git a/core/lib/vm/src/tests/bootloader.rs b/core/lib/vm/src/tests/bootloader.rs new file mode 100644 index 000000000000..40584f23ebf1 --- /dev/null +++ b/core/lib/vm/src/tests/bootloader.rs @@ -0,0 +1,1588 @@ +//! +//! Tests for the bootloader +//! The description for each of the tests can be found in the corresponding `.yul` file. +//! +#![cfg_attr(test, allow(unused_imports))] + +use crate::errors::{VmRevertReason, VmRevertReasonParsingResult}; +use crate::memory::SimpleMemory; +use crate::oracles::tracer::{ + read_pointer, ExecutionEndTracer, NoopMemoryTracer, PendingRefundTracer, VmHook, +}; +use crate::storage::{Storage, StoragePtr}; +use crate::test_utils::{ + get_create_execute, get_create_zksync_address, get_deploy_tx, get_error_tx, + mock_loadnext_test_call, VmInstanceInnerState, +}; +use crate::utils::{ + create_test_block_params, default_block_properties, insert_system_contracts, + read_bootloader_test_code, BLOCK_GAS_LIMIT, +}; +use crate::vm::{ + get_vm_hook_params, tx_has_failed, VmBlockResult, VmExecutionStopReason, ZkSyncVmState, + MAX_MEM_SIZE_BYTES, +}; +use crate::vm_with_bootloader::{ + bytecode_to_factory_dep, get_bootloader_memory, get_bootloader_memory_for_encoded_tx, + init_vm_inner, push_raw_transaction_to_bootloader_memory, + push_transaction_to_bootloader_memory, BlockContext, DerivedBlockContext, BOOTLOADER_HEAP_PAGE, + 
BOOTLOADER_TX_DESCRIPTION_OFFSET, TX_DESCRIPTION_OFFSET, +}; +use crate::vm_with_bootloader::{BlockContextMode, BootloaderJobType, TxExecutionMode}; +use crate::{test_utils, VmInstance}; +use crate::{TxRevertReason, VmExecutionResult}; +use itertools::Itertools; +use std::cell::RefCell; +use std::convert::TryFrom; +use std::ops::{Add, DivAssign}; +use std::rc::Rc; +use tempfile::TempDir; +use zk_evm::abstractions::{ + AfterDecodingData, AfterExecutionData, BeforeExecutionData, Tracer, VmLocalStateData, + MAX_HEAP_PAGE_SIZE_IN_WORDS, MAX_MEMORY_BYTES, +}; +use zk_evm::aux_structures::Timestamp; +use zk_evm::block_properties::BlockProperties; +use zk_evm::sha3::digest::typenum::U830; +use zk_evm::witness_trace::VmWitnessTracer; +use zk_evm::zkevm_opcode_defs::decoding::VmEncodingMode; +use zk_evm::zkevm_opcode_defs::FatPointer; +use zksync_types::block::DeployedContract; +use zksync_types::ethabi::encode; +use zksync_types::l1::L1Tx; +use zksync_types::tx::tx_execution_info::{TxExecutionStatus, VmExecutionLogs}; +use zksync_utils::test_utils::LoadnextContractExecutionParams; +use zksync_utils::{ + address_to_h256, bytecode::hash_bytecode, bytes_to_be_words, bytes_to_le_words, h256_to_u256, + u256_to_h256, +}; +use zksync_utils::{h256_to_account_address, u256_to_account_address}; + +use crate::{transaction_data::TransactionData, OracleTools}; +use std::time; +use zksync_contracts::{ + default_erc20_bytecode, get_loadnext_contract, known_codes_contract, load_contract, + load_sys_contract, read_bootloader_code, read_bytecode, read_zbin_bytecode, + DEFAULT_ACCOUNT_CODE, PLAYGROUND_BLOCK_BOOTLOADER_CODE, PROVED_BLOCK_BOOTLOADER_CODE, +}; +use zksync_crypto::rand::random; +use zksync_state::secondary_storage::SecondaryStateStorage; +use zksync_state::storage_view::StorageView; +use zksync_storage::db::Database; +use zksync_storage::RocksDB; +use zksync_types::system_contracts::{DEPLOYMENT_NONCE_INCREMENT, TX_NONCE_INCREMENT}; +use zksync_types::utils::{ + 
deployed_address_create, storage_key_for_eth_balance, storage_key_for_standard_token_balance, +}; +use zksync_types::{ + ethabi::Token, AccountTreeId, Address, Execute, ExecuteTransactionCommon, L1BatchNumber, + L2ChainId, PackedEthSignature, StorageKey, StorageLogQueryType, Transaction, H256, + KNOWN_CODES_STORAGE_ADDRESS, U256, +}; +use zksync_types::{fee::Fee, l2::L2Tx, l2_to_l1_log::L2ToL1Log, tx::ExecutionMetrics}; +use zksync_types::{ + get_code_key, get_is_account_key, get_known_code_key, get_nonce_key, L1TxCommonData, Nonce, + PriorityOpId, SerialId, StorageLog, ZkSyncReadStorage, BOOTLOADER_ADDRESS, + CONTRACT_DEPLOYER_ADDRESS, FAIR_L2_GAS_PRICE, H160, L2_ETH_TOKEN_ADDRESS, + MAX_GAS_PER_PUBDATA_BYTE, MAX_TXS_IN_BLOCK, SYSTEM_CONTEXT_ADDRESS, + SYSTEM_CONTEXT_GAS_PRICE_POSITION, SYSTEM_CONTEXT_MINIMAL_BASE_FEE, + SYSTEM_CONTEXT_TX_ORIGIN_POSITION, +}; + +fn run_vm_with_custom_factory_deps<'a>( + oracle_tools: &'a mut OracleTools<'a, false>, + block_context: BlockContext, + block_properties: &'a BlockProperties, + encoded_tx: Vec, + predefined_overhead: u32, + expected_error: Option, +) { + let mut vm = init_vm_inner( + oracle_tools, + BlockContextMode::OverrideCurrent(block_context.into()), + block_properties, + BLOCK_GAS_LIMIT, + PLAYGROUND_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + + vm.bootloader_state.add_tx_data(encoded_tx.len()); + vm.state.memory.populate_page( + BOOTLOADER_HEAP_PAGE as usize, + get_bootloader_memory_for_encoded_tx( + encoded_tx, + 0, + TxExecutionMode::VerifyExecute, + 0, + 0, + predefined_overhead, + ), + Timestamp(0), + ); + + let result = vm.execute_next_tx().err(); + + assert_eq!(expected_error, result); +} + +fn get_balance(token_id: AccountTreeId, account: &Address, main_storage: StoragePtr<'_>) -> U256 { + let key = storage_key_for_standard_token_balance(token_id, account); + h256_to_u256(main_storage.borrow_mut().get_value(&key)) +} + +#[test] +fn test_dummy_bootloader() { + let temp_dir = 
TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let mut storage_accessor = StorageView::new(&raw_storage); + let storage_ptr: &mut dyn Storage = &mut storage_accessor; + + let mut oracle_tools = OracleTools::new(storage_ptr); + let (block_context, block_properties) = create_test_block_params(); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context.into(), Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + read_bootloader_test_code("dummy"), + TxExecutionMode::VerifyExecute, + ); + + let VmBlockResult { + full_result: res, .. + } = vm.execute_till_block_end(BootloaderJobType::BlockPostprocessing); + + // Dummy bootloader should not panic + assert!(res.revert_reason.is_none()); + + let correct_first_cell = U256::from_str_radix("123123123", 16).unwrap(); + + verify_required_memory( + &vm.state, + vec![(correct_first_cell, BOOTLOADER_HEAP_PAGE, 0)], + ); +} + +#[test] +fn test_bootloader_out_of_gas() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let mut storage_accessor = StorageView::new(&raw_storage); + let storage_ptr: &mut dyn Storage = &mut storage_accessor; + + let mut oracle_tools = OracleTools::new(storage_ptr); + let (block_context, block_properties) = create_test_block_params(); + + // init vm with only 100 gas + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context.into(), Default::default()), + &block_properties, + 10, + read_bootloader_test_code("dummy"), + TxExecutionMode::VerifyExecute, + ); + + let res = vm.execute_block_tip(); + + 
assert_eq!(res.revert_reason, Some(TxRevertReason::BootloaderOutOfGas)); +} + +fn verify_required_storage(state: &ZkSyncVmState<'_>, required_values: Vec<(H256, StorageKey)>) { + for (required_value, key) in required_values { + let current_value = state.storage.storage.read_from_storage(&key); + + assert_eq!( + u256_to_h256(current_value), + required_value, + "Invalid value at key {key:?}" + ); + } +} + +fn verify_required_memory(state: &ZkSyncVmState<'_>, required_values: Vec<(U256, u32, u32)>) { + for (required_value, memory_page, cell) in required_values { + let current_value = state + .memory + .dump_page_content_as_u256_words(memory_page, cell..cell + 1)[0]; + assert_eq!(current_value, required_value); + } +} + +#[test] +fn test_default_aa_interaction() { + // In this test, we aim to test whether a simple account interaction (without any fee logic) + // will work. The account will try to deploy a simple contract from integration tests. + + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + let operator_address = block_context.context.operator_address; + let base_fee = block_context.base_fee; + // We deploy here counter contract, because its logic is trivial + let contract_code = read_test_contract(); + let contract_code_hash = hash_bytecode(&contract_code); + let tx: Transaction = get_deploy_tx( + H256::random(), + Nonce(0), + &contract_code, + vec![], + &[], + Fee { + gas_limit: U256::from(10000000u32), + max_fee_per_gas: U256::from(base_fee), + max_priority_fee_per_gas: U256::from(0), + gas_per_pubdata_limit: 
U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + let tx_data: TransactionData = tx.clone().into(); + + let maximal_fee = tx_data.gas_limit * tx_data.max_fee_per_gas; + let sender_address = tx_data.from(); + // set balance + + let key = storage_key_for_eth_balance(&sender_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + + let tx_execution_result = vm + .execute_next_tx() + .expect("Bootloader failed while processing transaction"); + + assert_eq!( + tx_execution_result.status, + TxExecutionStatus::Success, + "Transaction wasn't successful" + ); + + let VmBlockResult { + full_result: res, .. + } = vm.execute_till_block_end(BootloaderJobType::TransactionExecution); + // Should not panic + assert!( + res.revert_reason.is_none(), + "Bootloader was not expected to revert: {:?}", + res.revert_reason + ); + + // Both deployment and ordinary nonce should be incremented by one. + let account_nonce_key = get_nonce_key(&sender_address); + let expected_nonce = TX_NONCE_INCREMENT + DEPLOYMENT_NONCE_INCREMENT; + + // The code hash of the deployed contract should be marked as republished. + let known_codes_key = get_known_code_key(&contract_code_hash); + + // The contract should be deployed successfully. 
+ let deployed_address = deployed_address_create(sender_address, U256::zero()); + let account_code_key = get_code_key(&deployed_address); + + let expected_slots = vec![ + (u256_to_h256(expected_nonce), account_nonce_key), + (u256_to_h256(U256::from(1u32)), known_codes_key), + (contract_code_hash, account_code_key), + ]; + + verify_required_storage(&vm.state, expected_slots); + + assert!(!tx_has_failed(&vm.state, 0)); + + let expected_fee = + maximal_fee - U256::from(tx_execution_result.gas_refunded) * U256::from(base_fee); + let operator_balance = get_balance( + AccountTreeId::new(L2_ETH_TOKEN_ADDRESS), + &operator_address, + vm.state.storage.storage.get_ptr(), + ); + + assert!( + operator_balance == expected_fee, + "Operator did not receive his fee" + ); +} + +fn execute_vm_with_predetermined_refund(txs: Vec, refunds: Vec) -> VmBlockResult { + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + // set balance + for tx in txs.iter() { + let sender_address = tx.initiator_account(); + let key = storage_key_for_eth_balance(&sender_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + } + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + + let codes_for_decommiter = txs + .iter() + .flat_map(|tx| { + tx.execute + .factory_deps + .clone() + .unwrap_or_default() + .iter() + .map(|dep| 
bytecode_to_factory_dep(dep.clone())) + .collect::)>>() + }) + .collect(); + + vm.state.decommittment_processor.populate( + codes_for_decommiter, + Timestamp(vm.state.local_state.timestamp), + ); + + let memory_with_suggested_refund = get_bootloader_memory( + txs.into_iter().map(Into::into).collect(), + refunds, + TxExecutionMode::VerifyExecute, + BlockContextMode::NewBlock(block_context, Default::default()), + ); + + vm.state.memory.populate_page( + BOOTLOADER_HEAP_PAGE as usize, + memory_with_suggested_refund, + Timestamp(0), + ); + + vm.execute_till_block_end(BootloaderJobType::TransactionExecution) +} + +#[test] +fn test_predetermined_refunded_gas() { + // In this test, we compare the execution of the bootloader with the predefined + // refunded gas and without them + + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + let base_fee = block_context.base_fee; + // We deploy here counter contract, because its logic is trivial + let contract_code = read_test_contract(); + let tx: Transaction = get_deploy_tx( + H256::random(), + Nonce(0), + &contract_code, + vec![], + &[], + Fee { + gas_limit: U256::from(10000000u32), + max_fee_per_gas: U256::from(base_fee), + max_priority_fee_per_gas: U256::from(0), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + + let sender_address = tx.initiator_account(); + + // set balance + let key = storage_key_for_eth_balance(&sender_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = 
init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + + let tx_execution_result = vm + .execute_next_tx() + .expect("Bootloader failed while processing transaction"); + + assert_eq!( + tx_execution_result.status, + TxExecutionStatus::Success, + "Transaction wasn't successful" + ); + + // If the refund provided by the operator or the final refund are the 0 + // there is no impact of the operator's refund at all and so this test does not + // make much sense. + assert!( + tx_execution_result.operator_suggested_refund > 0, + "The operator's refund is 0" + ); + assert!( + tx_execution_result.gas_refunded > 0, + "The final refund is 0" + ); + + let mut result = vm.execute_till_block_end(BootloaderJobType::TransactionExecution); + assert!( + result.full_result.revert_reason.is_none(), + "Bootloader was not expected to revert: {:?}", + result.full_result.revert_reason + ); + + let mut result_with_predetermined_refund = execute_vm_with_predetermined_refund( + vec![tx], + vec![tx_execution_result.operator_suggested_refund], + ); + // We need to sort these lists as those are flattened from HashMaps + result.full_result.used_contract_hashes.sort(); + result_with_predetermined_refund + .full_result + .used_contract_hashes + .sort(); + + assert_eq!( + result.full_result.events, + result_with_predetermined_refund.full_result.events + ); + assert_eq!( + result.full_result.l2_to_l1_logs, + result_with_predetermined_refund.full_result.l2_to_l1_logs + ); + assert_eq!( + result.full_result.storage_log_queries, + result_with_predetermined_refund + .full_result + .storage_log_queries + ); + assert_eq!( + result.full_result.used_contract_hashes, + result_with_predetermined_refund + .full_result + .used_contract_hashes + ); +} + 
+#[derive(Debug, Clone)] +enum TransactionRollbackTestInfo { + Rejected(Transaction, TxRevertReason), + Processed(Transaction, bool, TxExecutionStatus), +} + +impl TransactionRollbackTestInfo { + fn new_rejected(transaction: Transaction, revert_reason: TxRevertReason) -> Self { + Self::Rejected(transaction, revert_reason) + } + + fn new_processed( + transaction: Transaction, + should_be_rollbacked: bool, + expected_status: TxExecutionStatus, + ) -> Self { + Self::Processed(transaction, should_be_rollbacked, expected_status) + } + + fn get_transaction(&self) -> &Transaction { + match self { + TransactionRollbackTestInfo::Rejected(tx, _) => tx, + TransactionRollbackTestInfo::Processed(tx, _, _) => tx, + } + } + + fn rejection_reason(&self) -> Option { + match self { + TransactionRollbackTestInfo::Rejected(_, revert_reason) => Some(revert_reason.clone()), + TransactionRollbackTestInfo::Processed(_, _, _) => None, + } + } + + fn should_rollback(&self) -> bool { + match self { + TransactionRollbackTestInfo::Rejected(_, _) => true, + TransactionRollbackTestInfo::Processed(_, x, _) => *x, + } + } + + fn expected_status(&self) -> TxExecutionStatus { + match self { + TransactionRollbackTestInfo::Rejected(_, _) => { + panic!("There is no execution status for rejected transaction") + } + TransactionRollbackTestInfo::Processed(_, _, status) => *status, + } + } +} + +// Accepts the address of the sender as well as the list of pairs of its transactions +// and whether these transactions should succeed. 
+fn execute_vm_with_possible_rollbacks( + sender_address: Address, + transactions: Vec, + block_context: DerivedBlockContext, + block_properties: BlockProperties, +) -> VmExecutionResult { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + // Setting infinite balance for the sender. + let key = storage_key_for_eth_balance(&sender_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + + for test_info in transactions { + vm.save_current_vm_as_snapshot(); + let vm_state_before_tx = vm.dump_inner_state(); + push_transaction_to_bootloader_memory( + &mut vm, + test_info.get_transaction(), + TxExecutionMode::VerifyExecute, + ); + + match vm.execute_next_tx() { + Err(reason) => { + assert_eq!(test_info.rejection_reason(), Some(reason)); + } + Ok(res) => { + assert_eq!(test_info.rejection_reason(), None); + assert_eq!( + res.status, + test_info.expected_status(), + "Transaction status is not correct" + ); + } + }; + + if test_info.should_rollback() { + // Some error has occured, we should reject the transaction + vm.rollback_to_latest_snapshot(); + + // vm_state_before_tx. + let state_after_rollback = vm.dump_inner_state(); + assert_eq!( + vm_state_before_tx, state_after_rollback, + "Did not rollback VM state correctly" + ); + } + } + + let VmBlockResult { + full_result: mut result, + .. 
+ } = vm.execute_till_block_end(BootloaderJobType::BlockPostprocessing); + // Used contract hashes are retrieved in unordered manner. + // However it must be sorted for the comparisons in tests to work + result.used_contract_hashes.sort(); + + result +} + +// Sets the signature for an L2 transaction and returns the same transaction +// but this different signature. +fn change_signature(mut tx: Transaction, signature: Vec) -> Transaction { + tx.common_data = match tx.common_data { + ExecuteTransactionCommon::L2(mut data) => { + data.signature = signature; + ExecuteTransactionCommon::L2(data) + } + _ => unreachable!(), + }; + + tx +} + +#[test] +fn test_vm_rollbacks() { + let (block_context, block_properties): (DerivedBlockContext, BlockProperties) = { + let (block_context, block_properties) = create_test_block_params(); + (block_context.into(), block_properties) + }; + + let base_fee = U256::from(block_context.base_fee); + + let sender_private_key = H256::random(); + let contract_code = read_test_contract(); + + let tx_nonce_0: Transaction = get_deploy_tx( + sender_private_key, + Nonce(0), + &contract_code, + vec![], + &[], + Fee { + gas_limit: U256::from(5000000u32), + max_fee_per_gas: base_fee, + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + let tx_nonce_1: Transaction = get_deploy_tx( + sender_private_key, + Nonce(1), + &contract_code, + vec![], + &[], + Fee { + gas_limit: U256::from(5000000u32), + max_fee_per_gas: base_fee, + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + let tx_nonce_2: Transaction = get_deploy_tx( + sender_private_key, + Nonce(2), + &contract_code, + vec![], + &[], + Fee { + gas_limit: U256::from(5000000u32), + max_fee_per_gas: base_fee, + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + + let 
wrong_signature_length_tx = change_signature(tx_nonce_0.clone(), vec![1u8; 32]); + let wrong_v_tx = change_signature(tx_nonce_0.clone(), vec![1u8; 65]); + let wrong_signature_tx = change_signature(tx_nonce_0.clone(), vec![27u8; 65]); + + let sender_address = tx_nonce_0.initiator_account(); + + let result_without_rollbacks = execute_vm_with_possible_rollbacks( + sender_address, + vec![ + // The nonces are ordered correctly, all the transactions should succeed. + TransactionRollbackTestInfo::new_processed( + tx_nonce_0.clone(), + false, + TxExecutionStatus::Success, + ), + TransactionRollbackTestInfo::new_processed( + tx_nonce_1.clone(), + false, + TxExecutionStatus::Success, + ), + TransactionRollbackTestInfo::new_processed( + tx_nonce_2.clone(), + false, + TxExecutionStatus::Success, + ), + ], + block_context, + block_properties, + ); + + let incorrect_nonce = TxRevertReason::ValidationFailed(VmRevertReason::General { + msg: "Incorrect nonce".to_string(), + }); + let reusing_nonce_twice = TxRevertReason::ValidationFailed(VmRevertReason::General { + msg: "Reusing the same nonce twice".to_string(), + }); + let signature_length_is_incorrect = TxRevertReason::ValidationFailed(VmRevertReason::General { + msg: "Signature length is incorrect".to_string(), + }); + let v_is_incorrect = TxRevertReason::ValidationFailed(VmRevertReason::General { + msg: "v is neither 27 nor 28".to_string(), + }); + let signature_is_incorrect = TxRevertReason::ValidationFailed(VmRevertReason::General { + msg: "Account validation returned invalid magic value. 
Most often this means that the signature is incorrect".to_string(), + }); + + let result_with_rollbacks = execute_vm_with_possible_rollbacks( + sender_address, + vec![ + TransactionRollbackTestInfo::new_rejected( + wrong_signature_length_tx, + signature_length_is_incorrect, + ), + TransactionRollbackTestInfo::new_rejected(wrong_v_tx, v_is_incorrect), + TransactionRollbackTestInfo::new_rejected(wrong_signature_tx, signature_is_incorrect), + // The correct nonce is 0, this tx will fail + TransactionRollbackTestInfo::new_rejected(tx_nonce_2.clone(), incorrect_nonce.clone()), + // This tx will succeed + TransactionRollbackTestInfo::new_processed( + tx_nonce_0.clone(), + false, + TxExecutionStatus::Success, + ), + // The correct nonce is 1, this tx will fail + TransactionRollbackTestInfo::new_rejected( + tx_nonce_0.clone(), + reusing_nonce_twice.clone(), + ), + // The correct nonce is 1, this tx will fail + TransactionRollbackTestInfo::new_rejected(tx_nonce_2.clone(), incorrect_nonce), + // This tx will succeed + TransactionRollbackTestInfo::new_processed( + tx_nonce_1, + false, + TxExecutionStatus::Success, + ), + // The correct nonce is 2, this tx will fail + TransactionRollbackTestInfo::new_rejected(tx_nonce_0, reusing_nonce_twice.clone()), + // This tx will succeed + TransactionRollbackTestInfo::new_processed( + tx_nonce_2.clone(), + false, + TxExecutionStatus::Success, + ), + // This tx will fail + TransactionRollbackTestInfo::new_rejected(tx_nonce_2, reusing_nonce_twice.clone()), + ], + block_context, + block_properties, + ); + + assert_eq!(result_without_rollbacks, result_with_rollbacks); + + let loadnext_contract = get_loadnext_contract(); + + let loadnext_constructor_data = encode(&[Token::Uint(U256::from(100))]); + let loadnext_deploy_tx: Transaction = get_deploy_tx( + sender_private_key, + Nonce(0), + &loadnext_contract.bytecode, + loadnext_contract.factory_deps, + &loadnext_constructor_data, + Fee { + gas_limit: U256::from(30000000u32), + max_fee_per_gas: 
base_fee, + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + ) + .into(); + let loadnext_contract_address = + get_create_zksync_address(loadnext_deploy_tx.initiator_account(), Nonce(0)); + let deploy_loadnext_tx_info = TransactionRollbackTestInfo::new_processed( + loadnext_deploy_tx, + false, + TxExecutionStatus::Success, + ); + + let get_load_next_tx = |params: LoadnextContractExecutionParams, nonce: Nonce| { + // Here we test loadnext with various kinds of operations + let tx: Transaction = mock_loadnext_test_call( + sender_private_key, + nonce, + loadnext_contract_address, + Fee { + gas_limit: U256::from(60000000u32), + max_fee_per_gas: base_fee, + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + }, + params, + ) + .into(); + + tx + }; + + let loadnext_tx_0 = get_load_next_tx( + LoadnextContractExecutionParams { + reads: 100, + writes: 100, + events: 100, + hashes: 500, + recursive_calls: 10, + deploys: 100, + }, + Nonce(1), + ); + let loadnext_tx_1 = get_load_next_tx( + LoadnextContractExecutionParams { + reads: 100, + writes: 100, + events: 100, + hashes: 500, + recursive_calls: 10, + deploys: 100, + }, + Nonce(2), + ); + + let result_without_rollbacks = execute_vm_with_possible_rollbacks( + sender_address, + vec![ + deploy_loadnext_tx_info.clone(), + TransactionRollbackTestInfo::new_processed( + loadnext_tx_0.clone(), + false, + TxExecutionStatus::Success, + ), + TransactionRollbackTestInfo::new_processed( + loadnext_tx_1.clone(), + false, + TxExecutionStatus::Success, + ), + ], + block_context, + block_properties, + ); + + let result_with_rollbacks = execute_vm_with_possible_rollbacks( + sender_address, + vec![ + deploy_loadnext_tx_info, + TransactionRollbackTestInfo::new_processed( + loadnext_tx_0.clone(), + true, + TxExecutionStatus::Success, + ), + // After the previous tx has been rolled back, this one should succeed + 
TransactionRollbackTestInfo::new_processed( + loadnext_tx_0.clone(), + false, + TxExecutionStatus::Success, + ), + // The nonce has been bumped up, this transaction should now fail + TransactionRollbackTestInfo::new_rejected(loadnext_tx_0, reusing_nonce_twice.clone()), + TransactionRollbackTestInfo::new_processed( + loadnext_tx_1.clone(), + true, + TxExecutionStatus::Success, + ), + // After the previous tx has been rolled back, this one should succeed + TransactionRollbackTestInfo::new_processed( + loadnext_tx_1.clone(), + true, + TxExecutionStatus::Success, + ), + // After the previous tx has been rolled back, this one should succeed + TransactionRollbackTestInfo::new_processed( + loadnext_tx_1.clone(), + false, + TxExecutionStatus::Success, + ), + // The nonce has been bumped up, this transaction should now fail + TransactionRollbackTestInfo::new_rejected(loadnext_tx_1, reusing_nonce_twice), + ], + block_context, + block_properties, + ); + + assert_eq!(result_without_rollbacks, result_with_rollbacks); +} + +// Inserts the contracts into the test environment, bypassing the +// deployer system contract. Besides the reference to storage +// it accepts a `contracts` tuple of information about the contract +// and whether or not it is an account. 
+fn insert_contracts( + raw_storage: &mut SecondaryStateStorage, + contracts: Vec<(DeployedContract, bool)>, +) { + let logs: Vec = contracts + .iter() + .flat_map(|(contract, is_account)| { + let mut new_logs = vec![]; + + let deployer_code_key = get_code_key(contract.account_id.address()); + new_logs.push(StorageLog::new_write_log( + deployer_code_key, + hash_bytecode(&contract.bytecode), + )); + + if *is_account { + let is_account_key = get_is_account_key(contract.account_id.address()); + new_logs.push(StorageLog::new_write_log( + is_account_key, + u256_to_h256(1u32.into()), + )); + } + + new_logs + }) + .collect(); + raw_storage.process_transaction_logs(&logs); + + for (contract, _) in contracts { + raw_storage.store_contract(*contract.account_id.address(), contract.bytecode.clone()); + raw_storage.store_factory_dep(hash_bytecode(&contract.bytecode), contract.bytecode); + } + raw_storage.save(L1BatchNumber(0)); +} + +enum NonceHolderTestMode { + SetValueUnderNonce, + IncreaseMinNonceBy5, + IncreaseMinNonceTooMuch, + LeaveNonceUnused, + IncreaseMinNonceBy1, + SwitchToArbitraryOrdering, +} + +impl From for u8 { + fn from(mode: NonceHolderTestMode) -> u8 { + match mode { + NonceHolderTestMode::SetValueUnderNonce => 0, + NonceHolderTestMode::IncreaseMinNonceBy5 => 1, + NonceHolderTestMode::IncreaseMinNonceTooMuch => 2, + NonceHolderTestMode::LeaveNonceUnused => 3, + NonceHolderTestMode::IncreaseMinNonceBy1 => 4, + NonceHolderTestMode::SwitchToArbitraryOrdering => 5, + } + } +} + +fn get_nonce_holder_test_tx( + nonce: U256, + account_address: Address, + test_mode: NonceHolderTestMode, + block_context: &DerivedBlockContext, +) -> TransactionData { + TransactionData { + tx_type: 113, + from: account_address, + to: account_address, + gas_limit: U256::from(10000000u32), + pubdata_price_limit: U256::from(MAX_GAS_PER_PUBDATA_BYTE), + max_fee_per_gas: U256::from(block_context.base_fee), + max_priority_fee_per_gas: U256::zero(), + nonce, + // The reserved fields that are 
unique for different types of transactions. + // E.g. nonce is currently used in all transaction, but it should not be mandatory + // in the long run. + reserved: [U256::zero(); 4], + data: vec![12], + signature: vec![test_mode.into()], + + ..Default::default() + } +} + +fn run_vm_with_raw_tx<'a>( + oracle_tools: &'a mut OracleTools<'a, false>, + block_context: DerivedBlockContext, + block_properties: &'a BlockProperties, + tx: TransactionData, +) -> (VmExecutionResult, bool) { + let mut vm = init_vm_inner( + oracle_tools, + BlockContextMode::OverrideCurrent(block_context), + block_properties, + BLOCK_GAS_LIMIT, + PLAYGROUND_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + let overhead = tx.overhead_gas(); + push_raw_transaction_to_bootloader_memory( + &mut vm, + tx, + TxExecutionMode::VerifyExecute, + overhead, + ); + let VmBlockResult { + full_result: result, + .. + } = vm.execute_till_block_end(BootloaderJobType::TransactionExecution); + + (result, tx_has_failed(&vm.state, 0)) +} + +#[test] +fn test_nonce_holder() { + let (block_context, block_properties): (DerivedBlockContext, BlockProperties) = { + let (block_context, block_properties) = create_test_block_params(); + (block_context.into(), block_properties) + }; + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + + let account_address = H160::random(); + let account = DeployedContract { + account_id: AccountTreeId::new(account_address), + bytecode: read_nonce_holder_tester(), + }; + + insert_contracts(&mut raw_storage, vec![(account, true)]); + + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + // We deploy here counter contract, because its logic is trivial + + let key = storage_key_for_eth_balance(&account_address); + 
storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 0]))); + + let mut run_nonce_test = |nonce: U256, + test_mode: NonceHolderTestMode, + error_message: Option, + comment: &'static str| { + let tx = get_nonce_holder_test_tx(nonce, account_address, test_mode, &block_context); + + let mut oracle_tools = OracleTools::new(storage_ptr); + let (result, tx_has_failed) = + run_vm_with_raw_tx(&mut oracle_tools, block_context, &block_properties, tx); + if let Some(msg) = error_message { + let expected_error = TxRevertReason::ValidationFailed(VmRevertReason::General { msg }); + assert_eq!( + result + .revert_reason + .expect("No revert reason") + .revert_reason, + expected_error, + "{}", + comment + ); + } else { + assert!(!tx_has_failed, "{}", comment); + } + }; + + // Test 1: trying to set value under non sequential nonce value. + run_nonce_test( + 1u32.into(), + NonceHolderTestMode::SetValueUnderNonce, + Some("Previous nonce has not been used".to_string()), + "Allowed to set value under non sequential value", + ); + + // Test 2: increase min nonce by 1 with sequential nonce ordering: + run_nonce_test( + 0u32.into(), + NonceHolderTestMode::IncreaseMinNonceBy1, + None, + "Failed to increment nonce by 1 for sequential account", + ); + + // Test 3: correctly set value under nonce with sequential nonce ordering: + run_nonce_test( + 1u32.into(), + NonceHolderTestMode::SetValueUnderNonce, + None, + "Failed to set value under nonce sequential value", + ); + + // Test 5: migrate to the arbitrary nonce ordering: + run_nonce_test( + 2u32.into(), + NonceHolderTestMode::SwitchToArbitraryOrdering, + None, + "Failed to switch to arbitrary ordering", + ); + + // Test 6: increase min nonce by 5 + run_nonce_test( + 6u32.into(), + NonceHolderTestMode::IncreaseMinNonceBy5, + None, + "Failed to increase min nonce by 5", + ); + + // Test 7: since the nonces in range [6,10] are no longer allowed, the + // tx with nonce 10 should not be allowed + run_nonce_test( + 10u32.into(), + 
NonceHolderTestMode::IncreaseMinNonceBy5, + Some("Reusing the same nonce twice".to_string()), + "Allowed to reuse nonce below the minimal one", + ); + + // Test 8: we should be able to use nonce 13 + run_nonce_test( + 13u32.into(), + NonceHolderTestMode::SetValueUnderNonce, + None, + "Did not allow to use unused nonce 10", + ); + + // Test 9: we should not be able to reuse nonce 13 + run_nonce_test( + 13u32.into(), + NonceHolderTestMode::IncreaseMinNonceBy5, + Some("Reusing the same nonce twice".to_string()), + "Allowed to reuse the same nonce twice", + ); + + // Test 10: we should be able to simply use nonce 14, while bumping the minimal nonce by 5 + run_nonce_test( + 14u32.into(), + NonceHolderTestMode::IncreaseMinNonceBy5, + None, + "Did not allow to use a bumped nonce", + ); + + // Test 6: Do not allow bumping nonce by too much + run_nonce_test( + 16u32.into(), + NonceHolderTestMode::IncreaseMinNonceTooMuch, + Some("The value for incrementing the nonce is too high".to_string()), + "Allowed for incrementing min nonce too much", + ); + + // Test 7: Do not allow not setting a nonce as used + run_nonce_test( + 16u32.into(), + NonceHolderTestMode::LeaveNonceUnused, + Some("The nonce was not set as used".to_string()), + "Allowed to leave nonce as unused", + ); +} + +#[test] +fn test_l1_tx_execution() { + // In this test, we try to execute a contract deployment from L1 + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let mut storage_accessor = StorageView::new(&raw_storage); + let storage_ptr: &mut dyn Storage = &mut storage_accessor; + + let mut oracle_tools = OracleTools::new(storage_ptr); + let (block_context, block_properties) = create_test_block_params(); + + // Here instead of marking code hash via the bootloader means, we will + // using L1->L2 
communication, the same it would likely be done during the priority mode. + let contract_code = read_test_contract(); + let contract_code_hash = hash_bytecode(&contract_code); + let l1_deploy_tx = get_l1_deploy_tx(&contract_code, &[]); + let l1_deploy_tx_data: TransactionData = l1_deploy_tx.clone().into(); + + let required_l2_to_l1_logs = vec![ + L2ToL1Log { + shard_id: 0, + is_service: false, + tx_number_in_block: 0, + sender: SYSTEM_CONTEXT_ADDRESS, + key: u256_to_h256(U256::from(block_context.block_timestamp)), + value: Default::default(), + }, + L2ToL1Log { + shard_id: 0, + is_service: true, + tx_number_in_block: 0, + sender: BOOTLOADER_ADDRESS, + key: l1_deploy_tx_data.canonical_l1_tx_hash(), + value: u256_to_h256(U256::from(1u32)), + }, + ]; + + let sender_address = l1_deploy_tx_data.from(); + + oracle_tools.decommittment_processor.populate( + vec![( + h256_to_u256(contract_code_hash), + bytes_to_be_words(contract_code), + )], + Timestamp(0), + ); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context.into(), Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + push_transaction_to_bootloader_memory(&mut vm, &l1_deploy_tx, TxExecutionMode::VerifyExecute); + + let res = vm.execute_next_tx().unwrap(); + + // The code hash of the deployed contract should be marked as republished. + let known_codes_key = get_known_code_key(&contract_code_hash); + + // The contract should be deployed successfully. 
+ let deployed_address = deployed_address_create(sender_address, U256::zero()); + let account_code_key = get_code_key(&deployed_address); + + let expected_slots = vec![ + (u256_to_h256(U256::from(1u32)), known_codes_key), + (contract_code_hash, account_code_key), + ]; + assert!(!tx_has_failed(&vm.state, 0)); + + verify_required_storage(&vm.state, expected_slots); + + assert_eq!(res.result.logs.l2_to_l1_logs, required_l2_to_l1_logs); + + let tx = get_l1_execute_test_contract_tx(deployed_address, true); + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + let res = ExecutionMetrics::new(&vm.execute_next_tx().unwrap().result.logs, 0, 0, 0, 0); + assert_eq!(res.initial_storage_writes, 0); + + let tx = get_l1_execute_test_contract_tx(deployed_address, false); + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + let res = ExecutionMetrics::new(&vm.execute_next_tx().unwrap().result.logs, 0, 0, 0, 0); + assert_eq!(res.initial_storage_writes, 2); + + let repeated_writes = res.repeated_storage_writes; + + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + let res = ExecutionMetrics::new(&vm.execute_next_tx().unwrap().result.logs, 0, 0, 0, 0); + assert_eq!(res.initial_storage_writes, 1); + // We do the same storage write, so it will be deduplicated + assert_eq!(res.repeated_storage_writes, repeated_writes); + + let mut tx = get_l1_execute_test_contract_tx(deployed_address, false); + tx.execute.value = U256::from(1); + match &mut tx.common_data { + ExecuteTransactionCommon::L1(l1_data) => { + l1_data.to_mint = U256::from(4); + } + _ => unreachable!(), + } + push_transaction_to_bootloader_memory(&mut vm, &tx, TxExecutionMode::VerifyExecute); + let execution_result = vm.execute_next_tx().unwrap(); + // The method is not payable, so the transaction with non-zero value should fail + assert_eq!( + execution_result.status, + TxExecutionStatus::Failure, + "The transaction 
should fail" + ); + + let res = ExecutionMetrics::new(&execution_result.result.logs, 0, 0, 0, 0); + + // There are 2 initial writes here: + // - totalSupply of ETH token + // - balance of the refund recipient + assert_eq!(res.initial_storage_writes, 2); +} + +#[test] +fn test_invalid_bytecode() { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + let (block_context, block_properties) = create_test_block_params(); + + let test_vm_with_custom_bytecode_hash = + |bytecode_hash: H256, expected_revert_reason: Option| { + let mut storage_accessor = StorageView::new(&raw_storage); + let storage_ptr: &mut dyn Storage = &mut storage_accessor; + let mut oracle_tools = OracleTools::new(storage_ptr); + + let (encoded_tx, predefined_overhead) = + get_l1_tx_with_custom_bytecode_hash(h256_to_u256(bytecode_hash)); + + run_vm_with_custom_factory_deps( + &mut oracle_tools, + block_context, + &block_properties, + encoded_tx, + predefined_overhead, + expected_revert_reason, + ); + }; + + let failed_to_mark_factory_deps = |msg: &str| { + TxRevertReason::FailedToMarkFactoryDependencies(VmRevertReason::General { + msg: msg.to_string(), + }) + }; + + // Here we provide the correctly-formatted bytecode hash of + // odd length, so it should work. + test_vm_with_custom_bytecode_hash( + H256([ + 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + ]), + None, + ); + + // Here we provide correctly formatted bytecode of even length, so + // it should fail. 
+ test_vm_with_custom_bytecode_hash( + H256([ + 1, 0, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + ]), + Some(failed_to_mark_factory_deps( + "Code length in words must be odd", + )), + ); + + // Here we provide incorrectly formatted bytecode of odd length, so + // it should fail. + test_vm_with_custom_bytecode_hash( + H256([ + 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + ]), + Some(failed_to_mark_factory_deps( + "Incorrectly formatted bytecodeHash", + )), + ); + + // Here we provide incorrectly formatted bytecode of odd length, so + // it should fail. + test_vm_with_custom_bytecode_hash( + H256([ + 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + ]), + Some(failed_to_mark_factory_deps( + "Incorrectly formatted bytecodeHash", + )), + ); +} + +#[derive(Debug)] +enum TestExecutionResult { + Success(Vec), + Revert(Vec), +} + +#[derive(Debug, Default)] +struct TransactionExecutionErrorTracer { + result: Option, +} + +impl Tracer for TransactionExecutionErrorTracer { + type SupportedMemory = SimpleMemory; + const CALL_BEFORE_EXECUTION: bool = true; + + fn before_decoding(&mut self, _state: VmLocalStateData<'_>, _memory: &Self::SupportedMemory) {} + fn after_decoding( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterDecodingData, + _memory: &Self::SupportedMemory, + ) { + } + fn before_execution( + &mut self, + state: VmLocalStateData<'_>, + data: BeforeExecutionData, + memory: &Self::SupportedMemory, + ) { + let hook = VmHook::from_opcode_memory(&state, &data); + + if matches!(hook, VmHook::ExecutionResult) { + let vm_hook_params = get_vm_hook_params(memory); + + let success = vm_hook_params[0]; + let returndata_ptr = FatPointer::from_u256(vm_hook_params[1]); + let returndata = read_pointer(memory, returndata_ptr); + + assert!( + success == U256::zero() || success == U256::one(), + "The success should be 
either 0 or 1" + ); + assert!(self.result.is_none(), "The result is emitted twice"); + + let result = if success == U256::zero() { + TestExecutionResult::Revert(returndata) + } else { + TestExecutionResult::Success(returndata) + }; + + self.result = Some(result); + } + } + fn after_execution( + &mut self, + _state: VmLocalStateData<'_>, + _data: AfterExecutionData, + _memory: &Self::SupportedMemory, + ) { + } +} + +impl ExecutionEndTracer for TransactionExecutionErrorTracer { + fn should_stop_execution(&self) -> bool { + // This tracer will not prevent the execution from going forward + // until the end of the block. + false + } +} + +impl PendingRefundTracer for TransactionExecutionErrorTracer {} + +#[test] +fn test_tracing_of_execution_errors() { + // In this test, we are checking that the execution errors are transmitted correctly from the bootloader. + let (block_context, block_properties) = create_test_block_params(); + let block_context: DerivedBlockContext = block_context.into(); + + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDB"); + let db = RocksDB::new(Database::StateKeeper, temp_dir.as_ref(), false); + let mut raw_storage = SecondaryStateStorage::new(db); + insert_system_contracts(&mut raw_storage); + + let contract_address = Address::random(); + let error_contract = DeployedContract { + account_id: AccountTreeId::new(contract_address), + bytecode: read_error_contract(), + }; + + let tx = get_error_tx( + H256::random(), + Nonce(0), + contract_address, + Fee { + gas_limit: U256::from(1000000u32), + max_fee_per_gas: U256::from(10000000000u64), + max_priority_fee_per_gas: U256::zero(), + gas_per_pubdata_limit: U256::from(50000u32), + }, + ); + + insert_contracts(&mut raw_storage, vec![(error_contract, false)]); + + let storage_ptr: &mut dyn Storage = &mut StorageView::new(&raw_storage); + + let key = storage_key_for_eth_balance(&tx.common_data.initiator_address); + storage_ptr.set_value(&key, u256_to_h256(U256([0, 0, 1, 
0]))); + + let mut oracle_tools = OracleTools::new(storage_ptr); + + let mut vm = init_vm_inner( + &mut oracle_tools, + BlockContextMode::NewBlock(block_context, Default::default()), + &block_properties, + BLOCK_GAS_LIMIT, + PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + TxExecutionMode::VerifyExecute, + ); + push_transaction_to_bootloader_memory(&mut vm, &tx.into(), TxExecutionMode::VerifyExecute); + + let mut tracer = TransactionExecutionErrorTracer::default(); + assert_eq!( + vm.execute_with_custom_tracer(&mut tracer), + VmExecutionStopReason::VmFinished, + "Tracer should never request stop" + ); + + match tracer.result { + Some(TestExecutionResult::Revert(revert_reason)) => { + let revert_reason = VmRevertReason::try_from(&revert_reason as &[u8]).unwrap(); + assert_eq!( + revert_reason, + VmRevertReason::General { + msg: "short".to_string() + } + ) + } + _ => panic!("Tracer captured incorrect result {:#?}", tracer.result), + } +} + +pub fn get_l1_tx_with_custom_bytecode_hash(bytecode_hash: U256) -> (Vec, u32) { + let tx: TransactionData = get_l1_execute_test_contract_tx(Default::default(), false).into(); + let predefined_overhead = tx.overhead_gas_with_custom_factory_deps(vec![bytecode_hash]); + let tx_bytes = tx.abi_encode_with_custom_factory_deps(vec![bytecode_hash]); + + (bytes_to_be_words(tx_bytes), predefined_overhead) +} + +const L1_TEST_GAS_PER_PUBDATA_BYTE: u32 = 800; + +pub fn get_l1_execute_test_contract_tx(deployed_address: Address, with_panic: bool) -> Transaction { + let execute = execute_test_contract(deployed_address, with_panic); + Transaction { + common_data: ExecuteTransactionCommon::L1(L1TxCommonData { + sender: H160::random(), + gas_limit: U256::from(1000000u32), + gas_per_pubdata_limit: L1_TEST_GAS_PER_PUBDATA_BYTE.into(), + ..Default::default() + }), + execute, + received_timestamp_ms: 0, + } +} + +pub fn get_l1_deploy_tx(code: &[u8], calldata: &[u8]) -> Transaction { + let execute = get_create_execute(code, calldata); + + Transaction { + 
common_data: ExecuteTransactionCommon::L1(L1TxCommonData { + sender: H160::random(), + gas_limit: U256::from(2000000u32), + gas_per_pubdata_limit: L1_TEST_GAS_PER_PUBDATA_BYTE.into(), + ..Default::default() + }), + execute, + received_timestamp_ms: 0, + } +} + +fn read_test_contract() -> Vec { + read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json") +} + +fn read_nonce_holder_tester() -> Vec { + read_bytecode("etc/contracts-test-data/artifacts-zk/contracts/custom-account/nonce-holder-test.sol/NonceHolderTest.json") +} + +fn read_error_contract() -> Vec { + read_bytecode( + "etc/contracts-test-data/artifacts-zk/contracts/error/error.sol/SimpleRequire.json", + ) +} + +fn execute_test_contract(address: Address, with_panic: bool) -> Execute { + let test_contract = load_contract( + "etc/contracts-test-data/artifacts-zk/contracts/counter/counter.sol/Counter.json", + ); + + let function = test_contract.function("incrementWithRevert").unwrap(); + + let calldata = function + .encode_input(&[Token::Uint(U256::from(1u8)), Token::Bool(with_panic)]) + .expect("failed to encode parameters"); + Execute { + contract_address: address, + calldata, + value: U256::zero(), + factory_deps: None, + } +} diff --git a/core/lib/vm/src/tests/mod.rs b/core/lib/vm/src/tests/mod.rs new file mode 100644 index 000000000000..3900135abeaa --- /dev/null +++ b/core/lib/vm/src/tests/mod.rs @@ -0,0 +1 @@ +mod bootloader; diff --git a/core/lib/vm/src/transaction_data.rs b/core/lib/vm/src/transaction_data.rs new file mode 100644 index 000000000000..683d67ec90a5 --- /dev/null +++ b/core/lib/vm/src/transaction_data.rs @@ -0,0 +1,484 @@ +use zk_evm::zkevm_opcode_defs::system_params::{MAX_PUBDATA_PER_BLOCK, MAX_TX_ERGS_LIMIT}; +use zksync_types::ethabi::{encode, Address, Token}; +use zksync_types::fee::encoding_len; +use zksync_types::MAX_TXS_IN_BLOCK; +use zksync_types::{l2::TransactionType, ExecuteTransactionCommon, Transaction, U256}; +use 
zksync_utils::{address_to_h256, ceil_div_u256}; +use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256}; + +use crate::vm_with_bootloader::{ + BLOCK_OVERHEAD_GAS, BLOCK_OVERHEAD_PUBDATA, BOOTLOADER_TX_ENCODING_SPACE, +}; + +const L1_TX_TYPE: u8 = 255; + +// This structure represents the data that is used by +// the Bootloader to describe the transaction. +#[derive(Debug, Default, Clone)] +pub struct TransactionData { + pub tx_type: u8, + pub from: Address, + pub to: Address, + pub gas_limit: U256, + pub pubdata_price_limit: U256, + pub max_fee_per_gas: U256, + pub max_priority_fee_per_gas: U256, + pub paymaster: Address, + pub nonce: U256, + pub value: U256, + // The reserved fields that are unique for different types of transactions. + // E.g. nonce is currently used in all transaction, but it should not be mandatory + // in the long run. + pub reserved: [U256; 4], + pub data: Vec, + pub signature: Vec, + // The factory deps provided with the transaction. + // Note that *only hashes* of these bytecodes are signed by the user + // and they are used in the ABI encoding of the struct. 
+ pub factory_deps: Vec>, + pub paymaster_input: Vec, + pub reserved_dynamic: Vec, +} + +impl From for TransactionData { + fn from(execute_tx: Transaction) -> Self { + match &execute_tx.common_data { + ExecuteTransactionCommon::L2(common_data) => { + let nonce = U256::from_big_endian(&common_data.nonce.to_be_bytes()); + + let should_check_chain_id = if matches!( + common_data.transaction_type, + TransactionType::LegacyTransaction + ) { + U256([1, 0, 0, 0]) + } else { + U256::zero() + }; + + TransactionData { + tx_type: (common_data.transaction_type as u32) as u8, + from: execute_tx.initiator_account(), + to: execute_tx.execute.contract_address, + gas_limit: common_data.fee.gas_limit, + pubdata_price_limit: common_data.fee.gas_per_pubdata_limit, + max_fee_per_gas: common_data.fee.max_fee_per_gas, + max_priority_fee_per_gas: common_data.fee.max_priority_fee_per_gas, + paymaster: common_data.paymaster_params.paymaster, + nonce, + value: execute_tx.execute.value, + reserved: [ + should_check_chain_id, + U256::zero(), + U256::zero(), + U256::zero(), + ], + data: execute_tx.execute.calldata, + signature: common_data.signature.clone(), + factory_deps: execute_tx.execute.factory_deps.unwrap_or_default(), + paymaster_input: common_data.paymaster_params.paymaster_input.clone(), + reserved_dynamic: vec![], + } + } + ExecuteTransactionCommon::L1(common_data) => { + let refund_recipient = h256_to_u256(address_to_h256(&common_data.refund_recipient)); + TransactionData { + tx_type: L1_TX_TYPE, + from: common_data.sender, + to: execute_tx.execute.contract_address, + gas_limit: common_data.gas_limit, + pubdata_price_limit: common_data.gas_per_pubdata_limit, + // It doesn't matter what we put here, since + // the bootloader does not charge anything + max_fee_per_gas: U256::zero(), + max_priority_fee_per_gas: U256::zero(), + paymaster: Address::default(), + nonce: U256::from(common_data.serial_id.0), // priority op ID + value: execute_tx.execute.value, + reserved: [ + 
common_data.to_mint, + refund_recipient, + U256::zero(), + U256::zero(), + ], + data: execute_tx.execute.calldata, + // The signature isn't checked for L1 transactions so we don't care + signature: vec![], + factory_deps: execute_tx.execute.factory_deps.unwrap_or_default(), + paymaster_input: vec![], + reserved_dynamic: vec![], + } + } + } + } +} + +impl TransactionData { + pub fn from(&self) -> Address { + self.from + } + + // This method is to be used only in tests, when we want to bypass the checks imposed + // on the bytecode hash. + pub(crate) fn abi_encode_with_custom_factory_deps( + self, + factory_deps_hashes: Vec, + ) -> Vec { + encode(&[Token::Tuple(vec![ + Token::Uint(U256::from_big_endian(&self.tx_type.to_be_bytes())), + Token::Address(self.from), + Token::Address(self.to), + Token::Uint(self.gas_limit), + Token::Uint(self.pubdata_price_limit), + Token::Uint(self.max_fee_per_gas), + Token::Uint(self.max_priority_fee_per_gas), + Token::Address(self.paymaster), + Token::Uint(self.nonce), + Token::Uint(self.value), + Token::FixedArray(self.reserved.iter().copied().map(Token::Uint).collect()), + Token::Bytes(self.data), + Token::Bytes(self.signature), + Token::Array(factory_deps_hashes.into_iter().map(Token::Uint).collect()), + Token::Bytes(self.paymaster_input), + Token::Bytes(self.reserved_dynamic), + ])]) + } + + pub(crate) fn abi_encode(self) -> Vec { + let factory_deps_hashes = self + .factory_deps + .iter() + .map(|dep| h256_to_u256(hash_bytecode(dep))) + .collect(); + self.abi_encode_with_custom_factory_deps(factory_deps_hashes) + } + + pub fn into_tokens(self) -> Vec { + let bytes = self.abi_encode(); + assert!(bytes.len() % 32 == 0); + + bytes_to_be_words(bytes) + } + + pub fn overhead_gas(&self) -> u32 { + if self.tx_type != L1_TX_TYPE { + return 0; + } + + let total_gas_limit = self.gas_limit.as_u32(); + let gas_per_pubdata_byte_limit = self.pubdata_price_limit.as_u32(); + let encoded_len = encoding_len( + self.data.len() as u64, + 
self.signature.len() as u64, + self.factory_deps.len() as u64, + self.paymaster_input.len() as u64, + self.reserved_dynamic.len() as u64, + ); + + get_maximal_allowed_overhead(total_gas_limit, gas_per_pubdata_byte_limit, encoded_len) + } + + #[cfg(test)] + pub(crate) fn overhead_gas_with_custom_factory_deps( + &self, + factory_deps_hashes: Vec, + ) -> u32 { + let total_gas_limit = self.gas_limit.as_u32(); + let gas_per_pubdata_byte_limit = self.pubdata_price_limit.as_u32(); + let encoded_len = encoding_len( + self.data.len() as u64, + self.signature.len() as u64, + factory_deps_hashes.len() as u64, + self.paymaster_input.len() as u64, + self.reserved_dynamic.len() as u64, + ); + get_maximal_allowed_overhead(total_gas_limit, gas_per_pubdata_byte_limit, encoded_len) + } + + #[cfg(test)] + pub(crate) fn canonical_l1_tx_hash(&self) -> zksync_types::H256 { + use zksync_types::web3::signing::keccak256; + + if self.tx_type != L1_TX_TYPE { + panic!("Trying to get L1 tx hash for non-L1 tx"); + } + + let encoded_bytes = self.clone().abi_encode(); + + zksync_types::H256(keccak256(&encoded_bytes)) + } +} + +pub fn derive_overhead(gas_limit: u32, gas_price_per_pubdata: u32, encoded_len: usize) -> u32 { + assert!( + gas_limit <= MAX_TX_ERGS_LIMIT, + "gas limit is larger than the maximal one" + ); + + // Using large U256 type to avoid overflow + let max_block_overhead = U256::from(block_overhead_gas(gas_price_per_pubdata)); + let gas_limit = U256::from(gas_limit); + let gas_price_per_pubdata = U256::from(gas_price_per_pubdata); + let encoded_len = U256::from(encoded_len); + + // The MAX_TX_ERGS_LIMIT is formed in a way that may fulfill the single-instance circuits + // if used in full. That is, within MAX_TX_ERGS_LIMIT it is possible to fully saturate all the single-instance + // circuits.
+ let overhead_for_single_instance_circuits = + ceil_div_u256(gas_limit * max_block_overhead, MAX_TX_ERGS_LIMIT.into()); + + // The overhead for occupying the bootloader memory + let overhead_for_length = ceil_div_u256( + encoded_len * max_block_overhead, + BOOTLOADER_TX_ENCODING_SPACE.into(), + ); + + // The overhead for occupying a single tx slot + let tx_slot_overhead = ceil_div_u256(max_block_overhead, MAX_TXS_IN_BLOCK.into()); + + // We use "ceil" here for formal reasons to allow easier approach for calculating the overhead in O(1) + let max_pubdata_in_tx = ceil_div_u256(gas_limit, gas_price_per_pubdata); + + // The maximal potential overhead from pubdata + let pubdata_overhead = ceil_div_u256( + max_pubdata_in_tx * max_block_overhead, + MAX_PUBDATA_PER_BLOCK.into(), + ); + + let overhead = vec![ + overhead_for_single_instance_circuits, + overhead_for_length, + tx_slot_overhead, + pubdata_overhead, + ] + .into_iter() + .max() + .unwrap(); + + overhead.as_u32() +} + +pub fn get_maximal_allowed_overhead( + total_gas_limit: u32, + gas_per_pubdata_byte_limit: u32, + encoded_len: usize, +) -> u32 { + // Using large U256 type to prevent overflows. + let overhead_for_block_gas = U256::from(block_overhead_gas(gas_per_pubdata_byte_limit)); + let total_gas_limit = U256::from(total_gas_limit); + let gas_per_pubdata_byte_limit = U256::from(gas_per_pubdata_byte_limit); + let encoded_len = U256::from(encoded_len); + + // Derivation of overhead consists of 4 parts: + // 1. The overhead for taking up a transaction's slot. (O1): O1 = 1 / MAX_TXS_IN_BLOCK + // 2. The overhead for taking up the bootloader's memory (O2): O2 = encoded_len / BOOTLOADER_TX_ENCODING_SPACE + // 3. The overhead for possible usage of pubdata. (O3): O3 = max_pubdata_in_tx / MAX_PUBDATA_PER_BLOCK + // 4. The overhead for possible usage of all the single-instance circuits. 
(O4): O4 = gas_limit / MAX_TX_ERGS_LIMIT + // + // The maximum of these is taken to derive the part of the block's overhead to be paid by the users: + // + // max_overhead = max(O1, O2, O3, O4) + // overhead_gas = ceil(max_overhead * overhead_for_block_gas). Thus, overhead_gas is a function of + // tx_gas_limit, gas_per_pubdata_byte_limit and encoded_len. + // + // While it is possible to derive the overhead with binary search in O(log n), it is too expensive to be done + // on L1, so here is a reference implementation of finding the overhead for transaction in O(1): + // + // Given total_gas_limit = tx_gas_limit + overhead_gas, we need to find overhead_gas and tx_gas_limit, such that: + // 1. overhead_gas is maximal possible (the operator is paid fairly) + // 2. overhead_gas(tx_gas_limit, gas_per_pubdata_byte_limit, encoded_len) >= overhead_gas (the user does not overpay) + // The third part boils to the following 4 inequalities (at least one of these must hold): + // ceil(O1 * overhead_for_block_gas) >= overhead_gas + // ceil(O2 * overhead_for_block_gas) >= overhead_gas + // ceil(O3 * overhead_for_block_gas) >= overhead_gas + // ceil(O4 * overhead_for_block_gas) >= overhead_gas + // + // Now, we need to solve each of these separately: + + // 1. The overhead for occupying a single tx slot is a constant: + let tx_slot_overhead = ceil_div_u256(overhead_for_block_gas, MAX_TXS_IN_BLOCK.into()); + + // 2. The overhead for occupying the bootloader memory can be derived from encoded_len + let overhead_for_length = ceil_div_u256( + encoded_len * overhead_for_block_gas, + BOOTLOADER_TX_ENCODING_SPACE.into(), + ); + + // 3. ceil(O3 * overhead_for_block_gas) >= overhead_gas + // O3 = max_pubdata_in_tx / MAX_PUBDATA_PER_BLOCK = ceil(gas_limit / gas_per_pubdata_byte_limit) / MAX_PUBDATA_PER_BLOCK + // >= (gas_limit / (gas_per_pubdata_byte_limit * MAX_PUBDATA_PER_BLOCK). 
Throwing off the `ceil`, while may provide marginally lower + // overhead to the operator, provides substantially easier formula to work with. + // + // For better clarity, let's denote gas_limit = GL, MAX_PUBDATA_PER_BLOCK = MP, gas_per_pubdata_byte_limit = EP, overhead_for_block_gas = OB, total_gas_limit = TL, overhead_gas = OE + // ceil(OB * (TL - OE) / (EP * MP)) >= OE + // + // OB * (TL - OE) / (MP * EP) > OE - 1 + // OB * (TL - OE) > (OE - 1) * EP * MP + // OB * TL + EP * MP > OE * EP * MP + OE * OB + // (OB * TL + EP * MP) / (EP * MP + OB) > OE + // OE = floor((OB * TL + EP * MP) / (EP * MP + OB)) with possible -1 if the division is without remainder + let overhead_for_pubdata = { + let numerator: U256 = overhead_for_block_gas * total_gas_limit + + gas_per_pubdata_byte_limit * U256::from(MAX_PUBDATA_PER_BLOCK); + let denominator = + gas_per_pubdata_byte_limit * U256::from(MAX_PUBDATA_PER_BLOCK) + overhead_for_block_gas; + + // Corner case: if `total_gas_limit` = `gas_per_pubdata_byte_limit` = 0 + // then the numerator will be 0 and subtracting 1 will cause a panic, so we just return a zero. + if numerator.is_zero() { + 0.into() + } else { + (numerator - 1) / denominator + } + }; + + // 4. ceil(O4 * overhead_for_block_gas) >= overhead_gas + // O4 = gas_limit / MAX_TX_ERGS_LIMIT. 
Using the notation from the previous equation: + // ceil(OB * GL / MAX_TX_ERGS_LIMIT) >= OE + // ceil(OB * (TL - OE) / MAX_TX_ERGS_LIMIT) >= OE + // OB * (TL - OE) / MAX_TX_ERGS_LIMIT > OE - 1 + // OB * (TL - OE) > OE * MAX_TX_ERGS_LIMIT - MAX_TX_ERGS_LIMIT + // OB * TL + MAX_TX_ERGS_LIMIT > OE * ( MAX_TX_ERGS_LIMIT + OB) + // OE = floor((OB * TL + MAX_TX_ERGS_LIMIT) / (MAX_TX_ERGS_LIMIT + OB)), with possible -1 if the division is without remainder + let overhead_for_gas = { + let numerator = overhead_for_block_gas * total_gas_limit + U256::from(MAX_TX_ERGS_LIMIT); + let denominator: U256 = U256::from(MAX_TX_ERGS_LIMIT) + overhead_for_block_gas; + + (numerator - 1) / denominator + }; + + let max_overhead = vec![ + tx_slot_overhead, + overhead_for_length, + overhead_for_pubdata, + overhead_for_gas, + ] + .into_iter() + .max() + // For the sake of consistency making sure that total_gas_limit >= max_overhead + .map(|max_overhead| std::cmp::min(max_overhead, total_gas_limit)) + .unwrap(); + + max_overhead.as_u32() +} + +pub(crate) fn block_overhead_gas(gas_per_pubdata_byte: u32) -> u32 { + BLOCK_OVERHEAD_GAS + BLOCK_OVERHEAD_PUBDATA * gas_per_pubdata_byte +} + +#[cfg(test)] +mod tests { + + use zksync_types::fee::encoding_len; + + use super::*; + + // This method returns the maximum block overhead that can be charged from the user based on the binary search approach + pub fn get_maximal_allowed_overhead_bin_search( + total_gas_limit: u32, + gas_per_pubdata_byte_limit: u32, + encoded_len: usize, + ) -> u32 { + let mut left_bound = if MAX_TX_ERGS_LIMIT < total_gas_limit { + total_gas_limit - MAX_TX_ERGS_LIMIT + } else { + 0u32 + }; + // Safe cast: the gas_limit for a transaction can not be larger than 2^32 + let mut right_bound = total_gas_limit; + + // The closure returns whether a certain overhead would be accepted by the bootloader. + // It is accepted if the derived overhead (i.e.
the actual overhead that the user has to pay) + // is >= than the overhead proposed by the operator. + let is_overhead_accepted = |suggested_overhead: u32| { + let derived_overhead = derive_overhead( + total_gas_limit - suggested_overhead, + gas_per_pubdata_byte_limit, + encoded_len, + ); + + derived_overhead >= suggested_overhead + }; + + // In order to find the maximal allowed overhead we are doing binary search + while left_bound + 1 < right_bound { + let mid = (left_bound + right_bound) / 2; + + if is_overhead_accepted(mid) { + left_bound = mid; + } else { + right_bound = mid; + } + } + + if is_overhead_accepted(right_bound) { + right_bound + } else { + left_bound + } + } + + #[test] + fn test_correctness_for_efficient_overhead() { + let test_params = |total_gas_limit: u32, gas_per_pubdata: u32, encoded_len: usize| { + assert!( + total_gas_limit / gas_per_pubdata <= MAX_PUBDATA_PER_BLOCK, + "The input data should not allow too much pubdata per block" + ); + + let result_by_efficient_search = + get_maximal_allowed_overhead(total_gas_limit, gas_per_pubdata, encoded_len); + + let result_by_binary_search = get_maximal_allowed_overhead_bin_search( + total_gas_limit, + gas_per_pubdata, + encoded_len, + ); + + assert_eq!(result_by_efficient_search, result_by_binary_search); + }; + + // Some arbitrary test + test_params(60_000_000, 800, 2900); + + // Very small parameters + test_params(0, 1, 12); + + // Relatively big parameters + let max_tx_overhead = derive_overhead(MAX_TX_ERGS_LIMIT, 5000, 10000); + test_params(MAX_TX_ERGS_LIMIT + max_tx_overhead, 5000, 10000); + } + + #[test] + fn test_consistency_with_encoding_length() { + let transaction = TransactionData { + tx_type: 113, + from: Address::random(), + to: Address::random(), + gas_limit: U256::from(1u32), + pubdata_price_limit: U256::from(1u32), + max_fee_per_gas: U256::from(1u32), + max_priority_fee_per_gas: U256::from(1u32), + paymaster: Address::random(), + nonce: U256::zero(), + value: U256::zero(), + // The 
reserved fields that are unique for different types of transactions. + // E.g. nonce is currently used in all transaction, but it should not be mandatory + // in the long run. + reserved: [U256::zero(); 4], + data: vec![0u8; 65], + signature: vec![0u8; 75], + // The factory deps provided with the transaction. + // Note that *only hashes* of these bytecodes are signed by the user + // and they are used in the ABI encoding of the struct. + factory_deps: vec![vec![0u8; 32], vec![1u8; 32]], + paymaster_input: vec![0u8; 85], + reserved_dynamic: vec![0u8; 32], + }; + + let assumed_encoded_len = encoding_len(65, 75, 2, 85, 32); + + let true_encoding_len = transaction.into_tokens().len(); + + assert_eq!(assumed_encoded_len, true_encoding_len); + } +} diff --git a/core/lib/vm/src/utils.rs b/core/lib/vm/src/utils.rs new file mode 100644 index 000000000000..2a9c73c497e0 --- /dev/null +++ b/core/lib/vm/src/utils.rs @@ -0,0 +1,281 @@ +use crate::{memory::SimpleMemory, vm_with_bootloader::BlockContext}; + +use zk_evm::{ + aux_structures::{LogQuery, MemoryPage, Timestamp}, + block_properties::BlockProperties, + vm_state::PrimitiveValue, + zkevm_opcode_defs::FatPointer, +}; +use zksync_contracts::{read_zbin_bytecode, DEFAULT_ACCOUNT_CODE}; +use zksync_state::secondary_storage::SecondaryStateStorage; +use zksync_types::{ + get_code_key, get_system_context_init_logs, system_contracts::get_system_smart_contracts, + Address, L1BatchNumber, StorageLog, StorageLogQuery, FAIR_L2_GAS_PRICE, H160, H256, + MAX_L2_TX_GAS_LIMIT, U256, +}; +use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words}; + +pub const INITIAL_TIMESTAMP: u32 = 1024; +pub const INITIAL_MEMORY_COUNTER: u32 = 2048; +pub const INITIAL_CALLDATA_PAGE: u32 = 7; +pub const INITIAL_BASE_PAGE: u32 = 8; +pub const ENTRY_POINT_PAGE: u32 = code_page_candidate_from_base(MemoryPage(INITIAL_BASE_PAGE)).0; + +/// How many gas bootloader is allowed to spend within one block. 
+/// Note that this value doesn't correspond to the gas limit of any particular transaction +/// (except for the fact that, of course, gas limit for each transaction should be <= `BLOCK_GAS_LIMIT`). +pub const BLOCK_GAS_LIMIT: u32 = zk_evm::zkevm_opcode_defs::system_params::VM_INITIAL_FRAME_ERGS; +pub const ETH_CALL_GAS_LIMIT: u32 = MAX_L2_TX_GAS_LIMIT as u32; + +#[derive(Debug, Clone)] +pub enum VmExecutionResult { + Ok(Vec), + Revert(Vec), + Panic, + MostLikelyDidNotFinish(Address, u16), +} + +pub const fn code_page_candidate_from_base(base: MemoryPage) -> MemoryPage { + MemoryPage(base.0) +} + +pub const fn stack_page_from_base(base: MemoryPage) -> MemoryPage { + MemoryPage(base.0 + 1) +} + +pub const fn heap_page_from_base(base: MemoryPage) -> MemoryPage { + MemoryPage(base.0 + 2) +} + +pub const fn aux_heap_page_from_base(base: MemoryPage) -> MemoryPage { + MemoryPage(base.0 + 3) +} + +pub(crate) fn dump_memory_page_using_primitive_value( + memory: &SimpleMemory, + ptr: PrimitiveValue, +) -> Vec { + if !ptr.is_pointer { + return vec![]; + } + let fat_ptr = FatPointer::from_u256(ptr.value); + dump_memory_page_using_fat_pointer(memory, fat_ptr) +} + +pub(crate) fn dump_memory_page_using_fat_pointer( + memory: &SimpleMemory, + fat_ptr: FatPointer, +) -> Vec { + dump_memory_page_by_offset_and_length( + memory, + fat_ptr.memory_page, + (fat_ptr.start + fat_ptr.offset) as usize, + (fat_ptr.length - fat_ptr.offset) as usize, + ) +} + +pub(crate) fn dump_memory_page_by_offset_and_length( + memory: &SimpleMemory, + page: u32, + offset: usize, + length: usize, +) -> Vec { + assert!(offset < (1u32 << 24) as usize); + assert!(length < (1u32 << 24) as usize); + let mut dump = Vec::with_capacity(length); + if length == 0 { + return dump; + } + + let first_word = offset / 32; + let end_byte = offset + length; + let mut last_word = end_byte / 32; + if end_byte % 32 != 0 { + last_word += 1; + } + + let unalignment = offset % 32; + + let page_part = + 
memory.dump_page_content_as_u256_words(page, (first_word as u32)..(last_word as u32)); + + let mut is_first = true; + let mut remaining = length; + for word in page_part.into_iter() { + let it = word.into_be_iter(); + if is_first { + is_first = false; + let it = it.skip(unalignment); + for next in it { + if remaining > 0 { + dump.push(next); + remaining -= 1; + } + } + } else { + for next in it { + if remaining > 0 { + dump.push(next); + remaining -= 1; + } + } + } + } + + assert_eq!( + dump.len(), + length, + "tried to dump with offset {}, length {}, got a bytestring of length {}", + offset, + length, + dump.len() + ); + + dump +} + +pub trait FixedLengthIterator<'a, I: 'a, const N: usize>: Iterator +where + Self: 'a, +{ + fn next(&mut self) -> Option<::Item> { + ::next(self) + } +} + +pub trait IntoFixedLengthByteIterator { + type IntoIter: FixedLengthIterator<'static, u8, N>; + fn into_le_iter(self) -> Self::IntoIter; + fn into_be_iter(self) -> Self::IntoIter; +} + +pub struct FixedBufferValueIterator { + iter: std::array::IntoIter, +} + +impl Iterator for FixedBufferValueIterator { + type Item = T; + fn next(&mut self) -> Option { + self.iter.next() + } +} + +impl FixedLengthIterator<'static, T, N> + for FixedBufferValueIterator +{ +} + +impl IntoFixedLengthByteIterator<32> for U256 { + type IntoIter = FixedBufferValueIterator; + fn into_le_iter(self) -> Self::IntoIter { + let mut buffer = [0u8; 32]; + self.to_little_endian(&mut buffer); + + FixedBufferValueIterator { + iter: IntoIterator::into_iter(buffer), + } + } + + fn into_be_iter(self) -> Self::IntoIter { + let mut buffer = [0u8; 32]; + self.to_big_endian(&mut buffer); + + FixedBufferValueIterator { + iter: IntoIterator::into_iter(buffer), + } + } +} + +/// Collects storage log queries where `log.log_query.timestamp >= from_timestamp`. +/// Denote `n` to be the number of such queries, then it works in O(n). 
+pub fn collect_storage_log_queries_after_timestamp( + all_log_queries: &[StorageLogQuery], + from_timestamp: Timestamp, +) -> Vec { + all_log_queries + .iter() + .rev() + .take_while(|log_query| log_query.log_query.timestamp >= from_timestamp) + .cloned() + .collect::>() + .into_iter() + .rev() + .collect() +} + +/// Collects all log queries where `log_query.timestamp >= from_timestamp`. +/// Denote `n` to be the number of such queries, then it works in O(n). +pub fn collect_log_queries_after_timestamp( + all_log_queries: &[LogQuery], + from_timestamp: Timestamp, +) -> Vec { + all_log_queries + .iter() + .rev() + .take_while(|log_query| log_query.timestamp >= from_timestamp) + .cloned() + .collect::>() + .into_iter() + .rev() + .collect() +} + +/// Receives sorted slice of timestamps. +/// Returns count of timestamps that are greater than or equal to `from_timestamp`. +/// Works in O(log(sorted_timestamps.len())). +pub fn precompile_calls_count_after_timestamp( + sorted_timestamps: &[Timestamp], + from_timestamp: Timestamp, +) -> usize { + sorted_timestamps.len() - sorted_timestamps.partition_point(|t| *t < from_timestamp) +} + +pub fn default_block_properties() -> BlockProperties { + BlockProperties { + default_aa_code_hash: DEFAULT_ACCOUNT_CODE.hash, + zkporter_is_available: false, + } +} + +pub fn create_test_block_params() -> (BlockContext, BlockProperties) { + let context = BlockContext { + block_number: 1u32, + block_timestamp: 1000, + l1_gas_price: 50_000_000_000, // 50 gwei + fair_l2_gas_price: FAIR_L2_GAS_PRICE, + operator_address: H160::zero(), + }; + + let block_properties = default_block_properties(); + + (context, block_properties) +} + +pub fn insert_system_contracts(raw_storage: &mut SecondaryStateStorage) { + let contracts = get_system_smart_contracts(); + let system_context_init_log = get_system_context_init_logs(H256::from_low_u64_be(270)); + + let logs: Vec = contracts + .iter() + .map(|contract| { + let deployer_code_key = 
get_code_key(contract.account_id.address()); + StorageLog::new_write_log(deployer_code_key, hash_bytecode(&contract.bytecode)) + }) + .chain(system_context_init_log) + .collect(); + raw_storage.process_transaction_logs(&logs); + + for contract in contracts { + raw_storage.store_contract(*contract.account_id.address(), contract.bytecode.clone()); + raw_storage.store_factory_dep(hash_bytecode(&contract.bytecode), contract.bytecode); + } + raw_storage.save(L1BatchNumber(0)) +} + +pub fn read_bootloader_test_code(test: &str) -> Vec { + let bytecode = read_zbin_bytecode(format!( + "etc/system-contracts/bootloader/tests/artifacts/{}.yul/{}.yul.zbin", + test, test + )); + bytes_to_be_words(bytecode) +} diff --git a/core/lib/vm/src/vm.rs b/core/lib/vm/src/vm.rs new file mode 100644 index 000000000000..0fd543472242 --- /dev/null +++ b/core/lib/vm/src/vm.rs @@ -0,0 +1,904 @@ +use std::fmt::Debug; + +use zk_evm::aux_structures::Timestamp; +use zk_evm::vm_state::{PrimitiveValue, VmLocalState, VmState}; +use zk_evm::witness_trace::DummyTracer; +use zk_evm::zkevm_opcode_defs::decoding::{AllowedPcOrImm, EncodingModeProduction, VmEncodingMode}; +use zk_evm::zkevm_opcode_defs::definitions::RET_IMPLICIT_RETURNDATA_PARAMS_REGISTER; +use zksync_config::constants::MAX_TXS_IN_BLOCK; +use zksync_types::l2_to_l1_log::L2ToL1Log; +use zksync_types::tx::tx_execution_info::{TxExecutionStatus, VmExecutionLogs}; +use zksync_types::vm_trace::VmExecutionTrace; +use zksync_types::{L1BatchNumber, StorageLogQuery, VmEvent, U256}; +use zksync_utils::bytes_to_be_words; + +use crate::bootloader_state::BootloaderState; +use crate::errors::{TxRevertReason, VmRevertReason, VmRevertReasonParsingResult}; +use crate::event_sink::InMemoryEventSink; +use crate::events::merge_events; +use crate::memory::SimpleMemory; +use crate::oracles::decommitter::DecommitterOracle; +use crate::oracles::precompile::PrecompilesProcessorWithHistory; +use crate::oracles::storage::StorageOracle; +use crate::oracles::tracer::{ + 
BootloaderTracer, ExecutionEndTracer, NoopMemoryTracer, OneTxTracer, PendingRefundTracer, + ValidationError, ValidationTracer, ValidationTracerParams, +}; +use crate::oracles::OracleWithHistory; +use crate::utils::{ + collect_log_queries_after_timestamp, collect_storage_log_queries_after_timestamp, + dump_memory_page_using_primitive_value, precompile_calls_count_after_timestamp, +}; +use crate::vm_with_bootloader::{ + BootloaderJobType, DerivedBlockContext, TxExecutionMode, BOOTLOADER_HEAP_PAGE, + OPERATOR_REFUNDS_OFFSET, +}; +use crate::Word; + +pub type ZkSyncVmState<'a> = VmState< + 'a, + StorageOracle<'a>, + SimpleMemory, + InMemoryEventSink, + PrecompilesProcessorWithHistory, + DecommitterOracle<'a, false>, + DummyTracer, +>; + +pub const MAX_MEM_SIZE_BYTES: u32 = 16777216; // 2^24 + +// Arbitrary space in memory closer to the end of the page +pub const RESULT_SUCCESS_FIRST_SLOT: u32 = + (MAX_MEM_SIZE_BYTES - (MAX_TXS_IN_BLOCK as u32) * 32) / 32; +// The slot that is used for tracking vm hooks +pub const VM_HOOK_POSITION: u32 = RESULT_SUCCESS_FIRST_SLOT - 1; +pub const VM_HOOK_PARAMS_COUNT: u32 = 2; +pub const VM_HOOK_PARAMS_START_POSITION: u32 = VM_HOOK_POSITION - VM_HOOK_PARAMS_COUNT; + +pub(crate) fn get_vm_hook_params(memory: &SimpleMemory) -> Vec { + memory.dump_page_content_as_u256_words( + BOOTLOADER_HEAP_PAGE, + VM_HOOK_PARAMS_START_POSITION..VM_HOOK_PARAMS_START_POSITION + VM_HOOK_PARAMS_COUNT, + ) +} + +#[derive(Debug)] +pub struct VmInstance<'a> { + pub gas_limit: u32, + pub state: ZkSyncVmState<'a>, + pub execution_mode: TxExecutionMode, + pub block_context: DerivedBlockContext, + pub(crate) bootloader_state: BootloaderState, + + pub snapshots: Vec, +} + +/// This structure stores data that accumulates during the VM run. 
+#[derive(Debug, PartialEq)] +pub struct VmExecutionResult { + pub events: Vec, + pub storage_log_queries: Vec, + pub used_contract_hashes: Vec, + pub l2_to_l1_logs: Vec, + pub return_data: Vec, + + /// Value denoting the amount of gas spent withing VM invocation. + /// Note that return value represents the difference between the amount of gas + /// available to VM before and after execution. + /// + /// It means, that depending on the context, `gas_used` may represent different things. + /// If VM is continously invoked and interrupted after each tx, this field may represent the + /// amount of gas spent by a single transaction. + /// + /// To understand, which value does `gas_used` represent, see the documentation for the method + /// that you use to get `VmExecutionResult` object. + /// + /// Side note: this may sound confusing, but this arises from the nature of the bootloader: for it, + /// processing multiple transactions is a single action. We *may* intrude and stop VM once transaction + /// is executed, but it's not enforced. So best we can do is to calculate the amount of gas before and + /// after the invocation, leaving the interpretation of this value to the user. + pub gas_used: u32, + pub contracts_used: usize, + pub revert_reason: Option, + pub trace: VmExecutionTrace, + pub total_log_queries: usize, + pub cycles_used: u32, +} + +impl VmExecutionResult { + pub fn error_message(&self) -> Option { + self.revert_reason + .as_ref() + .map(|result| result.revert_reason.to_string()) + } +} + +#[derive(Debug, PartialEq)] +pub struct VmBlockResult { + /// Result for the whole block execution. + pub full_result: VmExecutionResult, + /// Result for the block tip execution. 
+ pub block_tip_result: VmPartialExecutionResult, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct VmPartialExecutionResult { + pub logs: VmExecutionLogs, + pub revert_reason: Option, + pub contracts_used: usize, + pub cycles_used: u32, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct VmTxExecutionResult { + pub status: TxExecutionStatus, + pub result: VmPartialExecutionResult, + // Gas refunded to the user at the end of the transaction + pub gas_refunded: u32, + // Gas proposed by the operator to be refunded, before the postOp call. + // This value is needed to correctly recover memory of the bootloader. + pub operator_suggested_refund: u32, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum VmExecutionStopReason { + VmFinished, + TracerRequestedStop, +} + +use crate::utils::VmExecutionResult as NewVmExecutionResult; + +fn vm_may_have_ended_inner( + vm: &VmState< + StorageOracle, + SimpleMemory, + InMemoryEventSink, + PrecompilesProcessorWithHistory, + DecommitterOracle, + DummyTracer, + >, +) -> Option { + let execution_has_ended = vm.execution_has_ended(); + + let r1 = vm.local_state.registers[RET_IMPLICIT_RETURNDATA_PARAMS_REGISTER as usize]; + let current_address = vm.local_state.callstack.get_current_stack().this_address; + + let outer_eh_location = >::PcOrImm::MAX.as_u64(); + match ( + execution_has_ended, + vm.local_state.callstack.get_current_stack().pc.as_u64(), + ) { + (true, 0) => { + let returndata = dump_memory_page_using_primitive_value(vm.memory, r1); + + Some(NewVmExecutionResult::Ok(returndata)) + } + (false, _) => None, + (true, l) if l == outer_eh_location => { + // check r1,r2,r3 + if vm.local_state.flags.overflow_or_less_than_flag { + Some(NewVmExecutionResult::Panic) + } else { + let returndata = dump_memory_page_using_primitive_value(vm.memory, r1); + Some(NewVmExecutionResult::Revert(returndata)) + } + } + (_, a) => Some(NewVmExecutionResult::MostLikelyDidNotFinish( + current_address, + a as u16, + )), + } +} + +// This method 
returns `VmExecutionResult` struct, but some of the fields are left empty. +// +// `gas_before` argument is used to calculate the amount of gas spent by transaction. +// It is required because the same VM instance is continuously used to apply several transactions. +fn vm_may_have_ended(vm: &VmInstance, gas_before: u32) -> Option { + let basic_execution_result = vm_may_have_ended_inner(&vm.state)?; + + let gas_used = gas_before - vm.gas_remaining(); + + match basic_execution_result { + NewVmExecutionResult::Ok(mut data) => { + while data.len() % 32 != 0 { + data.push(0) + } + Some(VmExecutionResult { + // The correct `events` value for this field should be set separately + // later on based on the information inside the event_sink oracle. + events: vec![], + storage_log_queries: vm.get_final_log_queries(), + used_contract_hashes: vm.get_used_contracts(), + l2_to_l1_logs: vec![], + return_data: bytes_to_be_words(data), + gas_used, + contracts_used: vm + .state + .decommittment_processor + .get_used_bytecode_hashes() + .len(), + revert_reason: None, + trace: VmExecutionTrace::default(), + total_log_queries: vm.state.event_sink.get_log_queries() + + vm.state.precompiles_processor.get_timestamp_history().len() + + vm.get_final_log_queries().len(), + cycles_used: vm.state.local_state.monotonic_cycle_counter, + }) + } + NewVmExecutionResult::Revert(data) => { + let revert_reason = VmRevertReasonParsingResult::new( + TxRevertReason::parse_error(data.as_slice()), + data, + ); + + // Check if error indicates a bug in server/vm/bootloader. + if matches!( + revert_reason.revert_reason, + TxRevertReason::UnexpectedVMBehavior(_) + ) { + vlog::error!( + "Observed error that should never happen: {:?}. 
Full VM data: {:?}", + revert_reason, + vm + ); + } + + Some(VmExecutionResult { + events: vec![], + storage_log_queries: vm.get_final_log_queries(), + used_contract_hashes: vm.get_used_contracts(), + l2_to_l1_logs: vec![], + return_data: vec![], + gas_used, + contracts_used: vm + .state + .decommittment_processor + .get_used_bytecode_hashes() + .len(), + revert_reason: Some(revert_reason), + trace: VmExecutionTrace::default(), + total_log_queries: vm.state.event_sink.get_log_queries() + + vm.state.precompiles_processor.get_timestamp_history().len() + + vm.get_final_log_queries().len(), + cycles_used: vm.state.local_state.monotonic_cycle_counter, + }) + } + // Panic is effectively the same as Revert, but has different nature. + NewVmExecutionResult::Panic => Some(VmExecutionResult { + events: vec![], + storage_log_queries: vec![], + used_contract_hashes: vec![], + l2_to_l1_logs: vec![], + return_data: vec![], + gas_used, + contracts_used: vm + .state + .decommittment_processor + .get_used_bytecode_hashes() + .len(), + revert_reason: Some(VmRevertReasonParsingResult { + revert_reason: TxRevertReason::Unknown(VmRevertReason::VmError), + original_data: vec![], + }), + trace: VmExecutionTrace::default(), + total_log_queries: vm.state.event_sink.get_log_queries() + + vm.state.precompiles_processor.get_timestamp_history().len() + + vm.get_final_log_queries().len(), + cycles_used: vm.state.local_state.monotonic_cycle_counter, + }), + NewVmExecutionResult::MostLikelyDidNotFinish(_, _) => { + // The execution has not ended yet. It should either continue + // or throw Out-of-gas error. + None + } + } +} + +/// A snapshot of the VM that holds enough information to +/// rollback the VM to some historical state. 
+#[derive(Debug, Clone)] +pub struct VmSnapshot { + local_state: VmLocalState, + bootloader_state: BootloaderState, +} + +impl<'a> VmInstance<'a> { + fn has_ended(&self) -> bool { + match vm_may_have_ended_inner(&self.state) { + None | Some(NewVmExecutionResult::MostLikelyDidNotFinish(_, _)) => false, + Some( + NewVmExecutionResult::Ok(_) + | NewVmExecutionResult::Revert(_) + | NewVmExecutionResult::Panic, + ) => true, + } + } + + fn revert_reason(&self) -> Option { + match vm_may_have_ended_inner(&self.state) { + None + | Some( + NewVmExecutionResult::MostLikelyDidNotFinish(_, _) | NewVmExecutionResult::Ok(_), + ) => None, + Some(NewVmExecutionResult::Revert(data)) => { + let revert_reason = VmRevertReasonParsingResult::new( + TxRevertReason::parse_error(data.as_slice()), + data, + ); + + // Check if error indicates a bug in server/vm/bootloader. + if matches!( + revert_reason.revert_reason, + TxRevertReason::UnexpectedVMBehavior(_) + ) { + vlog::error!( + "Observed error that should never happen: {:?}. Full VM data: {:?}", + revert_reason, + self + ); + } + + Some(revert_reason) + } + Some(NewVmExecutionResult::Panic) => Some(VmRevertReasonParsingResult { + revert_reason: TxRevertReason::Unknown(VmRevertReason::VmError), + original_data: vec![], + }), + } + } + + /// Saves the snapshot of the current state of the VM that can be used + /// to roll back its state later on. + pub fn save_current_vm_as_snapshot(&mut self) { + self.snapshots.push(VmSnapshot { + // Vm local state contains O(1) various parameters (registers/etc). + // The only "expensive" copying here is copying of the callstack. + // It will take O(callstack_depth) to copy it. + // So it is generally recommended to get snapshots of the bootloader frame, + // where the depth is 1. 
+ local_state: self.state.local_state.clone(), + bootloader_state: self.bootloader_state.clone(), + }); + } + + fn rollback_to_snapshot(&mut self, snapshot: VmSnapshot) { + let VmSnapshot { + local_state, + bootloader_state, + } = snapshot; + + let timestamp = Timestamp(local_state.timestamp); + + vlog::trace!("Rollbacking decomitter"); + self.state + .decommittment_processor + .rollback_to_timestamp(timestamp); + + vlog::trace!("Rollbacking event_sink"); + self.state.event_sink.rollback_to_timestamp(timestamp); + + vlog::trace!("Rollbacking storage"); + self.state.storage.rollback_to_timestamp(timestamp); + + vlog::trace!("Rollbacking memory"); + self.state.memory.rollback_to_timestamp(timestamp); + + vlog::trace!("Rollbacking precompiles_processor"); + self.state + .precompiles_processor + .rollback_to_timestamp(timestamp); + self.state.local_state = local_state; + self.bootloader_state = bootloader_state; + } + + /// Rollbacks the state of the VM to the state of the latest snapshot. + pub fn rollback_to_latest_snapshot(&mut self) { + let snapshot = self.snapshots.last().cloned().unwrap(); + self.rollback_to_snapshot(snapshot); + } + + /// Rollbacks the state of the VM to the state of the latest snapshot. + /// Removes that snapshot from the list. + pub fn rollback_to_latest_snapshot_popping(&mut self) { + let snapshot = self.snapshots.pop().unwrap(); + self.rollback_to_snapshot(snapshot); + } + + /// Returns the amount of gas remaining to the VM. + /// Note that this *does not* correspond to the gas limit of a transaction. + /// To calculate the amount of gas spent by transaction, you should call this method before and after + /// the execution, and subtract these values. + /// + /// Note: this method should only be called when either transaction is fully completed or VM completed + /// its execution. Remaining gas value is read from the current stack frame, so if you'll attempt to + /// read it during the transaction execution, you may receive invalid value. 
+ fn gas_remaining(&self) -> u32 { + self.state.local_state.callstack.current.ergs_remaining + } + + /// Returns the amount of gas consumed by the VM so far (based on the `gas_limit` provided + /// to initiate the virtual machine). + /// + /// Note: this method should only be called when either transaction is fully completed or VM completed + /// its execution. Remaining gas value is read from the current stack frame, so if you'll attempt to + /// read it during the transaction execution, you may receive invalid value. + pub fn gas_consumed(&self) -> u32 { + self.gas_limit - self.gas_remaining() + } + + pub(crate) fn collect_events_and_l1_logs_after_timestamp( + &self, + from_timestamp: Timestamp, + ) -> (Vec, Vec) { + let (raw_events, l1_messages) = self + .state + .event_sink + .get_events_and_l2_l1_logs_after_timestamp(from_timestamp); + let events = merge_events(raw_events) + .into_iter() + .map(|e| e.into_vm_event(L1BatchNumber(self.block_context.context.block_number))) + .collect(); + ( + events, + l1_messages.into_iter().map(L2ToL1Log::from).collect(), + ) + } + + fn collect_execution_logs_after_timestamp(&self, from_timestamp: Timestamp) -> VmExecutionLogs { + let storage_logs = collect_storage_log_queries_after_timestamp( + &self + .state + .storage + .frames_stack + .inner() + .current_frame() + .forward, + from_timestamp, + ); + let storage_logs_count = storage_logs.len(); + + let (events, l2_to_l1_logs) = + self.collect_events_and_l1_logs_after_timestamp(from_timestamp); + + let log_queries = collect_log_queries_after_timestamp( + &self + .state + .event_sink + .frames_stack + .inner() + .current_frame() + .forward, + from_timestamp, + ); + + let precompile_calls_count = precompile_calls_count_after_timestamp( + self.state.precompiles_processor.timestamp_history.inner(), + from_timestamp, + ); + VmExecutionLogs { + storage_logs, + events, + l2_to_l1_logs, + total_log_queries_count: storage_logs_count + + log_queries.len() + + precompile_calls_count, + } 
+ } + + // Returns a tuple of `VmExecutionStopReason` and the size of the refund proposed by the operator + fn execute_with_custom_tracer_and_refunds( + &mut self, + tracer: &mut T, + ) -> (VmExecutionStopReason, u32) { + let mut operator_refund = None; + let timestamp_initial = Timestamp(self.state.local_state.timestamp); + let gas_remaining_before = self.gas_remaining(); + + loop { + // Sanity check: we should never reach the maximum value, because then we won't be able to process the next cycle. + assert_ne!( + self.state.local_state.monotonic_cycle_counter, + u32::MAX, + "VM reached maximum possible amount of cycles. Vm state: {:?}", + self.state + ); + + let timestamp_before_cycle = self.state.local_state.timestamp; + self.state.cycle(tracer); + + if self.has_ended() { + return ( + VmExecutionStopReason::VmFinished, + operator_refund.unwrap_or_default(), + ); + } + + if let Some(bootloader_refund) = tracer.requested_refund() { + assert!( + operator_refund.is_none(), + "Operator was asked for refund two times" + ); + + let refund_to_propose = bootloader_refund + + self.block_overhead_refund(timestamp_initial, gas_remaining_before); + let refund_slot = + OPERATOR_REFUNDS_OFFSET + self.bootloader_state.tx_to_execute() - 1; + + // Writing the refund into memory + self.state.memory.memory.write_to_memory( + BOOTLOADER_HEAP_PAGE as usize, + refund_slot, + Some(PrimitiveValue { + value: refund_to_propose.into(), + is_pointer: false, + }), + Timestamp(timestamp_before_cycle), + ); + operator_refund = Some(refund_to_propose); + tracer.set_refund_as_done(); + } + + if tracer.should_stop_execution() { + return ( + VmExecutionStopReason::TracerRequestedStop, + operator_refund.unwrap_or_default(), + ); + } + } + } + + /// Calculates the refund for the block overhead. + /// This refund is the difference between how much user paid in advance for the block overhead + /// and how much he should pay based on actual tx execution result. 
+ fn block_overhead_refund(&self, _from_timestamp: Timestamp, _gas_remaining_before: u32) -> u32 { + 0 + + // let pubdata_used = self.pubdata_used(from_timestamp); + + // let gas_used = gas_remaining_before - self.gas_remaining(); + // // Can be fixed in the scope of SMA-1654 because it also requires calculation of `pubdata_paid_for`. + // let computational_gas_used = + // gas_used - pubdata_used * self.state.local_state.current_ergs_per_pubdata_byte; + // let (_, l2_to_l1_logs) = self.collect_events_and_l1_logs_after_timestamp(from_timestamp); + // let current_tx_index = self.bootloader_state.tx_to_execute() - 1; + + // let actual_overhead = Self::actual_overhead_gas( + // self.state.local_state.current_ergs_per_pubdata_byte, + // self.bootloader_state.get_tx_size(current_tx_index), + // pubdata_used, + // computational_gas_used, + // self.state + // .decommittment_processor + // .get_number_of_decommitment_requests_after_timestamp(from_timestamp), + // l2_to_l1_logs.len(), + // ); + + // let predefined_overhead = self + // .state + // .memory + // .read_slot( + // BOOTLOADER_HEAP_PAGE as usize, + // TX_OVERHEAD_OFFSET + current_tx_index, + // ) + // .value + // .as_u32(); + + // if actual_overhead <= predefined_overhead { + // predefined_overhead - actual_overhead + // } else { + // // This should never happen but potential mistakes at the early stage should not bring the server down. 
+ // vlog::error!( + // "Actual overhead is greater than predefined one, actual: {}, predefined: {}", + // actual_overhead, + // predefined_overhead + // ); + // 0 + // } + } + + #[allow(dead_code)] + fn actual_overhead_gas( + _gas_per_pubdata_byte_limit: u32, + _encoded_len: usize, + _pubdata_used: u32, + _computational_gas_used: u32, + _number_of_decommitment_requests: usize, + _l2_l1_logs: usize, + ) -> u32 { + 0 + + // let overhead_for_block_gas = U256::from(crate::transaction_data::block_overhead_gas( + // gas_per_pubdata_byte_limit, + // )); + + // let encoded_len = U256::from(encoded_len); + // let pubdata_used = U256::from(pubdata_used); + // let computational_gas_used = U256::from(computational_gas_used); + // let number_of_decommitment_requests = U256::from(number_of_decommitment_requests); + // let l2_l1_logs = U256::from(l2_l1_logs); + + // let tx_slot_overhead = ceil_div_u256(overhead_for_block_gas, MAX_TXS_IN_BLOCK.into()); + + // let overhead_for_length = ceil_div_u256( + // encoded_len * overhead_for_block_gas, + // BOOTLOADER_TX_ENCODING_SPACE.into(), + // ); + + // let actual_overhead_for_pubdata = ceil_div_u256( + // pubdata_used * overhead_for_block_gas, + // MAX_PUBDATA_PER_BLOCK.into(), + // ); + + // let actual_gas_limit_overhead = ceil_div_u256( + // computational_gas_used * overhead_for_block_gas, + // MAX_BLOCK_MULTIINSTANCE_GAS_LIMIT.into(), + // ); + + // let code_decommitter_sorter_circuit_overhead = ceil_div_u256( + // number_of_decommitment_requests * overhead_for_block_gas, + // GEOMETRY_CONFIG.limit_for_code_decommitter_sorter.into(), + // ); + + // let l1_l2_logs_overhead = ceil_div_u256( + // l2_l1_logs * overhead_for_block_gas, + // std::cmp::min( + // GEOMETRY_CONFIG.limit_for_l1_messages_merklizer, + // GEOMETRY_CONFIG.limit_for_l1_messages_pudata_hasher, + // ) + // .into(), + // ); + + // let overhead = vec![ + // tx_slot_overhead, + // overhead_for_length, + // actual_overhead_for_pubdata, + // actual_gas_limit_overhead, + 
// code_decommitter_sorter_circuit_overhead, + // l1_l2_logs_overhead, + // ] + // .into_iter() + // .max() + // .unwrap(); + + // overhead.as_u32() + } + + // Executes VM until the end or tracer says to stop. + pub(crate) fn execute_with_custom_tracer( + &mut self, + tracer: &mut T, + ) -> VmExecutionStopReason { + self.execute_with_custom_tracer_and_refunds(tracer).0 + } + + // Err when transaction is rejected. + // Ok(status: TxExecutionStatus::Success) when the transaction succeeded + // Ok(status: TxExecutionStatus::Failure) when the transaction failed. + // Note that failed transactions are considered properly processed and are included in blocks + pub fn execute_next_tx(&mut self) -> Result { + let tx_index = self.bootloader_state.next_unexecuted_tx() as u32; + let mut tx_tracer = OneTxTracer::default(); + + let timestamp_initial = Timestamp(self.state.local_state.timestamp); + let cycles_initial = self.state.local_state.monotonic_cycle_counter; + + let (stop_reason, operator_suggested_refund) = + self.execute_with_custom_tracer_and_refunds(&mut tx_tracer); + match stop_reason { + VmExecutionStopReason::VmFinished => { + // Bootloader resulted in panic or revert, this means either the transaction is rejected + // (e.g. not enough fee or incorrect signature) or bootloader is out of gas. + + // Collect generated events to show bootloader debug logs. 
+ let _ = self.collect_events_and_l1_logs_after_timestamp(timestamp_initial); + + let error = if tx_tracer.is_bootloader_out_of_gas() { + TxRevertReason::BootloaderOutOfGas + } else { + self.revert_reason() + .expect("vm ended execution prematurely, but no revert reason is given") + .revert_reason + }; + Err(error) + } + VmExecutionStopReason::TracerRequestedStop => { + if tx_tracer.tx_has_been_processed() { + let tx_execution_status = + TxExecutionStatus::from_has_failed(tx_has_failed(&self.state, tx_index)); + let vm_execution_logs = + self.collect_execution_logs_after_timestamp(timestamp_initial); + + Ok(VmTxExecutionResult { + gas_refunded: tx_tracer.refund_gas, + operator_suggested_refund, + status: tx_execution_status, + result: VmPartialExecutionResult { + logs: vm_execution_logs, + // If there is a revert Err is already returned above. + revert_reason: None, + // getting contracts used during this transaction + // at least for now the number returned here is always <= to the number + // of the code hashes actually used by the transaction, since it might've + // reused bytecode hashes from some of the previous ones. + contracts_used: self + .state + .decommittment_processor + .get_decommitted_bytes_after_timestamp(timestamp_initial), + cycles_used: self.state.local_state.monotonic_cycle_counter + - cycles_initial, + }, + }) + } else { + // VM ended up in state `stop_reason == VmExecutionStopReason::TracerRequestedStop && !tx_tracer.tx_has_been_processed()`. + // It means that bootloader successfully finished its execution without executing the transaction. + // It is an unexpected situation. + panic!("VM successfully finished executing bootloader but transaction wasn't executed"); + } + } + } + } + + /// Returns full VM result and partial result produced within the current execution. 
+ pub fn execute_till_block_end(&mut self, job_type: BootloaderJobType) -> VmBlockResult { + let timestamp_initial = Timestamp(self.state.local_state.timestamp); + let cycles_initial = self.state.local_state.monotonic_cycle_counter; + let gas_before = self.gas_remaining(); + + let stop_reason = self.execute_with_custom_tracer(&mut NoopMemoryTracer); + match stop_reason { + VmExecutionStopReason::VmFinished => { + let mut full_result = vm_may_have_ended(self, gas_before).unwrap(); + + // if `job_type == BootloaderJobType::TransactionExecution` it means + // that the transaction has been executed as eth_call. + if job_type == BootloaderJobType::TransactionExecution + && tx_has_failed(&self.state, 0) + && full_result.revert_reason.is_none() + { + full_result.revert_reason = Some(VmRevertReasonParsingResult { + revert_reason: TxRevertReason::TxOutOfGas, + original_data: vec![], + }); + } + + let block_tip_result = VmPartialExecutionResult { + logs: self.collect_execution_logs_after_timestamp(timestamp_initial), + revert_reason: full_result.revert_reason.clone().map(|r| r.revert_reason), + contracts_used: self + .state + .decommittment_processor + .get_decommitted_bytes_after_timestamp(timestamp_initial), + cycles_used: self.state.local_state.monotonic_cycle_counter - cycles_initial, + }; + + // Collecting `block_tip_result` needs logs with timestamp, so we drain events for the `full_result` + // after because draining will drop timestamps. 
+ let (_full_history, raw_events, l1_messages) = self.state.event_sink.flatten(); + full_result.events = merge_events(raw_events) + .into_iter() + .map(|e| { + e.into_vm_event(L1BatchNumber(self.block_context.context.block_number)) + }) + .collect(); + full_result.l2_to_l1_logs = l1_messages.into_iter().map(L2ToL1Log::from).collect(); + VmBlockResult { + full_result, + block_tip_result, + } + } + VmExecutionStopReason::TracerRequestedStop => { + unreachable!("NoopMemoryTracer will never stop execution until the block ends") + } + } + } + + /// Unlike `execute_till_block_end` methods returns only result for the block tip execution. + pub fn execute_block_tip(&mut self) -> VmPartialExecutionResult { + let timestamp_initial = Timestamp(self.state.local_state.timestamp); + let cycles_initial = self.state.local_state.monotonic_cycle_counter; + let mut bootloader_tracer = BootloaderTracer::default(); + + let stop_reason = self.execute_with_custom_tracer(&mut bootloader_tracer); + let revert_reason = match stop_reason { + VmExecutionStopReason::VmFinished => { + // Bootloader panicked or reverted. + let revert_reason = if bootloader_tracer.is_bootloader_out_of_gas() { + TxRevertReason::BootloaderOutOfGas + } else { + self.revert_reason() + .expect("vm ended execution prematurely, but no revert reason is given") + .revert_reason + }; + Some(revert_reason) + } + VmExecutionStopReason::TracerRequestedStop => { + // Bootloader finished successfully. 
+ None + } + }; + VmPartialExecutionResult { + logs: self.collect_execution_logs_after_timestamp(timestamp_initial), + revert_reason, + contracts_used: self + .state + .decommittment_processor + .get_decommitted_bytes_after_timestamp(timestamp_initial), + cycles_used: self.state.local_state.monotonic_cycle_counter - cycles_initial, + } + } + + pub fn execute_validation( + &mut self, + validation_params: ValidationTracerParams, + ) -> Result<(), ValidationError> { + let mut validation_tracer = ValidationTracer::new( + self.state.storage.storage.inner().get_ptr(), + validation_params, + ); + + let stop_reason = self.execute_with_custom_tracer(&mut validation_tracer); + + match (stop_reason, validation_tracer.validation_error) { + (VmExecutionStopReason::VmFinished, _) => { + // The tx should only end in case of a revert, so it is safe to unwrap here + Err(ValidationError::FailedTx(self.revert_reason().unwrap())) + } + (VmExecutionStopReason::TracerRequestedStop, Some(err)) => { + Err(ValidationError::VioalatedRule(err)) + } + (VmExecutionStopReason::TracerRequestedStop, None) => Ok(()), + } + } + + // returns Some only when there is just one frame in execution trace. + fn get_final_log_queries(&self) -> Vec { + assert_eq!( + self.state.storage.frames_stack.inner().len(), + 1, + "VM finished execution in unexpected state" + ); + + let result = self + .state + .storage + .frames_stack + .inner() + .current_frame() + .forward + .clone(); + + result + } + + fn get_used_contracts(&self) -> Vec { + self.state + .decommittment_processor + .known_bytecodes + .inner() + .keys() + .cloned() + .collect() + } + + pub fn number_of_updated_storage_slots(&self) -> usize { + self.state + .storage + .storage + .inner() + .get_ptr() + .borrow_mut() + .number_of_updated_storage_slots() + } +} + +// Reads the bootloader memory and checks whether the execution step of the transaction +// has failed. 
+pub(crate) fn tx_has_failed(state: &ZkSyncVmState<'_>, tx_id: u32) -> bool { + let mem_slot = RESULT_SUCCESS_FIRST_SLOT + tx_id; + let mem_value = state + .memory + .dump_page_content_as_u256_words(BOOTLOADER_HEAP_PAGE, mem_slot..mem_slot + 1)[0]; + + mem_value == U256::zero() +} diff --git a/core/lib/vm/src/vm_with_bootloader.rs b/core/lib/vm/src/vm_with_bootloader.rs new file mode 100644 index 000000000000..3f591f97dff6 --- /dev/null +++ b/core/lib/vm/src/vm_with_bootloader.rs @@ -0,0 +1,575 @@ +use std::{collections::HashMap, time::Instant}; + +use zk_evm::{ + abstractions::{MAX_HEAP_PAGE_SIZE_IN_WORDS, MAX_MEMORY_BYTES}, + aux_structures::{MemoryPage, Timestamp}, + block_properties::BlockProperties, + vm_state::{CallStackEntry, PrimitiveValue, VmState}, + zkevm_opcode_defs::{ + system_params::INITIAL_FRAME_FORMAL_EH_LOCATION, FatPointer, BOOTLOADER_BASE_PAGE, + BOOTLOADER_CALLDATA_PAGE, STARTING_BASE_PAGE, STARTING_TIMESTAMP, + }, +}; +use zksync_config::constants::MAX_TXS_IN_BLOCK; +use zksync_contracts::{ + DEFAULT_ACCOUNT_CODE, ESTIMATE_FEE_BLOCK_CODE, PLAYGROUND_BLOCK_BOOTLOADER_CODE, + PROVED_BLOCK_BOOTLOADER_CODE, +}; + +use zksync_types::{ + zkevm_test_harness::INITIAL_MONOTONIC_CYCLE_COUNTER, Address, Transaction, BOOTLOADER_ADDRESS, + L1_GAS_PER_PUBDATA_BYTE, MAX_GAS_PER_PUBDATA_BYTE, MAX_NEW_FACTORY_DEPS, U256, +}; +use zksync_utils::{address_to_u256, bytecode::hash_bytecode, bytes_to_be_words, misc::ceil_div}; + +use crate::{ + bootloader_state::BootloaderState, + oracles::OracleWithHistory, + transaction_data::TransactionData, + utils::{ + code_page_candidate_from_base, heap_page_from_base, BLOCK_GAS_LIMIT, INITIAL_BASE_PAGE, + }, + vm::ZkSyncVmState, + OracleTools, VmInstance, +}; + +pub const BLOCK_OVERHEAD_GAS: u32 = 1200000; +pub const BLOCK_OVERHEAD_L1_GAS: u32 = 1000000; +pub const BLOCK_OVERHEAD_PUBDATA: u32 = BLOCK_OVERHEAD_L1_GAS / L1_GAS_PER_PUBDATA_BYTE; + +pub const MAX_BLOCK_MULTIINSTANCE_GAS_LIMIT: u32 = 300_000_000; + +/// 
`BlockContext` is a structure that contains parameters for
+/// a block that are used as input for the bootloader and not the VM per se.
+///
+/// These values are generally unique for each block (the exception is the operator's address).
+#[derive(Clone, Debug, Copy)]
+pub struct BlockContext {
+ pub block_number: u32,
+ pub block_timestamp: u64,
+ pub operator_address: Address,
+ pub l1_gas_price: u64,
+ pub fair_l2_gas_price: u64,
+}
+
+/// Besides the raw values from the `BlockContext`, contains the values that are to be derived
+/// from the other values
+#[derive(Debug, Copy, Clone)]
+pub struct DerivedBlockContext {
+ pub context: BlockContext,
+ pub base_fee: u64,
+}
+
+fn eth_price_per_pubdata_byte(l1_gas_price: u64) -> u64 {
+ // This value will typically be a lot less than u64
+ // unless the gas price on L1 goes beyond tens of millions of gwei
+ l1_gas_price * (L1_GAS_PER_PUBDATA_BYTE as u64)
+}
+
+pub fn base_fee_to_gas_per_pubdata(l1_gas_price: u64, base_fee: u64) -> u64 {
+ let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price);
+
+ ceil_div(eth_price_per_pubdata_byte, base_fee)
+}
+
+pub fn derive_base_fee_and_gas_per_pubdata(l1_gas_price: u64, fair_gas_price: u64) -> (u64, u64) {
+ let eth_price_per_pubdata_byte = eth_price_per_pubdata_byte(l1_gas_price);
+
+ // The baseFee is set in such a way that it is always possible for a transaction to
+ // publish enough public data while compensating us for it.
+ let base_fee = std::cmp::max( + fair_gas_price, + ceil_div(eth_price_per_pubdata_byte, MAX_GAS_PER_PUBDATA_BYTE), + ); + + ( + base_fee, + base_fee_to_gas_per_pubdata(l1_gas_price, base_fee), + ) +} + +impl From for DerivedBlockContext { + fn from(context: BlockContext) -> Self { + let base_fee = + derive_base_fee_and_gas_per_pubdata(context.l1_gas_price, context.fair_l2_gas_price).0; + + DerivedBlockContext { context, base_fee } + } +} + +// The first 32 slots are reserved for debugging purposes +pub const DEBUG_SLOTS_OFFSET: usize = 8; +pub const DEBUG_FIRST_SLOTS: usize = 32; +// The next 33 slots are reserved for dealing with the paymaster context (1 slot for storing length + 32 slots for storing the actual context). +pub const PAYMASTER_CONTEXT_SLOTS: usize = 32 + 1; +// The next PAYMASTER_CONTEXT_SLOTS + 7 slots free slots are needed before each tx, so that the +// postOp operation could be encoded correctly. +pub const MAX_POSTOP_SLOTS: usize = PAYMASTER_CONTEXT_SLOTS + 7; + +// Slots used to store the current L2 transaction's hash and the hash recommended +// to be used for signing the transaction's content. +const CURRENT_L2_TX_HASHES_SLOTS: usize = 2; + +// Slots used to store the calldata for the KnownCodesStorage to mark new factory +// dependencies as known ones. Besides the slots for the new factory dependencies themselves +// another 4 slots are needed for: selector, marker of whether the user should pay for the pubdata, +// the offset for the encoding of the array as well as the length of the array. 
+pub const NEW_FACTORY_DEPS_RESERVED_SLOTS: usize = MAX_NEW_FACTORY_DEPS + 4; + +// The operator can provide for each transaction the proposed minimal refund +pub const OPERATOR_REFUNDS_SLOTS: usize = MAX_TXS_IN_BLOCK; + +pub const OPERATOR_REFUNDS_OFFSET: usize = DEBUG_SLOTS_OFFSET + + DEBUG_FIRST_SLOTS + + PAYMASTER_CONTEXT_SLOTS + + CURRENT_L2_TX_HASHES_SLOTS + + NEW_FACTORY_DEPS_RESERVED_SLOTS; + +pub const TX_OVERHEAD_OFFSET: usize = OPERATOR_REFUNDS_OFFSET + OPERATOR_REFUNDS_SLOTS; +pub const TX_OVERHEAD_SLOTS: usize = MAX_TXS_IN_BLOCK; + +pub const BOOTLOADER_TX_DESCRIPTION_OFFSET: usize = TX_OVERHEAD_OFFSET + TX_OVERHEAD_SLOTS; + +// The size of the bootloader memory dedicated to the encodings of transactions +pub const BOOTLOADER_TX_ENCODING_SPACE: u32 = + (MAX_HEAP_PAGE_SIZE_IN_WORDS - TX_DESCRIPTION_OFFSET - MAX_TXS_IN_BLOCK) as u32; + +// Size of the bootloader tx description in words +pub const BOOTLOADER_TX_DESCRIPTION_SIZE: usize = 2; + +// The actual descriptions of transactions should start after the minor descriptions and a MAX_POSTOP_SLOTS +// free slots to allow postOp encoding. +pub const TX_DESCRIPTION_OFFSET: usize = BOOTLOADER_TX_DESCRIPTION_OFFSET + + BOOTLOADER_TX_DESCRIPTION_SIZE * MAX_TXS_IN_BLOCK + + MAX_POSTOP_SLOTS; + +pub(crate) const BOOTLOADER_HEAP_PAGE: u32 = heap_page_from_base(MemoryPage(INITIAL_BASE_PAGE)).0; +const BOOTLOADER_CODE_PAGE: u32 = code_page_candidate_from_base(MemoryPage(INITIAL_BASE_PAGE)).0; + +/// Enum denoting the *in-server* execution mode for the bootloader transactions. +/// +/// If `EthCall` mode is chosen, the bootloader will use `mimicCall` opcode +/// to simulate the call instead of using the standard `execute` method of account. +/// This is needed to be able to behave equivalently to Ethereum without much overhead for custom account builders. +/// With `VerifyExecute` mode, transaction will be executed normally. 
+/// With `EstimateFee`, the bootloader will be used that has the same behavior +/// as the full `VerifyExecute` block, but errors in the account validation will be ignored. +#[derive(Debug, Clone, Copy)] +pub enum TxExecutionMode { + VerifyExecute, + EstimateFee, + EthCall, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum BootloaderJobType { + TransactionExecution, + BlockPostprocessing, +} + +impl Default for TxExecutionMode { + fn default() -> Self { + Self::VerifyExecute + } +} + +pub fn init_vm<'a>( + oracle_tools: &'a mut OracleTools<'a, false>, + block_context: BlockContextMode, + block_properties: &'a BlockProperties, + execution_mode: TxExecutionMode, +) -> Box> { + init_vm_with_gas_limit( + oracle_tools, + block_context, + block_properties, + execution_mode, + BLOCK_GAS_LIMIT, + ) +} + +pub fn init_vm_with_gas_limit<'a>( + oracle_tools: &'a mut OracleTools<'a, false>, + block_context: BlockContextMode, + block_properties: &'a BlockProperties, + execution_mode: TxExecutionMode, + gas_limit: u32, +) -> Box> { + let bootloader_code = match (&block_context, execution_mode) { + (_, TxExecutionMode::EthCall) => PLAYGROUND_BLOCK_BOOTLOADER_CODE.code.clone(), + (BlockContextMode::OverrideCurrent(_), TxExecutionMode::VerifyExecute) => { + PLAYGROUND_BLOCK_BOOTLOADER_CODE.code.clone() + } + (_, TxExecutionMode::EstimateFee) => ESTIMATE_FEE_BLOCK_CODE.code.clone(), + _ => PROVED_BLOCK_BOOTLOADER_CODE.code.clone(), + }; + + init_vm_inner( + oracle_tools, + block_context, + block_properties, + gas_limit, + bootloader_code, + execution_mode, + ) +} + +#[derive(Debug, Clone, Copy)] +// The block.number/block.timestamp data are stored in the CONTEXT_SYSTEM_CONTRACT. +// The bootloader can support execution in two modes: +// - "NewBlock" when the new block is created. It is enforced that the block.number is incremented by 1 +// and the timestamp is non-decreasing. Also, the L2->L1 message used to verify the correctness of the previous root hash is sent. 
+// This is the mode that should be used in the state keeper. +// - "OverrideCurrent" when we need to provide custom block.number and block.timestamp. ONLY to be used in testing/ethCalls. +pub enum BlockContextMode { + NewBlock(DerivedBlockContext, U256), + OverrideCurrent(DerivedBlockContext), +} + +impl BlockContextMode { + const OPERATOR_ADDRESS_SLOT: usize = 0; + const PREV_BLOCK_HASH_SLOT: usize = 1; + const NEW_BLOCK_TIMESTAMP_SLOT: usize = 2; + const NEW_BLOCK_NUMBER_SLOT: usize = 3; + const L1_GAS_PRICE_SLOT: usize = 4; + const FAIR_L2_GAS_PRICE_SLOT: usize = 5; + const EXPECTED_BASE_FEE_SLOT: usize = 6; + const SHOULD_SET_NEW_BLOCK_SLOT: usize = 7; + + // Returns the previous block hash and timestamp fields that should be used by the bootloader. + // If the timestamp is 0, then the bootloader will not attempt to start a new block + // and will continue using the existing block properties. + fn bootloader_block_params(&self) -> Vec<(usize, U256)> { + let DerivedBlockContext { context, base_fee } = self.inner_block_context(); + + let mut base_params: HashMap = vec![ + ( + Self::OPERATOR_ADDRESS_SLOT, + address_to_u256(&context.operator_address), + ), + (Self::PREV_BLOCK_HASH_SLOT, Default::default()), + ( + Self::NEW_BLOCK_TIMESTAMP_SLOT, + U256::from(context.block_timestamp), + ), + ( + Self::NEW_BLOCK_NUMBER_SLOT, + U256::from(context.block_number), + ), + (Self::L1_GAS_PRICE_SLOT, U256::from(context.l1_gas_price)), + ( + Self::FAIR_L2_GAS_PRICE_SLOT, + U256::from(context.fair_l2_gas_price), + ), + (Self::EXPECTED_BASE_FEE_SLOT, U256::from(base_fee)), + (Self::SHOULD_SET_NEW_BLOCK_SLOT, U256::from(0u32)), + ] + .into_iter() + .collect(); + + match *self { + BlockContextMode::OverrideCurrent(_) => base_params.into_iter().collect(), + BlockContextMode::NewBlock(_, prev_block_hash) => { + base_params.insert(Self::PREV_BLOCK_HASH_SLOT, prev_block_hash); + base_params.insert(Self::SHOULD_SET_NEW_BLOCK_SLOT, U256::from(1u32)); + base_params.into_iter().collect() 
+ } + } + } + + pub fn inner_block_context(&self) -> DerivedBlockContext { + match *self { + BlockContextMode::OverrideCurrent(props) => props, + BlockContextMode::NewBlock(props, _) => props, + } + } + + pub fn timestamp(&self) -> u64 { + self.inner_block_context().context.block_timestamp + } +} + +// This method accepts a custom bootloader code. +// It should be used only in tests. +pub fn init_vm_inner<'a>( + oracle_tools: &'a mut OracleTools<'a, false>, + block_context: BlockContextMode, + block_properties: &'a BlockProperties, + gas_limit: u32, + bootloader_bytecode: Vec, + execution_mode: TxExecutionMode, +) -> Box> { + let start = Instant::now(); + + oracle_tools.decommittment_processor.populate( + vec![(DEFAULT_ACCOUNT_CODE.hash, DEFAULT_ACCOUNT_CODE.code.clone())], + Timestamp(0), + ); + + oracle_tools.memory.populate( + vec![(BOOTLOADER_CODE_PAGE, bootloader_bytecode)], + Timestamp(0), + ); + + oracle_tools.memory.populate_page( + BOOTLOADER_HEAP_PAGE as usize, + bootloader_initial_memory(&block_context), + Timestamp(0), + ); + + let state = get_default_local_state(oracle_tools, block_properties, gas_limit); + + let vm = Box::new(VmInstance { + gas_limit, + state, + execution_mode, + block_context: block_context.inner_block_context(), + bootloader_state: BootloaderState::new(), + snapshots: Vec::new(), + }); + + metrics::histogram!("server.vm.init", start.elapsed()); + vm +} + +fn bootloader_initial_memory(block_properties: &BlockContextMode) -> Vec<(usize, U256)> { + block_properties.bootloader_block_params() +} + +pub fn get_bootloader_memory( + txs: Vec, + predefined_refunds: Vec, + execution_mode: TxExecutionMode, + block_context: BlockContextMode, +) -> Vec<(usize, U256)> { + let mut memory = bootloader_initial_memory(&block_context); + + let mut already_included_txs_size = 0; + for (tx_index_in_block, tx) in txs.into_iter().enumerate() { + let memory_for_current_tx = get_bootloader_memory_for_tx( + tx.clone(), + tx_index_in_block, + execution_mode, 
+ already_included_txs_size, + predefined_refunds[tx_index_in_block], + ); + memory.extend(memory_for_current_tx); + let encoded_struct = tx.into_tokens(); + let encoding_length = encoded_struct.len(); + already_included_txs_size += encoding_length; + } + memory +} + +pub fn push_transaction_to_bootloader_memory( + vm: &mut VmInstance, + tx: &Transaction, + execution_mode: TxExecutionMode, +) { + let tx: TransactionData = tx.clone().into(); + let overhead = tx.overhead_gas(); + push_raw_transaction_to_bootloader_memory(vm, tx, execution_mode, overhead); +} + +pub fn push_raw_transaction_to_bootloader_memory( + vm: &mut VmInstance, + tx: TransactionData, + execution_mode: TxExecutionMode, + predefined_overhead: u32, +) { + let tx_index_in_block = vm.bootloader_state.free_tx_index(); + let already_included_txs_size = vm.bootloader_state.free_tx_offset(); + + let timestamp = Timestamp(vm.state.local_state.timestamp); + let codes_for_decommiter = tx + .factory_deps + .iter() + .map(|dep| bytecode_to_factory_dep(dep.clone())) + .collect(); + vm.state + .decommittment_processor + .populate(codes_for_decommiter, timestamp); + + let encoded_tx = tx.into_tokens(); + let encoded_tx_size = encoded_tx.len(); + + let bootloader_memory = get_bootloader_memory_for_encoded_tx( + encoded_tx, + tx_index_in_block, + execution_mode, + already_included_txs_size, + 0, + predefined_overhead, + ); + + vm.state.memory.populate_page( + BOOTLOADER_HEAP_PAGE as usize, + bootloader_memory, + Timestamp(vm.state.local_state.timestamp), + ); + vm.bootloader_state.add_tx_data(encoded_tx_size); +} + +fn get_bootloader_memory_for_tx( + tx: TransactionData, + tx_index_in_block: usize, + execution_mode: TxExecutionMode, + already_included_txs_size: usize, + predefined_refund: u32, +) -> Vec<(usize, U256)> { + let overhead_gas = tx.overhead_gas(); + get_bootloader_memory_for_encoded_tx( + tx.into_tokens(), + tx_index_in_block, + execution_mode, + already_included_txs_size, + predefined_refund, + 
overhead_gas, + ) +} + +pub(crate) fn get_bootloader_memory_for_encoded_tx( + encoded_tx: Vec, + tx_index_in_block: usize, + execution_mode: TxExecutionMode, + already_included_txs_size: usize, + predefined_refund: u32, + predefined_overhead: u32, +) -> Vec<(usize, U256)> { + let mut memory: Vec<(usize, U256)> = Vec::default(); + let bootloader_description_offset = + BOOTLOADER_TX_DESCRIPTION_OFFSET + BOOTLOADER_TX_DESCRIPTION_SIZE * tx_index_in_block; + + let tx_description_offset = TX_DESCRIPTION_OFFSET + already_included_txs_size; + + // Marking that this transaction should be executed. + memory.push(( + bootloader_description_offset, + assemble_tx_meta(execution_mode, true), + )); + memory.push(( + bootloader_description_offset + 1, + U256::from_big_endian(&(32 * tx_description_offset).to_be_bytes()), + )); + + let refund_offset = OPERATOR_REFUNDS_OFFSET + tx_index_in_block; + memory.push((refund_offset, predefined_refund.into())); + + let overhead_offset = TX_OVERHEAD_OFFSET + tx_index_in_block; + memory.push((overhead_offset, predefined_overhead.into())); + + // Now we need to actually put the transaction description: + let encoding_length = encoded_tx.len(); + memory.extend((tx_description_offset..tx_description_offset + encoding_length).zip(encoded_tx)); + + memory +} + +fn get_default_local_state<'a>( + tools: &'a mut OracleTools<'a, false>, + block_properties: &'a BlockProperties, + gas_limit: u32, +) -> ZkSyncVmState<'a> { + let mut vm = VmState::empty_state( + &mut tools.storage, + &mut tools.memory, + &mut tools.event_sink, + &mut tools.precompiles_processor, + &mut tools.decommittment_processor, + &mut tools.witness_tracer, + block_properties, + ); + + let initial_context = CallStackEntry { + this_address: BOOTLOADER_ADDRESS, + msg_sender: Address::zero(), + code_address: BOOTLOADER_ADDRESS, + base_memory_page: MemoryPage(BOOTLOADER_BASE_PAGE), + code_page: MemoryPage(BOOTLOADER_CODE_PAGE), + sp: 0, + pc: 0, + // Note, that since the results are 
written at the end of the memory + // it is needed to have the entire heap available from the beginning + heap_bound: MAX_MEMORY_BYTES as u32, + aux_heap_bound: MAX_MEMORY_BYTES as u32, + exception_handler_location: INITIAL_FRAME_FORMAL_EH_LOCATION, + ergs_remaining: gas_limit, + this_shard_id: 0, + caller_shard_id: 0, + code_shard_id: 0, + is_static: false, + is_local_frame: false, + context_u128_value: 0, + }; + + // We consider the contract that is being run as a bootloader + vm.push_bootloader_context(INITIAL_MONOTONIC_CYCLE_COUNTER - 1, initial_context); + vm.local_state.timestamp = STARTING_TIMESTAMP; + vm.local_state.memory_page_counter = STARTING_BASE_PAGE; + vm.local_state.monotonic_cycle_counter = INITIAL_MONOTONIC_CYCLE_COUNTER; + vm.local_state.current_ergs_per_pubdata_byte = 0; + vm.local_state.registers[0] = formal_calldata_abi(); + + // Deleting all the historical records brought by the initial + // initialization of the VM to make them permanent. + vm.decommittment_processor.delete_history(); + vm.event_sink.delete_history(); + vm.storage.delete_history(); + vm.memory.delete_history(); + vm.precompiles_processor.delete_history(); + + vm +} + +fn formal_calldata_abi() -> PrimitiveValue { + let fat_pointer = FatPointer { + offset: 0, + memory_page: BOOTLOADER_CALLDATA_PAGE, + start: 0, + length: 0, + }; + + PrimitiveValue { + value: fat_pointer.to_u256(), + is_pointer: true, + } +} + +pub(crate) fn bytecode_to_factory_dep(bytecode: Vec) -> (U256, Vec) { + let bytecode_hash = hash_bytecode(&bytecode); + let bytecode_hash = U256::from_big_endian(bytecode_hash.as_bytes()); + + let bytecode_words = bytes_to_be_words(bytecode); + + (bytecode_hash, bytecode_words) +} + +/// Forms a word that contains meta information for the transaction execution. 
+/// +/// # Current layout +/// +/// - 0 byte (MSB): server-side tx execution mode +/// In the server, we may want to execute different parts of the transaction in the different context +/// For example, when checking validity, we don't want to actually execute transaction and have side effects. +/// +/// Possible values: +/// - 0x00: validate & execute (normal mode) +/// - 0x01: validate but DO NOT execute +/// - 0x02: execute but DO NOT validate +/// +/// - 31 byte (LSB): whether to execute transaction or not (at all). +fn assemble_tx_meta(execution_mode: TxExecutionMode, execute_tx: bool) -> U256 { + let mut output = [0u8; 32]; + + // Set 0 byte (execution mode) + output[0] = match execution_mode { + TxExecutionMode::VerifyExecute => 0x00, + TxExecutionMode::EstimateFee => 0x00, + TxExecutionMode::EthCall => 0x02, + }; + + // Set 31 byte (marker for tx execution) + output[31] = u8::from(execute_tx); + + U256::from_big_endian(&output) +} diff --git a/core/lib/web3_decl/Cargo.toml b/core/lib/web3_decl/Cargo.toml new file mode 100644 index 000000000000..22cf1689ea73 --- /dev/null +++ b/core/lib/web3_decl/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "zksync_web3_decl" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +itertools = "0.10.1" +serde = "1.0" +serde_json = "1.0" +rlp = "0.5.0" +thiserror = "1.0" +bigdecimal = { version = "=0.2.0", features = ["serde"] } +jsonrpsee = { version = "0.16.2", default-features = false, features = ["macros"] } +chrono = "0.4" +zksync_types = { path = "../../lib/types", version = "1.0" } + +[features] +default = ["server", "client"] +server = ["jsonrpsee/server"] +client = ["jsonrpsee/client"] diff --git a/core/lib/web3_decl/src/error.rs b/core/lib/web3_decl/src/error.rs new file mode 100644 index 
000000000000..1395014939a7 --- /dev/null +++ b/core/lib/web3_decl/src/error.rs @@ -0,0 +1,36 @@ +//! Definition of errors that can occur in the zkSync Web3 API. + +use thiserror::Error; +use zksync_types::api::SerializationTransactionError; + +#[derive(Debug, Error)] +pub enum Web3Error { + #[error("Block with such an ID doesn't exist yet")] + NoBlock, + #[error("Request timeout")] + RequestTimeout, + #[error("Internal error")] + InternalError, + #[error("RLP decoding error: {0}")] + RLPError(#[from] rlp::DecoderError), + #[error("No function with given signature found")] + NoSuchFunction, + #[error("Invalid transaction data: {0}")] + InvalidTransactionData(#[from] zksync_types::ethabi::Error), + #[error("Failed to submit transaction: {0}")] + SubmitTransactionError(String), + #[error("Failed to serialize transaction: {0}")] + SerializationError(#[from] SerializationTransactionError), + #[error("Invalid fee parameters: {0}")] + InvalidFeeParams(String), + #[error("More than four topics in filter")] + TooManyTopics, + #[error("Your connection time exceeded the limit")] + PubSubTimeout, + #[error("Filter not found")] + FilterNotFound, + #[error("Not implemented")] + NotImplemented, + #[error("Query returned more than {0} results. Try with this block range [{1:#x}, {2:#x}].")] + LogsLimitExceeded(usize, u32, u32), +} diff --git a/core/lib/web3_decl/src/lib.rs b/core/lib/web3_decl/src/lib.rs new file mode 100644 index 000000000000..6072da410e02 --- /dev/null +++ b/core/lib/web3_decl/src/lib.rs @@ -0,0 +1,17 @@ +//! `zksync_web3_decl` is a collection of common types required for zkSync Web3 API +//! and also `jsonrpsee`-based declaration of server and client traits. +//! +//! Web3 namespaces are declared in `namespaces` module. +//! +//! For the usage of these traits, check the documentation of `jsonrpsee` crate. 
+ +#![allow(clippy::derive_partial_eq_without_eq)] + +#[cfg(all(not(feature = "server"), not(feature = "client")))] +std::compile_error!(r#"At least on of features ["server", "client"] must be enabled"#); + +pub mod error; +pub mod namespaces; +pub mod types; + +pub use jsonrpsee; diff --git a/core/lib/web3_decl/src/namespaces/eth.rs b/core/lib/web3_decl/src/namespaces/eth.rs new file mode 100644 index 000000000000..2716dd6a0686 --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/eth.rs @@ -0,0 +1,160 @@ +// External uses +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; + +// Workspace uses +use crate::types::{ + Block, Bytes, Filter, FilterChanges, Index, Log, SyncState, TransactionReceipt, U256, U64, +}; + +use zksync_types::{ + api::Transaction, + api::{BlockIdVariant, BlockNumber, TransactionVariant}, + transaction_request::CallRequest, + Address, H256, +}; + +// Local uses + +#[cfg_attr( + all(feature = "client", feature = "server"), + rpc(server, client, namespace = "eth") +)] +#[cfg_attr( + all(feature = "client", not(feature = "server")), + rpc(client, namespace = "eth") +)] +#[cfg_attr( + all(not(feature = "client"), feature = "server"), + rpc(server, namespace = "eth") +)] +pub trait EthNamespace { + #[method(name = "blockNumber")] + fn get_block_number(&self) -> RpcResult; + + #[method(name = "chainId")] + fn chain_id(&self) -> RpcResult; + + #[method(name = "call")] + fn call(&self, req: CallRequest, block: Option) -> RpcResult; + + #[method(name = "estimateGas")] + fn estimate_gas(&self, req: CallRequest, _block: Option) -> RpcResult; + + #[method(name = "gasPrice")] + fn gas_price(&self) -> RpcResult; + + #[method(name = "newFilter")] + fn new_filter(&self, filter: Filter) -> RpcResult; + + #[method(name = "newBlockFilter")] + fn new_block_filter(&self) -> RpcResult; + + #[method(name = "uninstallFilter")] + fn uninstall_filter(&self, idx: U256) -> RpcResult; + + #[method(name = "newPendingTransactionFilter")] + fn 
new_pending_transaction_filter(&self) -> RpcResult; + + #[method(name = "getLogs")] + fn get_logs(&self, filter: Filter) -> RpcResult>; + + #[method(name = "getFilterLogs")] + fn get_filter_logs(&self, filter_index: U256) -> RpcResult; + + #[method(name = "getFilterChanges")] + fn get_filter_changes(&self, filter_index: U256) -> RpcResult; + + #[method(name = "getBalance")] + fn get_balance(&self, address: Address, block: Option) -> RpcResult; + + #[method(name = "getBlockByNumber")] + fn get_block_by_number( + &self, + block_number: BlockNumber, + full_transactions: bool, + ) -> RpcResult>>; + + #[method(name = "getBlockByHash")] + fn get_block_by_hash( + &self, + hash: H256, + full_transactions: bool, + ) -> RpcResult>>; + + #[method(name = "getBlockTransactionCountByNumber")] + fn get_block_transaction_count_by_number( + &self, + block_number: BlockNumber, + ) -> RpcResult>; + + #[method(name = "getBlockTransactionCountByHash")] + fn get_block_transaction_count_by_hash(&self, block_hash: H256) -> RpcResult>; + + #[method(name = "getCode")] + fn get_code(&self, address: Address, block: Option) -> RpcResult; + + #[method(name = "getStorageAt")] + fn get_storage_at( + &self, + address: Address, + idx: U256, + block: Option, + ) -> RpcResult; + + #[method(name = "getTransactionCount")] + fn get_transaction_count( + &self, + address: Address, + block: Option, + ) -> RpcResult; + + #[method(name = "getTransactionByHash")] + fn get_transaction_by_hash(&self, hash: H256) -> RpcResult>; + + #[method(name = "getTransactionByBlockHashAndIndex")] + fn get_transaction_by_block_hash_and_index( + &self, + block_hash: H256, + index: Index, + ) -> RpcResult>; + + #[method(name = "getTransactionByBlockNumberAndIndex")] + fn get_transaction_by_block_number_and_index( + &self, + block_number: BlockNumber, + index: Index, + ) -> RpcResult>; + + #[method(name = "getTransactionReceipt")] + fn get_transaction_receipt(&self, hash: H256) -> RpcResult>; + + #[method(name = 
"protocolVersion")] + fn protocol_version(&self) -> RpcResult; + + #[method(name = "sendRawTransaction")] + fn send_raw_transaction(&self, tx_bytes: Bytes) -> RpcResult; + + #[method(name = "syncing")] + fn syncing(&self) -> RpcResult; + + #[method(name = "accounts")] + fn accounts(&self) -> RpcResult>; + + #[method(name = "coinbase")] + fn coinbase(&self) -> RpcResult
; + + #[method(name = "getCompilers")] + fn compilers(&self) -> RpcResult>; + + #[method(name = "hashrate")] + fn hashrate(&self) -> RpcResult; + + #[method(name = "getUncleCountByBlockHash")] + fn get_uncle_count_by_block_hash(&self, hash: H256) -> RpcResult>; + + #[method(name = "getUncleCountByBlockNumber")] + fn get_uncle_count_by_block_number(&self, number: BlockNumber) -> RpcResult>; + + #[method(name = "mining")] + fn mining(&self) -> RpcResult; +} diff --git a/core/lib/web3_decl/src/namespaces/eth_subscribe.rs b/core/lib/web3_decl/src/namespaces/eth_subscribe.rs new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/eth_subscribe.rs @@ -0,0 +1 @@ + diff --git a/core/lib/web3_decl/src/namespaces/mod.rs b/core/lib/web3_decl/src/namespaces/mod.rs new file mode 100644 index 000000000000..3c924f83201c --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/mod.rs @@ -0,0 +1,19 @@ +pub mod eth; +pub mod eth_subscribe; +pub mod net; +pub mod web3; +pub mod zks; + +// Server trait re-exports. +#[cfg(feature = "server")] +pub use self::{ + eth::EthNamespaceServer, net::NetNamespaceServer, web3::Web3NamespaceServer, + zks::ZksNamespaceServer, +}; + +// Client trait re-exports. 
+#[cfg(feature = "client")] +pub use self::{ + eth::EthNamespaceClient, net::NetNamespaceClient, web3::Web3NamespaceClient, + zks::ZksNamespaceClient, +}; diff --git a/core/lib/web3_decl/src/namespaces/net.rs b/core/lib/web3_decl/src/namespaces/net.rs new file mode 100644 index 000000000000..9213b3536716 --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/net.rs @@ -0,0 +1,25 @@ +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; +use zksync_types::U256; + +#[cfg_attr( + all(feature = "client", feature = "server"), + rpc(server, client, namespace = "net") +)] +#[cfg_attr( + all(feature = "client", not(feature = "server")), + rpc(client, namespace = "net") +)] +#[cfg_attr( + all(not(feature = "client"), feature = "server"), + rpc(server, namespace = "net") +)] +pub trait NetNamespace { + #[method(name = "version")] + fn version(&self) -> RpcResult; + + #[method(name = "peerCount")] + fn peer_count(&self) -> RpcResult; + + #[method(name = "listening")] + fn is_listening(&self) -> RpcResult; +} diff --git a/core/lib/web3_decl/src/namespaces/web3.rs b/core/lib/web3_decl/src/namespaces/web3.rs new file mode 100644 index 000000000000..b143477a5f1a --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/web3.rs @@ -0,0 +1,21 @@ +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; + +#[cfg_attr( + all(feature = "client", feature = "server"), + rpc(server, client, namespace = "web3") +)] +#[cfg_attr( + all(feature = "client", not(feature = "server")), + rpc(client, namespace = "web3") +)] +#[cfg_attr( + all(not(feature = "client"), feature = "server"), + rpc(server, namespace = "web3") +)] +pub trait Web3Namespace { + #[method(name = "clientVersion")] + fn client_version(&self) -> RpcResult; + + // `sha3` method is intentionally not implemented for the main server implementation: + // it can easily be implemented on the user side. 
+} diff --git a/core/lib/web3_decl/src/namespaces/zks.rs b/core/lib/web3_decl/src/namespaces/zks.rs new file mode 100644 index 000000000000..3bdbd27a40e5 --- /dev/null +++ b/core/lib/web3_decl/src/namespaces/zks.rs @@ -0,0 +1,95 @@ +use crate::types::Token; +use bigdecimal::BigDecimal; +use jsonrpsee::{core::RpcResult, proc_macros::rpc}; +use std::collections::HashMap; +use zksync_types::api::{BridgeAddresses, L2ToL1LogProof, TransactionDetails}; +use zksync_types::transaction_request::CallRequest; +use zksync_types::{ + api::U64, + explorer_api::BlockDetails, + fee::Fee, + vm_trace::{ContractSourceDebugInfo, VmDebugTrace}, + Address, H256, U256, +}; +use zksync_types::{L1BatchNumber, MiniblockNumber}; + +#[cfg_attr( + all(feature = "client", feature = "server"), + rpc(server, client, namespace = "zks") +)] +#[cfg_attr( + all(feature = "client", not(feature = "server")), + rpc(client, namespace = "zks") +)] +#[cfg_attr( + all(not(feature = "client"), feature = "server"), + rpc(server, namespace = "zks") +)] +pub trait ZksNamespace { + #[method(name = "estimateFee")] + fn estimate_fee(&self, req: CallRequest) -> RpcResult; + + #[method(name = "getMainContract")] + fn get_main_contract(&self) -> RpcResult
; + + #[method(name = "getTestnetPaymaster")] + fn get_testnet_paymaster(&self) -> RpcResult>; + + #[method(name = "getBridgeContracts")] + fn get_bridge_contracts(&self) -> RpcResult; + + #[method(name = "L1ChainId")] + fn l1_chain_id(&self) -> RpcResult; + + #[method(name = "getConfirmedTokens")] + fn get_confirmed_tokens(&self, from: u32, limit: u8) -> RpcResult>; + #[method(name = "getTokenPrice")] + fn get_token_price(&self, token_address: Address) -> RpcResult; + + #[method(name = "setContractDebugInfo")] + fn set_contract_debug_info( + &self, + address: Address, + info: ContractSourceDebugInfo, + ) -> RpcResult; + + #[method(name = "getContractDebugInfo")] + fn get_contract_debug_info( + &self, + address: Address, + ) -> RpcResult>; + + #[method(name = "getTransactionTrace")] + fn get_transaction_trace(&self, hash: H256) -> RpcResult>; + + #[method(name = "getAllAccountBalances")] + fn get_all_account_balances(&self, address: Address) -> RpcResult>; + + #[method(name = "getL2ToL1MsgProof")] + fn get_l2_to_l1_msg_proof( + &self, + block: MiniblockNumber, + sender: Address, + msg: H256, + l2_log_position: Option, + ) -> RpcResult>; + + #[method(name = "getL2ToL1LogProof")] + fn get_l2_to_l1_log_proof( + &self, + tx_hash: H256, + index: Option, + ) -> RpcResult>; + + #[method(name = "L1BatchNumber")] + fn get_l1_batch_number(&self) -> RpcResult; + + #[method(name = "getL1BatchBlockRange")] + fn get_miniblock_range(&self, batch: L1BatchNumber) -> RpcResult>; + + #[method(name = "getBlockDetails")] + fn get_block_details(&self, block_number: MiniblockNumber) -> RpcResult>; + + #[method(name = "getTransactionDetails")] + fn get_transaction_details(&self, hash: H256) -> RpcResult>; +} diff --git a/core/lib/web3_decl/src/types.rs b/core/lib/web3_decl/src/types.rs new file mode 100644 index 000000000000..0b8285366250 --- /dev/null +++ b/core/lib/web3_decl/src/types.rs @@ -0,0 +1,417 @@ +//! Web3 API types definitions. +//! +//! 
Most of the types are re-exported from the `web3` crate, but some of them maybe extended with +//! new variants (enums) or optional fields (structures). +//! +//! These "extensions" are required to provide more zkSync-specific information while remaining Web3-compilant. + +use core::convert::{TryFrom, TryInto}; +use core::fmt; +use core::marker::PhantomData; + +use chrono::NaiveDateTime; +use itertools::unfold; +use rlp::Rlp; +use serde::{de, Deserialize, Serialize, Serializer}; + +pub use zksync_types::{ + api::{Block, BlockNumber, Log, TransactionReceipt, TransactionRequest}, + vm_trace::{ContractSourceDebugInfo, VmDebugTrace, VmExecutionStep}, + web3::{ + ethabi, + types::{ + Address, BlockHeader, Bytes, CallRequest, Index, SyncState, TraceFilter, Transaction, + Work, H160, H256, H64, U256, U64, + }, + }, +}; + +/// Token in the zkSync network +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Token { + pub l1_address: Address, + pub l2_address: Address, + pub name: String, + pub symbol: String, + pub decimals: u8, +} + +/// Helper structure used to parse deserialized `Ethereum` transaction. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct TransactionCalldata { + pub selector: [u8; 4], + pub data: Vec, +} + +/// Helper structure used to parse deserialized `Ethereum` transaction according to `EIP-2718`. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct EIP2718TransactionCallData(TransactionCalldata); + +impl rlp::Decodable for TransactionCalldata { + fn decode(d: &Rlp) -> Result { + if d.item_count()? != 9 { + return Err(rlp::DecoderError::RlpIncorrectListLen); + } + + let calldata: Vec = d.val_at(5)?; + + Self::try_from(calldata).map_err(|_| rlp::DecoderError::RlpIncorrectListLen) + } +} + +impl rlp::Decodable for EIP2718TransactionCallData { + fn decode(d: &Rlp) -> Result { + if d.item_count()? 
!= 12 { + return Err(rlp::DecoderError::RlpIncorrectListLen); + } + + let calldata: Vec = d.val_at(7)?; + + TransactionCalldata::try_from(calldata) + .map(Self) + .map_err(|_| rlp::DecoderError::RlpIncorrectListLen) + } +} + +impl From for TransactionCalldata { + fn from(EIP2718TransactionCallData(calldata): EIP2718TransactionCallData) -> Self { + calldata + } +} + +impl TryFrom> for TransactionCalldata { + type Error = usize; + + fn try_from(mut calldata: Vec) -> Result { + let selector = calldata + .get(0..4) + .ok_or(calldata.len())? + .try_into() + .unwrap(); + let data = calldata.split_off(4); + + Ok(TransactionCalldata { selector, data }) + } +} + +// Changes watched by the given `Filter`. +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum FilterChanges { + Hashes(Vec), + Logs(Vec), + Empty([u8; 0]), +} + +/// Represents all kinds of `Filter`. +#[derive(Debug, Clone)] +pub enum TypedFilter { + Events(Filter, zksync_types::MiniblockNumber), + Blocks(zksync_types::MiniblockNumber), + PendingTransactions(NaiveDateTime), +} + +/// Either value or array of values. 
+#[derive(Default, Debug, PartialEq, Clone)] +pub struct ValueOrArray(pub Vec); + +impl Serialize for ValueOrArray +where + T: Serialize, +{ + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self.0.len() { + 0 => serializer.serialize_none(), + 1 => Serialize::serialize(&self.0[0], serializer), + _ => Serialize::serialize(&self.0, serializer), + } + } +} + +impl<'de, T: std::fmt::Debug + Deserialize<'de>> ::serde::Deserialize<'de> for ValueOrArray { + fn deserialize(deserializer: D) -> Result + where + D: ::serde::Deserializer<'de>, + { + struct Visitor(PhantomData); + + impl<'de, T: std::fmt::Debug + Deserialize<'de>> de::Visitor<'de> for Visitor { + type Value = ValueOrArray; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("Expected value or sequence") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + use serde::de::IntoDeserializer; + + Deserialize::deserialize(value.into_deserializer()) + .map(|value| ValueOrArray(vec![value])) + } + + fn visit_seq(self, visitor: S) -> Result + where + S: de::SeqAccess<'de>, + { + unfold(visitor, |vis| vis.next_element().transpose()) + .collect::>() + .map(ValueOrArray) + } + } + + deserializer.deserialize_any(Visitor(PhantomData)) + } +} + +/// Filter +#[derive(Default, Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct Filter { + /// From Block + #[serde(rename = "fromBlock", skip_serializing_if = "Option::is_none")] + pub from_block: Option, + /// To Block + #[serde(rename = "toBlock", skip_serializing_if = "Option::is_none")] + pub to_block: Option, + /// Address + #[serde(skip_serializing_if = "Option::is_none")] + pub address: Option>, + /// Topics + #[serde(skip_serializing_if = "Option::is_none")] + pub topics: Option>>>, +} + +/// Filter Builder +#[derive(Default, Clone)] +pub struct FilterBuilder { + filter: Filter, +} + +impl FilterBuilder { + /// Sets from block + pub fn set_from_block(mut self, 
block: BlockNumber) -> Self { + self.filter.from_block = Some(block); + self + } + + /// Sets to block + pub fn set_to_block(mut self, block: BlockNumber) -> Self { + self.filter.to_block = Some(block); + self + } + + /// Single address + pub fn set_address(mut self, address: Vec) -> Self { + self.filter.address = Some(ValueOrArray(address)); + self + } + + /// Topics + pub fn set_topics( + mut self, + topic1: Option>, + topic2: Option>, + topic3: Option>, + topic4: Option>, + ) -> Self { + let mut topics = vec![topic1, topic2, topic3, topic4] + .into_iter() + .rev() + .skip_while(Option::is_none) + .map(|option| option.map(ValueOrArray)) + .collect::>(); + topics.reverse(); + + self.filter.topics = Some(topics); + self + } + + /// Sets the topics according to the given `ethabi` topic filter + pub fn set_topic_filter(self, topic_filter: ethabi::TopicFilter) -> Self { + self.set_topics( + topic_to_option(topic_filter.topic0), + topic_to_option(topic_filter.topic1), + topic_to_option(topic_filter.topic2), + topic_to_option(topic_filter.topic3), + ) + } + + /// Returns filter + pub fn build(&self) -> Filter { + self.filter.clone() + } +} + +#[derive(Default, Debug, PartialEq, Clone, Serialize, Deserialize)] +pub struct PubSubFilter { + #[serde(skip_serializing_if = "Option::is_none")] + pub address: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub topics: Option>>>, +} + +impl PubSubFilter { + pub fn matches(&self, log: &Log) -> bool { + if let Some(addresses) = &self.address { + if !addresses.0.contains(&log.address) { + return false; + } + } + if let Some(all_topics) = &self.topics { + for (idx, expected_topics) in all_topics.iter().enumerate() { + if let Some(expected_topics) = expected_topics { + if let Some(actual_topic) = log.topics.get(idx) { + if !expected_topics.0.contains(actual_topic) { + return false; + } + } else { + return false; + } + } + } + } + true + } +} + +#[derive(Default, Clone)] +pub struct PubSubFilterBuilder { + filter: 
PubSubFilter, +} + +impl PubSubFilterBuilder { + /// Single address + pub fn set_address(mut self, address: Vec) -> Self { + self.filter.address = Some(ValueOrArray(address)); + self + } + + /// Topics + pub fn set_topics( + mut self, + topic1: Option>, + topic2: Option>, + topic3: Option>, + topic4: Option>, + ) -> Self { + let mut topics = vec![topic1, topic2, topic3, topic4] + .into_iter() + .rev() + .skip_while(Option::is_none) + .map(|option| option.map(ValueOrArray)) + .collect::>(); + topics.reverse(); + + self.filter.topics = Some(topics); + self + } + + /// Sets the topics according to the given `ethabi` topic filter + pub fn set_topic_filter(self, topic_filter: ethabi::TopicFilter) -> Self { + self.set_topics( + topic_to_option(topic_filter.topic0), + topic_to_option(topic_filter.topic1), + topic_to_option(topic_filter.topic2), + topic_to_option(topic_filter.topic3), + ) + } + + /// Returns filter + pub fn build(&self) -> PubSubFilter { + self.filter.clone() + } +} + +/// Converts a `Topic` to an equivalent `Option>`, suitable for `FilterBuilder::topics` +fn topic_to_option(topic: ethabi::Topic) -> Option> { + match topic { + ethabi::Topic::Any => None, + ethabi::Topic::OneOf(v) => Some(v), + ethabi::Topic::This(t) => Some(vec![t]), + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum PubSubResult { + Header(BlockHeader), + Log(Log), + TxHash(H256), + Syncing(bool), +} + +#[cfg(test)] +mod tests { + use super::*; + use zksync_types::api::{BlockId, BlockIdVariant}; + + #[test] + fn get_block_number_serde() { + let test_vector = &[ + (r#""committed""#, BlockNumber::Committed), + (r#""finalized""#, BlockNumber::Finalized), + (r#""pending""#, BlockNumber::Pending), + (r#""latest""#, BlockNumber::Latest), + (r#""earliest""#, BlockNumber::Earliest), + (r#""0x1""#, BlockNumber::Number(1.into())), + (r#""0x10""#, BlockNumber::Number(16.into())), + ]; + + for (serialized_repr, deserialized_repr) in 
test_vector { + let serialized = serde_json::to_string(deserialized_repr).unwrap(); + assert_eq!(&serialized, serialized_repr); + + let deserialized: BlockNumber = serde_json::from_str(serialized_repr).unwrap(); + assert_eq!(&deserialized, deserialized_repr); + } + } + + #[test] + fn get_block_id_serde() { + let test_vector = &[ + ( + r#""0x0000000000000000000000000000000000000000000000000000000000000000""#, + BlockId::Hash(H256::default()), + ), + (r#""latest""#, BlockId::Number(BlockNumber::Latest)), + (r#""0x10""#, BlockId::Number(BlockNumber::Number(16.into()))), + ]; + + for (serialized_repr, deserialized_repr) in test_vector { + let serialized = serde_json::to_string(deserialized_repr).unwrap(); + assert_eq!(&serialized, serialized_repr); + + let deserialized: BlockId = serde_json::from_str(serialized_repr).unwrap(); + assert_eq!(&deserialized, deserialized_repr); + } + } + + #[test] + fn block_id_variant_serializing() { + let test_vector = &[ + (r#""latest""#, BlockId::Number(BlockNumber::Latest)), + (r#""0x10""#, BlockId::Number(BlockNumber::Number(16.into()))), + ( + r#"{"blockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"}"#, + BlockId::Hash(H256::default()), + ), + ( + r#"{"blockNumber": "0x10"}"#, + BlockId::Number(BlockNumber::Number(16.into())), + ), + ]; + + for (serialized_repr, expected_block_id) in test_vector { + let deserialized: BlockIdVariant = serde_json::from_str(serialized_repr).unwrap(); + let actual_block_id: BlockId = deserialized.into(); + assert_eq!(&actual_block_id, expected_block_id); + } + } +} diff --git a/core/tests/loadnext/Cargo.toml b/core/tests/loadnext/Cargo.toml new file mode 100644 index 000000000000..c8fb4318c9be --- /dev/null +++ b/core/tests/loadnext/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "loadnext" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" 
+keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # We don't want to publish our tests. + +[dependencies] +zksync = { path = "../../../sdk/zksync-rs", version = "0.3", features = ["mint"] } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_utils = { path = "../../lib/utils", version = "1.0" } +zksync_eth_signer = { path = "../../lib/eth_signer", version = "1.0" } +zksync_web3_decl = { path = "../../lib/web3_decl", version = "1.0" } +zksync_eth_client = { path = "../../lib/eth_client", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +vlog = { path = "../../lib/vlog", version = "1.0" } + +async-trait = "0.1" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +num = { version = "0.3.1", features = ["serde"] } +tokio = { version = "1", features = ["full"] } +futures = "0.3" +anyhow = "1.0" +rand = { version = "0.8", features = ["small_rng"] } +rand_distr = "0.4.3" +envy = "0.4" +hex = "0.4" +static_assertions = "1.1" +once_cell = "1.7" +thiserror = "1" +reqwest = { version = "0.11", features = ["blocking", "json"] } +regex = "1.7" diff --git a/core/tests/loadnext/README.md b/core/tests/loadnext/README.md new file mode 100644 index 000000000000..6ba12cd260a7 --- /dev/null +++ b/core/tests/loadnext/README.md @@ -0,0 +1,125 @@ +# Loadnext: the next generation loadtest for zkSync + +Loadnext is an utility for random stress-testing the zkSync server. It is capable of simulating the behavior of many +independent users of zkSync network, who are sending quasi-random requests to the server. + +It: + +- doesn't care whether the server is alive or not. At worst, it will just consider the test failed. No panics, no + mindless unwraps, yay. +- does a unique set of operations for each participating account. +- sends transactions and priority operations. +- sends incorrect transactions as well as correct ones and compares the outcome to the expected one. 
+- has an easy-to-extend command system that allows adding new types of actions to the flow. +- has an easy-to-extend report analysis system. + +Flaws: + +- It does not send API requests other than required to execute transactions. +- So far it has pretty primitive report system. + +## Launch + +In order to launch the test in the development scenario, you must first run server and prover (it is recommended to use +dummy prover), and then launch the test itself. + +```sh +# First terminal +zk server +# Second terminal +RUST_BACKTRACE=1 RUST_LOG=info,jsonrpsee_ws_client=error cargo run --bin loadnext +``` + +Without any configuration supplied, the test will fallback to the dev defaults: + +- Use one of the "rich" accounts in the private local Ethereum chain. +- Use a random ERC-20 token from `etc/tokens/localhost.json`. +- Connect to the localhost zkSync node and use localhost web3 API. + +**Note:** when running the loadtest in the localhost scenario, you **must** adjust the supported block chunks sizes. +Edit the `etc/env/dev/chain.toml` and set `block_chunk_sizes` to `[10,32,72,156,322,654]` and `aggregated_proof_sizes` +to `[1,4,8,18]`. Do not forget to re-compile configs after that. + +This is required because the loadtest relies on batches, which will not fit into smaller block sizes. + +## Configuration + +For cases when loadtest is launched outside of the localhost environment, configuration is provided via environment +variables. + +The following variables are required: + +```sh +# Address of the Ethereum web3 API. +L1_RPC_ADDRESS +# Ethereum private key of the wallet that has funds to perform a test (without `0x` prefix). +MASTER_WALLET_PK +# Amount of accounts to be used in test. +# This option configures the "width" of the test: +# how many concurrent operation flows will be executed. +ACCOUNTS_AMOUNT +# All of test accounts get split into groups that share the +# deployed contract address. 
This helps to emulate the behavior of +# sending `Execute` to the same contract and reading its events by +# single a group. This value should be less than or equal to `ACCOUNTS_AMOUNT`. +ACCOUNTS_GROUP_SIZE +# Amount of operations per account. +# This option configures the "length" of the test: +# how many individual operations each account of the test will execute. +OPERATIONS_PER_ACCOUNT +# Address of the ERC-20 token to be used in test. +# +# Token must satisfy two criteria: +# - Be supported by zkSync. +# - Have `mint` operation. +# +# Note that we use ERC-20 token since we can't easily mint a lot of ETH on +# Rinkeby or Ropsten without caring about collecting it back. +MAIN_TOKEN +# Path to test contracts bytecode and ABI required for sending +# deploy and execute L2 transactions. Each folder in the path is expected +# to have the following structure: +# . +# ├── bytecode +# └── abi.json +# Contract folder names names are not restricted. +# An example: +# . +# ├── erc-20 +# │   ├── bytecode +# │   └── abi.json +# └── simple-contract +# ├── bytecode +# └── abi.json +TEST_CONTRACTS_PATH +# Limits the number of simultaneous API requests being performed at any moment of time. +# +# Setting it to: +# - 0 turns off API requests. +# - `ACCOUNTS_AMOUNT` relieves the limit. +SYNC_API_REQUESTS_LIMIT +# zkSync Chain ID. +L2_CHAIN_ID +# Address of the zkSync web3 API. +L2_RPC_ADDRESS +``` + +Optional parameters: + +```sh +# Optional seed to be used in the test: normally you don't need to set the seed, +# but you can re-use seed from previous run to reproduce the sequence of operations locally. +# Seed must be represented as a hexadecimal string. +SEED +``` + +## Infrastructure relationship + +This crate is meant to be independent of the existing zkSync infrastructure. It is not integrated in `zk` and does not +rely on `zksync_config` crate, and is not meant to be. 
+ +The reason is that this application is meant to be used in CI, where any kind of configuration pre-requisites makes the +tool harder to use: + +- Additional tools (e.g. `zk`) must be shipped together with the application, or included into the docker container. +- Configuration that lies in files is harder to use in CI, due some sensitive data being stored in GITHUB_SECRETS. diff --git a/core/tests/loadnext/src/account/api_request_executor.rs b/core/tests/loadnext/src/account/api_request_executor.rs new file mode 100644 index 000000000000..82dc73d5a585 --- /dev/null +++ b/core/tests/loadnext/src/account/api_request_executor.rs @@ -0,0 +1,162 @@ +use std::time::Instant; + +use futures::SinkExt; +use once_cell::sync::OnceCell; +use rand::seq::IteratorRandom; +use regex::Regex; +use tokio::sync::Semaphore; + +use zksync::{ + error::{ClientError, RpcError}, + types::FilterBuilder, + EthNamespaceClient, +}; +use zksync_types::{api, ethabi::Contract, H256, U64}; +use zksync_web3_decl::jsonrpsee::types::error::{CallError, ErrorObject}; + +use super::AccountLifespan; +use crate::{ + command::{ApiRequest, ApiRequestType}, + constants::API_REQUEST_TIMEOUT, + report::{ApiActionType, ReportBuilder, ReportLabel}, + rng::LoadtestRng, +}; + +/// Shared semaphore which limits the number of accounts performing +/// API requests at any moment of time. +/// Lazily initialized by the first account accessing it. 
+static REQUEST_LIMITER: OnceCell = OnceCell::new(); + +impl AccountLifespan { + async fn execute_api_request(&mut self, request: ApiRequest) -> Result<(), ClientError> { + let request_result = + tokio::time::timeout(API_REQUEST_TIMEOUT, self.execute_api_request_inner(request)) + .await; + + match request_result { + Ok(result) => result.map_err(Into::into), + Err(_) => Err(ClientError::OperationTimeout), + } + } + + async fn execute_api_request_inner(&mut self, request: ApiRequest) -> Result<(), RpcError> { + let wallet = &self.wallet.wallet; + let ApiRequest { + request_type, + block_number, + } = request; + + match request_type { + ApiRequestType::BlockWithTxs => wallet + .provider + .get_block_by_number(block_number, true) + .await + .map(drop), + ApiRequestType::Balance => wallet + .get_balance(block_number, self.main_l2_token) + .await + .map(drop) + .map_err(|err| match err { + ClientError::RpcError(err) => err, + err => RpcError::Custom(err.to_string()), + }), + ApiRequestType::GetLogs => { + let topics = + random_topics(&self.wallet.test_contract.contract, &mut self.wallet.rng); + // Safety: `run_api_requests_task` checks whether the cell is initialized + // at every loop iteration and skips logs action if it's not. Thus, + // it's safe to unwrap it. + let contract_address = + unsafe { self.wallet.deployed_contract_address.get_unchecked() }; + + let to_block_number = match block_number { + api::BlockNumber::Number(number) => { + api::BlockNumber::Number(number + U64::from(99u64)) + } + _ => api::BlockNumber::Latest, + }; + let mut filter = FilterBuilder::default() + .set_from_block(block_number) + .set_to_block(to_block_number) + .set_topics(Some(topics), None, None, None) + .set_address(vec![*contract_address]) + .build(); + + let response = wallet.provider.get_logs(filter.clone()).await; + match response { + Err(RpcError::Call(err)) => { + let error_object: ErrorObject = err.into(); + let re = Regex::new(r"^Query returned more than \d* results\. 
Try with this block range \[0x([a-fA-F0-9]+), 0x([a-fA-F0-9]+)]\.$").unwrap(); + if let Some(caps) = re.captures(error_object.message()) { + filter.to_block = Some(api::BlockNumber::Number( + U64::from_str_radix(&caps[2], 16).unwrap(), + )); + wallet.provider.get_logs(filter).await.map(|_| ()) + } else { + Err(RpcError::Call(CallError::Custom(error_object))) + } + } + Err(err) => Err(err), + Ok(_) => Ok(()), + } + } + } + } + + pub(super) async fn run_api_requests_task(mut self) { + loop { + let semaphore = + REQUEST_LIMITER.get_or_init(|| Semaphore::new(self.config.sync_api_requests_limit)); + // The number of simultaneous requests is limited by semaphore. + let permit = semaphore + .acquire() + .await + .expect("static semaphore cannot be closed"); + + let request = ApiRequest::random(&self.wallet.wallet, &mut self.wallet.rng).await; + + let start = Instant::now(); + + // Skip the action if the contract is not yet initialized for the account. + let label = if let (ApiRequestType::GetLogs, None) = ( + request.request_type, + self.wallet.deployed_contract_address.get(), + ) { + ReportLabel::skipped("Contract not deployed yet") + } else { + let result = self.execute_api_request(request).await; + match result { + Ok(_) => ReportLabel::ActionDone, + Err(err) => { + let error = err.to_string(); + + vlog::error!("API request failed: {:?}, reason: {}", request, error); + ReportLabel::ActionFailed { error } + } + } + }; + + let api_action_type = ApiActionType::from(request); + + let report = ReportBuilder::default() + .action(api_action_type) + .label(label) + .time(start.elapsed()) + .reporter(self.wallet.wallet.address()) + .finish(); + drop(permit); + let _ = self.report_sink.send(report).await; + } + } +} + +pub fn random_topics(contract: &Contract, rng: &mut LoadtestRng) -> Vec { + let events_count = contract.events().count(); + let topics_num = (1..=events_count).choose(rng).unwrap(); + + contract + .events() + .take(topics_num) + .map(|event| event.signature()) + 
.collect() +} diff --git a/core/tests/loadnext/src/account/explorer_api_executor.rs b/core/tests/loadnext/src/account/explorer_api_executor.rs new file mode 100644 index 000000000000..4cf6d03ce1f3 --- /dev/null +++ b/core/tests/loadnext/src/account/explorer_api_executor.rs @@ -0,0 +1,317 @@ +use std::cmp; +use std::time::Instant; + +use futures::SinkExt; +use once_cell::sync::OnceCell; +use rand::{seq::SliceRandom, Rng}; +use rand_distr::Distribution; +use rand_distr::Normal; +use reqwest::{Response, StatusCode}; +use serde::{de::DeserializeOwned, Deserialize}; +use tokio::sync::Semaphore; + +use zksync::error::ClientError; +use zksync_types::explorer_api::{ + BlocksQuery, PaginationDirection, PaginationQuery, TransactionsQuery, +}; +use zksync_types::{Address, MiniblockNumber, H256}; + +use crate::account::AccountLifespan; +use crate::command::{ExplorerApiRequest, ExplorerApiRequestType}; +use crate::constants::API_REQUEST_TIMEOUT; +use crate::report::{ActionType, ReportBuilder, ReportLabel}; + +/// Shared semaphore which limits the number of accounts performing +/// API requests at any moment of time. +/// Lazily initialized by the first account accessing it. 
+static REQUEST_LIMITER: OnceCell = OnceCell::new(); + +#[derive(Debug, Clone)] +pub struct ExplorerApiClient { + pub client: reqwest::Client, + pub base_url: String, + pub last_sealed_block_number: Option, +} + +#[derive(Debug, Deserialize)] +pub struct NetworkStats { + pub last_sealed: MiniblockNumber, + pub last_verified: MiniblockNumber, + pub total_transactions: usize, +} + +// Client for explorer api, we don't use return values anywhere, so we return just json +impl ExplorerApiClient { + async fn response_to_result( + response: Response, + ) -> anyhow::Result> { + match response.status() { + StatusCode::OK => Ok(Some(response.json().await?)), + StatusCode::NOT_FOUND => Ok(None), + code => Err(anyhow::anyhow!("Unexpected status code: {}", code)), + } + } + + pub async fn network_stats(&mut self) -> anyhow::Result> { + let url = format!("{}/network_stats", &self.base_url); + let response = self.client.get(url).send().await?; + let result: anyhow::Result> = Self::response_to_result(response).await; + if let Ok(Some(stats)) = result.as_ref() { + self.last_sealed_block_number = Some(stats.last_sealed); + } + result + } + + pub async fn blocks( + &mut self, + query: BlocksQuery, + ) -> anyhow::Result> { + let url = format!("{}/blocks", &self.base_url); + let response = self.client.get(url).query(&query).send().await?; + Self::response_to_result(response).await + } + + pub async fn block(&mut self, number: u32) -> anyhow::Result> { + let url = format!("{}/block/{}", &self.base_url, number); + let response = self.client.get(url).send().await?; + Self::response_to_result(response).await + } + + pub async fn transaction(&mut self, hash: &H256) -> anyhow::Result> { + let url = format!("{}/transaction/{:?}", &self.base_url, hash); + let response = self.client.get(url).send().await?; + Self::response_to_result(response).await + } + + pub async fn transactions( + &mut self, + query: TransactionsQuery, + ) -> anyhow::Result> { + let url = format!("{}/transactions", 
&self.base_url); + let response = self.client.get(url).query(&query).send().await?; + Self::response_to_result(response).await + } + + pub async fn account( + &mut self, + address: &Address, + ) -> anyhow::Result> { + let url = format!("{}/account/{:?}", &self.base_url, address); + let response = self.client.get(url).send().await?; + Self::response_to_result(response).await + } + + pub async fn contract( + &mut self, + address: &Address, + ) -> anyhow::Result> { + let url = format!("{}/contract/{:?}", &self.base_url, address); + let response = self.client.get(url).send().await?; + Self::response_to_result(response).await + } + + pub async fn token(&mut self, address: &Address) -> anyhow::Result> { + let url = format!("{}/token/{:?}", &self.base_url, address); + let response = self.client.get(url).send().await?; + Self::response_to_result(response).await + } +} + +impl AccountLifespan { + async fn execute_explorer_api_request( + &mut self, + request: ExplorerApiRequest, + ) -> Result<(), anyhow::Error> { + let request_result = tokio::time::timeout( + API_REQUEST_TIMEOUT, + self.execute_explorer_api_request_inner(request), + ) + .await; + + match request_result { + Ok(result) => result.map_err(Into::into), + Err(_) => Err(ClientError::OperationTimeout)?, + } + } + + fn random_existed_block(&self) -> Option { + self.explorer_client.last_sealed_block_number.map(|number| { + let num = rand::thread_rng().gen_range(0..number.0); + MiniblockNumber(num) + }) + } + + async fn execute_explorer_api_request_inner( + &mut self, + request: ExplorerApiRequest, + ) -> Result<(), anyhow::Error> { + let ExplorerApiRequest { request_type } = request; + + match request_type { + ExplorerApiRequestType::NetworkStats => { + self.explorer_client.network_stats().await.map(drop) + } + ExplorerApiRequestType::Blocks => { + let from_block = self.random_existed_block(); + + // Offset should be less than last_block_number - from, otherwise no blocks will be returned for the request + // Offset 
should be less than 9990 because we have a limit for value (limit + offset) <= 10000 + let offset = from_block + .map(|bl| { + let last_block = self.explorer_client.last_sealed_block_number.unwrap(); + rand::thread_rng() + .gen_range(0..std::cmp::min((last_block - bl.0).0, 9900) as usize) + }) + .unwrap_or_default(); + + self.explorer_client + .blocks(BlocksQuery { + from: from_block, + pagination: PaginationQuery { + limit: 100, + offset, + direction: PaginationDirection::Newer, + }, + }) + .await + .map(drop) + } + ExplorerApiRequestType::Block => { + let block = self.random_existed_block().map(|b| *b).unwrap_or(1); + self.explorer_client.block(block).await.map(drop) + } + ExplorerApiRequestType::Account => self + .explorer_client + .account(&self.wallet.wallet.address()) + .await + .map(drop), + ExplorerApiRequestType::Transaction => { + let tx = self + .successfully_sent_txs + .read() + .await + .choose(&mut self.wallet.rng) + .copied() + .expect("We skip such requests if success_tx is empty"); + self.explorer_client.transaction(&tx).await.map(drop) + } + ExplorerApiRequestType::Contract => { + let contract = self + .wallet + .deployed_contract_address + .get() + .expect("We skip such requests if contract is none"); + self.explorer_client.contract(contract).await.map(drop) + } + ExplorerApiRequestType::Token => self + .explorer_client + .token(&self.main_l2_token) + .await + .map(drop), + ExplorerApiRequestType::Transactions => { + let from_block = self.random_existed_block(); + let offset = self.get_normally_distributed_offset(3000.0, 9900); + self.explorer_client + .transactions(TransactionsQuery { + from_block_number: from_block, + from_tx_index: None, + block_number: None, + address: None, + account_address: None, + contract_address: None, + pagination: PaginationQuery { + limit: 100, + offset, + direction: PaginationDirection::Newer, + }, + }) + .await + .map(drop) + } + ExplorerApiRequestType::AccountTransactions => { + let from_block = 
self.random_existed_block(); + let offset = self.get_normally_distributed_offset(3000.0, 9900); + + self.explorer_client + .transactions(TransactionsQuery { + from_block_number: from_block, + from_tx_index: None, + block_number: None, + address: None, + account_address: Some(self.wallet.wallet.address()), + contract_address: None, + pagination: PaginationQuery { + limit: 100, + offset, + direction: PaginationDirection::Newer, + }, + }) + .await + .map(drop) + } + } + } + + fn get_normally_distributed_offset(&self, std_dev: f32, limit: usize) -> usize { + let normal = Normal::new(0.0, std_dev).unwrap(); + let v: f32 = normal.sample(&mut rand::thread_rng()); + + let offset = v.abs() as usize; + cmp::min(offset, limit) + } + + pub(super) async fn run_explorer_api_requests_task(mut self) { + // Setup current last block + let _ = self.explorer_client.network_stats().await; + loop { + let semaphore = + REQUEST_LIMITER.get_or_init(|| Semaphore::new(self.config.sync_api_requests_limit)); + // The number of simultaneous requests is limited by semaphore. 
+ let permit = semaphore + .acquire() + .await + .expect("static semaphore cannot be closed"); + + let request = ExplorerApiRequest::random(&mut self.wallet.rng).await; + + let start = Instant::now(); + let mut empty_success_txs = true; + if request.request_type == ExplorerApiRequestType::Transaction { + empty_success_txs = self.successfully_sent_txs.read().await.is_empty(); + } + + let label = if let (ExplorerApiRequestType::Contract, None) = ( + request.request_type, + self.wallet.deployed_contract_address.get(), + ) { + ReportLabel::skipped("Contract not deployed yet") + } else if let (ExplorerApiRequestType::Transaction, true) = + (request.request_type, empty_success_txs) + { + ReportLabel::skipped("No one txs has been submitted yet") + } else { + let result = self.execute_explorer_api_request(request).await; + match result { + Ok(_) => ReportLabel::ActionDone, + Err(err) => { + let error = err.to_string(); + + vlog::error!("API request failed: {:?}, reason: {}", request, error); + ReportLabel::ActionFailed { error } + } + } + }; + + let api_action_type = ActionType::from(request.request_type); + + let report = ReportBuilder::default() + .action(api_action_type) + .label(label) + .time(start.elapsed()) + .reporter(self.wallet.wallet.address()) + .finish(); + drop(permit); + let _ = self.report_sink.send(report).await; + } + } +} diff --git a/core/tests/loadnext/src/account/mod.rs b/core/tests/loadnext/src/account/mod.rs new file mode 100644 index 000000000000..cafa38d10385 --- /dev/null +++ b/core/tests/loadnext/src/account/mod.rs @@ -0,0 +1,384 @@ +use futures::{channel::mpsc, FutureExt, SinkExt}; +use std::{ + collections::VecDeque, + future::Future, + sync::Arc, + time::{Duration, Instant}, +}; +use tokio::{sync::RwLock, time::sleep}; +use zksync_utils::test_utils::LoadnextContractExecutionParams; + +use zksync::{error::ClientError, operations::SyncTransactionHandle, HttpClient}; +use zksync_types::{ + api::{TransactionReceipt, U64}, + Address, Nonce, H256, 
U256, +}; +use zksync_web3_decl::jsonrpsee; + +use crate::{ + account::{explorer_api_executor::ExplorerApiClient, tx_command_executor::SubmitResult}, + account_pool::{AddressPool, TestWallet}, + command::{ExpectedOutcome, IncorrectnessModifier, TxCommand, TxType}, + config::LoadtestConfig, + constants::POLLING_INTERVAL, + report::{Report, ReportBuilder, ReportLabel}, +}; + +mod api_request_executor; +mod explorer_api_executor; +mod pubsub_executor; +mod tx_command_executor; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ExecutionType { + L1, + L2, +} + +/// Tx that has been sent to the server but has not yet received a receipt +#[derive(Debug, Clone)] +struct InflightTx { + tx_hash: H256, + attempt: usize, + start: Instant, + command: TxCommand, +} + +/// Account lifespan represents a flow of a single account: +/// it will send transactions, both correct and incorrect, and will check +/// whether outcome matches expected one. +/// +/// This structure is expected to not care about the server behavior; even if the server is down, it will only cause +/// performed actions to be considered failed. +#[derive(Debug, Clone)] +pub struct AccountLifespan { + /// Wallet used to perform the test. + pub wallet: TestWallet, + /// Client for explorer api + pub explorer_client: ExplorerApiClient, + config: LoadtestConfig, + contract_execution_params: LoadnextContractExecutionParams, + /// Pool of account addresses, used to generate commands. + addresses: AddressPool, + /// Successful transactions, required for requesting api + successfully_sent_txs: Arc>>, + /// L1 ERC-20 token used in the test. + main_l1_token: Address, + /// L2 ERC-20 token used in the test. + main_l2_token: Address, + /// Address of the paymaster used in the test. + paymaster_address: Address, + /// Channel for sending reports about performed operations. 
+ report_sink: mpsc::Sender, + /// Pool of sent but not yet executed txs + inflight_txs: VecDeque, + /// Current account nonce, it is None at the beginning and will be set after the first transaction + current_nonce: Option, +} + +impl AccountLifespan { + pub fn new( + config: &LoadtestConfig, + contract_execution_params: LoadnextContractExecutionParams, + addresses: AddressPool, + test_account: TestWallet, + report_sink: mpsc::Sender, + main_l2_token: Address, + paymaster_address: Address, + ) -> Self { + let explorer_client = ExplorerApiClient { + client: Default::default(), + base_url: config.l2_explorer_api_address.clone(), + last_sealed_block_number: None, + }; + + Self { + wallet: test_account, + explorer_client, + config: config.clone(), + contract_execution_params, + addresses, + successfully_sent_txs: Default::default(), + main_l1_token: config.main_token, + main_l2_token, + paymaster_address, + + report_sink, + inflight_txs: Default::default(), + current_nonce: None, + } + } + + pub async fn run(self) { + let duration = self.config.duration(); + let mut tx_execution_task = Box::pin(self.clone().run_tx_execution()).fuse(); + let mut api_requests_task = Box::pin(self.clone().run_api_requests_task()).fuse(); + let mut api_explorer_requests_task = + Box::pin(self.clone().run_explorer_api_requests_task()).fuse(); + let mut pubsub_task = Box::pin(self.run_pubsub_task()).fuse(); + let mut sleep_task = Box::pin(sleep(duration)).fuse(); + + futures::select! { + () = tx_execution_task => {}, + () = api_requests_task => { + vlog::error!("API requests task unexpectedly finished first"); + }, + () = api_explorer_requests_task => { + vlog::error!("Explorer API requests task unexpectedly finished first"); + }, + () = pubsub_task => { + vlog::error!("PubSub task unexpectedly finished first"); + }, + () = sleep_task => {} + } + } + + async fn run_tx_execution(mut self) { + // Every account starts with deploying a contract. 
+ let deploy_command = TxCommand { + command_type: TxType::DeployContract, + modifier: IncorrectnessModifier::None, + to: Address::zero(), + amount: U256::zero(), + }; + self.execute_command(deploy_command.clone()).await; + self.wait_for_all_inflight_tx().await; + + let mut timer = tokio::time::interval(POLLING_INTERVAL); + loop { + let command = self.generate_command(); + // The new transaction should be sent only if mempool is not full + loop { + if self.inflight_txs.len() >= self.config.max_inflight_txs { + timer.tick().await; + self.check_inflight_txs().await; + } else { + self.execute_command(command.clone()).await; + break; + } + } + } + } + + async fn wait_for_all_inflight_tx(&mut self) { + let mut timer = tokio::time::interval(POLLING_INTERVAL); + while !self.inflight_txs.is_empty() { + timer.tick().await; + self.check_inflight_txs().await; + } + } + + async fn check_inflight_txs(&mut self) { + // No need to wait for confirmation for all tx, one check for each tx is enough. + // If some txs haven't been processed yet, we'll check them in the next iteration. + // Due to natural sleep for sending tx, usually more than 1 tx can be already + // processed and have a receipt + while let Some(tx) = self.inflight_txs.pop_front() { + if let Ok(Some(transaction_receipt)) = + self.get_tx_receipt_for_committed_block(tx.tx_hash).await + { + let label = self.verify_receipt( + &transaction_receipt, + &tx.command.modifier.expected_outcome(), + ); + self.report(label, tx.start.elapsed(), tx.attempt, tx.command) + .await; + } else { + self.inflight_txs.push_front(tx); + break; + } + } + } + + fn verify_receipt( + &self, + transaction_receipt: &TransactionReceipt, + expected_outcome: &ExpectedOutcome, + ) -> ReportLabel { + match expected_outcome { + ExpectedOutcome::TxSucceed if transaction_receipt.status == Some(U64::one()) => { + // If it was a successful `DeployContract` transaction, set the contract + // address for subsequent usage by `Execute`. 
+ if let Some(address) = transaction_receipt.contract_address { + // An error means that the contract is already initialized. + let _ = self.wallet.deployed_contract_address.set(address); + } + + // Transaction succeed and it should have. + ReportLabel::done() + } + ExpectedOutcome::TxRejected if transaction_receipt.status == Some(U64::zero()) => { + // Transaction failed and it should have. + ReportLabel::done() + } + other => { + // Transaction status didn't match expected one. + let error = format!( + "Unexpected transaction status: expected {:#?} because of modifier {:?}, receipt {:#?}", + other, expected_outcome, transaction_receipt + ); + ReportLabel::failed(&error) + } + } + } + + /// Executes a command with support of retries: + /// If command fails due to the network/API error, it will be retried multiple times + /// before considering it completely failed. Such an approach makes us a bit more resilient to + /// volatile errors such as random connection drop or insufficient fee error. + async fn execute_command(&mut self, command: TxCommand) { + // We consider API errors to be somewhat likely, thus we will retry the operation if it fails + // due to connection issues. + const MAX_RETRIES: usize = 3; + + let mut attempt = 0; + loop { + let start = Instant::now(); + let result = self.execute_tx_command(&command).await; + + let submit_result = match result { + Ok(result) => result, + Err(ClientError::NetworkError(_)) | Err(ClientError::OperationTimeout) => { + if attempt < MAX_RETRIES { + // Retry operation. + attempt += 1; + continue; + } + + // We reached the maximum amount of retries. + let error = format!( + "Retries limit reached. Latest error: {}", + result.unwrap_err() + ); + SubmitResult::ReportLabel(ReportLabel::failed(&error)) + } + Err(err) => { + // Other kinds of errors should not be handled, we will just report them. 
+ SubmitResult::ReportLabel(ReportLabel::failed(&err.to_string())) + } + }; + + match submit_result { + SubmitResult::TxHash(tx_hash) => { + self.inflight_txs.push_back(InflightTx { + tx_hash, + start, + attempt, + command: command.clone(), + }); + self.successfully_sent_txs.write().await.push(tx_hash) + } + SubmitResult::ReportLabel(label) => { + // Make a report if it was some problems in sending tx + self.report(label, start.elapsed(), attempt, command).await + } + }; + + // We won't continue the loop unless `continue` was manually called. + break; + } + } + + pub async fn reset_nonce(&mut self) { + let nonce = Nonce(self.wallet.wallet.get_nonce().await.unwrap()); + self.current_nonce = Some(nonce); + } + + /// Builds a report and sends it. + async fn report( + &mut self, + label: ReportLabel, + time: Duration, + retries: usize, + command: TxCommand, + ) { + if let ReportLabel::ActionFailed { error } = &label { + vlog::error!( + "Command failed: from {:?}, {:#?} (${})", + self.wallet.wallet.address(), + command, + error + ) + } + + let report = ReportBuilder::new() + .label(label) + .reporter(self.wallet.wallet.address()) + .time(time) + .retries(retries) + .action(command) + .finish(); + + if let Err(_err) = self.report_sink.send(report).await { + // It's not that important if report will be skipped. + vlog::trace!("Failed to send report to the sink"); + }; + } + + /// Generic submitter for zkSync network: it can operate individual transactions, + /// as long as we can provide a `SyncTransactionHandle` to wait for the commitment and the + /// execution result. + /// Once result is obtained, it's compared to the expected operation outcome in order to check whether + /// command was completed as planned. 
+ async fn submit<'a, F, Fut>( + &'a mut self, + modifier: IncorrectnessModifier, + send: F, + ) -> Result + where + F: FnOnce() -> Fut, + Fut: Future, ClientError>>, + { + let expected_outcome = modifier.expected_outcome(); + + let send_result = send().await; + + let submit_result = match (expected_outcome, send_result) { + (ExpectedOutcome::ApiRequestFailed, Ok(_handle)) => { + // Transaction got accepted, but should have not been. + let error = "Tx was accepted, but should have not been"; + SubmitResult::ReportLabel(ReportLabel::failed(error)) + } + (_, Ok(handle)) => { + // Transaction should have been accepted by API and it was; now wait for the commitment. + SubmitResult::TxHash(handle.hash()) + } + (ExpectedOutcome::ApiRequestFailed, Err(_error)) => { + // Transaction was expected to be rejected and it was. + SubmitResult::ReportLabel(ReportLabel::done()) + } + (_, Err(err)) => { + // Transaction was expected to be accepted, but was rejected. + if let ClientError::RpcError(jsonrpsee::core::Error::Call(err)) = &err { + let message = match err { + jsonrpsee::types::error::CallError::InvalidParams(err) => err.to_string(), + jsonrpsee::types::error::CallError::Failed(err) => err.to_string(), + jsonrpsee::types::error::CallError::Custom(err) => { + err.message().to_string() + } + }; + if message.contains("nonce is incorrect") { + self.reset_nonce().await; + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped(&message))); + } + } + + let error = format!( + "Tx should have been accepted, but got rejected. Reason: {:?}", + err + ); + SubmitResult::ReportLabel(ReportLabel::failed(&error)) + } + }; + Ok(submit_result) + } + + /// Prepares a list of random operations to be executed by an account. 
+ fn generate_command(&mut self) -> TxCommand { + TxCommand::random( + &mut self.wallet.rng, + self.wallet.wallet.address(), + &self.addresses, + ) + } +} diff --git a/core/tests/loadnext/src/account/pubsub_executor.rs b/core/tests/loadnext/src/account/pubsub_executor.rs new file mode 100644 index 000000000000..54d05e67fcd6 --- /dev/null +++ b/core/tests/loadnext/src/account/pubsub_executor.rs @@ -0,0 +1,127 @@ +use std::time::{Duration, Instant}; + +use futures::SinkExt; +use once_cell::sync::OnceCell; +use tokio::sync::Semaphore; + +use zksync::error::ClientError; +use zksync_web3_decl::{ + jsonrpsee::{ + core::client::{Subscription, SubscriptionClientT}, + rpc_params, + ws_client::WsClientBuilder, + }, + types::PubSubResult, +}; + +use super::AccountLifespan; +use crate::{ + command::SubscriptionType, + report::{ReportBuilder, ReportLabel}, + rng::WeightedRandom, +}; +use zksync::types::PubSubFilterBuilder; + +/// Shared semaphore which limits the number of active subscriptions. +/// Lazily initialized by the first account accessing it. +static REQUEST_LIMITER: OnceCell = OnceCell::new(); + +impl AccountLifespan { + async fn start_single_subscription_task( + &mut self, + subscription_type: SubscriptionType, + ) -> Result<(), ClientError> { + let client = WsClientBuilder::default() + .build(&self.config.l2_ws_rpc_address) + .await?; + let params = match subscription_type { + SubscriptionType::Logs => { + let topics = super::api_request_executor::random_topics( + &self.wallet.test_contract.contract, + &mut self.wallet.rng, + ); + // Safety: `run_pubsub_task` checks whether the cell is initialized + // at every loop iteration and skips logs action if it's not. Thus, + // it's safe to unwrap it. 
+ let contract_address = + unsafe { self.wallet.deployed_contract_address.get_unchecked() }; + let filter = PubSubFilterBuilder::default() + .set_topics(Some(topics), None, None, None) + .set_address(vec![*contract_address]) + .build(); + rpc_params![subscription_type.rpc_name(), filter] + } + _ => rpc_params![subscription_type.rpc_name()], + }; + let mut subscription: Subscription = client + .subscribe("eth_subscribe", params, "eth_unsubscribe") + .await?; + let start = Instant::now(); + let subscription_duration = Duration::from_secs(self.config.single_subscription_time_secs); + loop { + if let Ok(resp) = tokio::time::timeout(subscription_duration, subscription.next()).await + { + match resp { + None => return Err(ClientError::OperationTimeout), + Some(Err(err)) => return Err(err.into()), + _ => {} + } + } + if start.elapsed() > subscription_duration { + break; + } + } + Ok(()) + } + + #[allow(clippy::let_underscore_future)] + pub(super) async fn run_pubsub_task(self) { + loop { + let semaphore = REQUEST_LIMITER + .get_or_init(|| Semaphore::new(self.config.sync_pubsub_subscriptions_limit)); + // The number of simultaneous subscriptions is limited by semaphore. + let permit = semaphore + .acquire() + .await + .expect("static semaphore cannot be closed"); + let mut self_ = self.clone(); + let _ = tokio::spawn(async move { + let subscription_type = SubscriptionType::random(&mut self_.wallet.rng); + let start = Instant::now(); + + // Skip the action if the contract is not yet initialized for the account. + let label = if let (SubscriptionType::Logs, None) = ( + subscription_type, + self_.wallet.deployed_contract_address.get(), + ) { + ReportLabel::skipped("Contract not deployed yet") + } else { + let result = self_ + .start_single_subscription_task(subscription_type) + .await; + match result { + Ok(_) => ReportLabel::ActionDone, + Err(err) => { + let error = err.to_string(); + // Subscriptions can fail for a variety of reasons - no need to escalate it. 
+ vlog::warn!( + "Subscription failed: {:?}, reason: {}", + subscription_type, + error + ); + ReportLabel::ActionFailed { error } + } + } + }; + let report = ReportBuilder::default() + .action(subscription_type) + .label(label) + .time(start.elapsed()) + .reporter(self_.wallet.wallet.address()) + .finish(); + drop(permit); + let _ = self_.report_sink.send(report).await; + }); + } + } +} diff --git a/core/tests/loadnext/src/account/tx_command_executor.rs b/core/tests/loadnext/src/account/tx_command_executor.rs new file mode 100644 index 000000000000..ca7fdd92c743 --- /dev/null +++ b/core/tests/loadnext/src/account/tx_command_executor.rs @@ -0,0 +1,445 @@ +use zksync::web3::ethabi; +use zksync::EthNamespaceClient; +use zksync::{ + error::ClientError, + ethereum::PriorityOpHolder, + utils::{ + get_approval_based_paymaster_input, get_approval_based_paymaster_input_for_estimation, + }, +}; +use zksync_config::constants::MAX_L1_TRANSACTION_GAS_LIMIT; +use zksync_eth_client::EthInterface; +use zksync_types::{ + api::{BlockNumber, TransactionReceipt}, + l2::L2Tx, + Address, H256, U256, +}; + +use crate::account::ExecutionType; +use crate::{ + account::AccountLifespan, + command::{IncorrectnessModifier, TxCommand, TxType}, + constants::{ETH_CONFIRMATION_TIMEOUT, ETH_POLLING_INTERVAL}, + corrupted_tx::Corrupted, + report::ReportLabel, +}; + +#[derive(Debug)] +pub enum SubmitResult { + TxHash(H256), + ReportLabel(ReportLabel), +} + +impl AccountLifespan { + pub(super) async fn execute_tx_command( + &mut self, + command: &TxCommand, + ) -> Result { + match command.command_type { + TxType::WithdrawToOther | TxType::WithdrawToSelf => { + self.execute_withdraw(command).await + } + TxType::Deposit => self.execute_deposit(command).await, + TxType::DeployContract => self.execute_deploy_contract(command).await, + TxType::L2Execute => { + self.execute_loadnext_contract(command, ExecutionType::L2) + .await + } + TxType::L1Execute => { + self.execute_loadnext_contract(command, 
ExecutionType::L1) + .await + } + } + } + + fn tx_creation_error(err: ClientError) -> ClientError { + // Translate network errors (so operation will be retried), but don't accept other ones. + // For example, we will retry operation if fee ticker returned an error, + // but will panic if transaction cannot be signed. + match err { + ClientError::NetworkError(_) + | ClientError::RpcError(_) + | ClientError::MalformedResponse(_) => err, + _ => panic!("Transaction should be correct"), + } + } + + async fn apply_modifier(&self, tx: L2Tx, modifier: IncorrectnessModifier) -> L2Tx { + let wallet = &self.wallet.wallet; + tx.apply_modifier(modifier, &wallet.signer).await + } + + /// Returns the balances for ETH and the main token on the L1. + /// This function is used to check whether the L1 operation can be performed or should be + /// skipped. + async fn l1_balances(&self) -> Result<(U256, U256), ClientError> { + let wallet = &self.wallet.wallet; + let ethereum = wallet.ethereum(&self.config.l1_rpc_address).await?; + let eth_balance = ethereum.balance().await?; + let erc20_balance = ethereum + .erc20_balance(wallet.address(), self.main_l1_token) + .await?; + + Ok((eth_balance, erc20_balance)) + } + + async fn execute_deposit(&self, command: &TxCommand) -> Result { + let wallet = &self.wallet.wallet; + + let (eth_balance, erc20_balance) = self.l1_balances().await?; + if eth_balance.is_zero() || erc20_balance < command.amount { + // We don't have either funds in L1 to pay for tx or to deposit. + // It's not a problem with the server, thus we mark this operation as skipped. 
+ return Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "No L1 balance", + ))); + } + + let mut ethereum = wallet.ethereum(&self.config.l1_rpc_address).await?; + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + let gas_price = ethereum + .client() + .get_gas_price("executor") + .await + .map_err(|_| ClientError::Other)?; + + // We should check whether we've previously approved ERC-20 deposits. + let deposits_allowed = ethereum + .is_erc20_deposit_approved(self.main_l1_token, None) + .await?; + if !deposits_allowed { + let approve_tx_hash = ethereum + .approve_erc20_token_deposits(self.main_l1_token, None) + .await?; + // Before submitting the deposit, wait for the approve transaction confirmation. + match ethereum.wait_for_tx(approve_tx_hash).await { + Ok(receipt) => { + if receipt.status != Some(1.into()) { + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "Approve transaction failed", + ))); + } + } + Err(_) => { + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "Approve transaction failed", + ))); + } + } + } + + let eth_balance = ethereum.balance().await?; + if eth_balance < gas_price * U256::from(MAX_L1_TRANSACTION_GAS_LIMIT) { + // We don't have either funds in L1 to pay for tx or to deposit. + // It's not a problem with the server, thus we mark this operation as skipped. + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "Not enough L1 balance", + ))); + } + let eth_tx_hash = match ethereum + .deposit( + self.main_l1_token, + command.amount, + wallet.address(), + None, + None, + None, + ) + .await + { + Ok(hash) => hash, + Err(err) => { + // Most likely we don't have enough ETH to perform operations. + // Just mark the operations as skipped. + let reason = format!("Unable to perform an L1 operation. 
Reason: {}", err); + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped(&reason))); + } + }; + + self.get_priority_op_l2_hash(eth_tx_hash).await + } + + async fn get_priority_op_l2_hash( + &self, + eth_tx_hash: H256, + ) -> Result { + let wallet = &self.wallet.wallet; + + let mut ethereum = wallet.ethereum(&self.config.l1_rpc_address).await?; + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + + let receipt = ethereum.wait_for_tx(eth_tx_hash).await?; + + match receipt.priority_op() { + Some(tx_common_data) => Ok(SubmitResult::TxHash(tx_common_data.canonical_tx_hash)), + None => { + // Probably we did something wrong, no big deal. + Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "Ethereum transaction for deposit failed", + ))) + } + } + } + + async fn execute_submit( + &mut self, + tx: L2Tx, + modifier: IncorrectnessModifier, + ) -> Result { + let nonce = tx.nonce(); + let result = match modifier { + IncorrectnessModifier::IncorrectSignature => { + let wallet = self.wallet.corrupted_wallet.clone(); + self.submit(modifier, || async { wallet.send_transaction(tx).await }) + .await + } + _ => { + let wallet = self.wallet.wallet.clone(); + self.submit(modifier, || async { wallet.send_transaction(tx).await }) + .await + } + }?; + + // Update current nonce for future txs + // If the transaction has a tx_hash and is small enough to be included in a block, this tx will change the nonce. + // We can be sure that the nonce will be changed based on this assumption. 
+ if let SubmitResult::TxHash(_) = &result { + self.current_nonce = Some(nonce + 1) + } + + Ok(result) + } + + async fn execute_withdraw(&mut self, command: &TxCommand) -> Result { + let tx = self.build_withdraw(command).await?; + self.execute_submit(tx, command.modifier).await + } + + pub(super) async fn build_withdraw(&self, command: &TxCommand) -> Result { + let wallet = self.wallet.wallet.clone(); + + let mut builder = wallet + .start_withdraw() + .to(command.to) + .amount(command.amount) + .token(self.main_l2_token); + + let fee = builder + .estimate_fee(Some(get_approval_based_paymaster_input_for_estimation( + self.paymaster_address, + self.main_l2_token, + ))) + .await?; + builder = builder.fee(fee.clone()); + + let paymaster_params = get_approval_based_paymaster_input( + self.paymaster_address, + self.main_l2_token, + fee.max_total_fee(), + Vec::new(), + ); + builder = builder.fee(fee); + builder = builder.paymaster_params(paymaster_params); + + if let Some(nonce) = self.current_nonce { + builder = builder.nonce(nonce); + } + + let tx = builder.tx().await.map_err(Self::tx_creation_error)?; + + Ok(self.apply_modifier(tx, command.modifier).await) + } + + async fn execute_deploy_contract( + &mut self, + command: &TxCommand, + ) -> Result { + let tx = self.build_deploy_loadnext_contract(command).await?; + self.execute_submit(tx, command.modifier).await + } + + async fn build_deploy_loadnext_contract( + &self, + command: &TxCommand, + ) -> Result { + let wallet = self.wallet.wallet.clone(); + let constructor_calldata = ethabi::encode(&[ethabi::Token::Uint(U256::from( + self.contract_execution_params.reads, + ))]); + + let mut builder = wallet + .start_deploy_contract() + .bytecode(self.wallet.test_contract.bytecode.clone()) + .constructor_calldata(constructor_calldata); + + let fee = builder + .estimate_fee(Some(get_approval_based_paymaster_input_for_estimation( + self.paymaster_address, + self.main_l2_token, + ))) + .await?; + builder = 
builder.fee(fee.clone()); + + let paymaster_params = get_approval_based_paymaster_input( + self.paymaster_address, + self.main_l2_token, + fee.max_total_fee(), + Vec::new(), + ); + builder = builder.fee(fee); + builder = builder.paymaster_params(paymaster_params); + + if let Some(nonce) = self.current_nonce { + builder = builder.nonce(nonce); + } + + let tx = builder.tx().await.map_err(Self::tx_creation_error)?; + + Ok(self.apply_modifier(tx, command.modifier).await) + } + + async fn execute_loadnext_contract( + &mut self, + command: &TxCommand, + execution_type: ExecutionType, + ) -> Result { + let contract_address = match self.wallet.deployed_contract_address.get() { + Some(address) => *address, + None => { + return Ok(SubmitResult::ReportLabel(ReportLabel::skipped( + "Account haven't successfully deployed a contract yet", + ))); + } + }; + + match execution_type { + ExecutionType::L1 => { + let calldata = self.prepare_calldata_for_loadnext_contract(); + let ethereum = self + .wallet + .wallet + .ethereum(&self.config.l1_rpc_address) + .await?; + let tx_hash = match ethereum + .request_execute( + contract_address, + U256::zero(), + calldata, + U256::from(2_000_000u32), + Some(self.wallet.test_contract.factory_deps.clone()), + None, + None, + Default::default(), + ) + .await + { + Ok(hash) => hash, + Err(err) => { + return Ok(SubmitResult::ReportLabel(ReportLabel::failed( + &err.to_string(), + ))); + } + }; + self.get_priority_op_l2_hash(tx_hash).await + } + + ExecutionType::L2 => { + let tx = self + .build_execute_loadnext_contract(command, contract_address) + .await?; + + self.execute_submit(tx, command.modifier).await + } + } + } + + fn prepare_calldata_for_loadnext_contract(&self) -> Vec { + let contract = &self.wallet.test_contract.contract; + let function = contract.function("execute").unwrap(); + function + .encode_input(&vec![ + ethabi::Token::Uint(U256::from(self.contract_execution_params.reads)), + 
ethabi::Token::Uint(U256::from(self.contract_execution_params.writes)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.hashes)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.events)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.recursive_calls)), + ethabi::Token::Uint(U256::from(self.contract_execution_params.deploys)), + ]) + .expect("failed to encode parameters when creating calldata") + } + + async fn build_execute_loadnext_contract( + &mut self, + command: &TxCommand, + contract_address: Address, + ) -> Result { + let wallet = &self.wallet.wallet; + + let calldata = self.prepare_calldata_for_loadnext_contract(); + let mut builder = wallet + .start_execute_contract() + .calldata(calldata) + .contract_address(contract_address) + .factory_deps(self.wallet.test_contract.factory_deps.clone()); + + let fee = builder + .estimate_fee(Some(get_approval_based_paymaster_input_for_estimation( + self.paymaster_address, + self.main_l2_token, + ))) + .await?; + builder = builder.fee(fee.clone()); + + let paymaster_params = get_approval_based_paymaster_input( + self.paymaster_address, + self.main_l2_token, + fee.max_total_fee(), + Vec::new(), + ); + builder = builder.fee(fee); + builder = builder.paymaster_params(paymaster_params); + + if let Some(nonce) = self.current_nonce { + builder = builder.nonce(nonce); + } + + let tx = builder.tx().await.map_err(Self::tx_creation_error)?; + + Ok(self.apply_modifier(tx, command.modifier).await) + } + + pub(crate) async fn get_tx_receipt_for_committed_block( + &mut self, + tx_hash: H256, + ) -> Result, ClientError> { + let response = self + .wallet + .wallet + .provider + .get_transaction_receipt(tx_hash) + .await?; + + let receipt = if response.as_ref().and_then(|r| r.block_number).is_some() { + response.unwrap() + } else { + return Ok(None); + }; + + let block_number = receipt.block_number.unwrap(); + + let response = self + .wallet + .wallet + .provider + 
.get_block_by_number(BlockNumber::Committed, false) + .await?; + if let Some(received_number) = response.map(|block| block.number) { + if block_number <= received_number { + return Ok(Some(receipt)); + } + } + Ok(None) + } +} diff --git a/core/tests/loadnext/src/account_pool.rs b/core/tests/loadnext/src/account_pool.rs new file mode 100644 index 000000000000..385540badde3 --- /dev/null +++ b/core/tests/loadnext/src/account_pool.rs @@ -0,0 +1,176 @@ +use std::{collections::VecDeque, str::FromStr, sync::Arc, time::Duration}; + +use once_cell::sync::OnceCell; +use rand::Rng; +use tokio::time::timeout; + +use zksync::{signer::Signer, HttpClient, HttpClientBuilder, Wallet, ZksNamespaceClient}; +use zksync_eth_signer::PrivateKeySigner; +use zksync_types::{tx::primitives::PackedEthSignature, Address, L2ChainId, H256}; + +use crate::{ + config::LoadtestConfig, + corrupted_tx::CorruptedSigner, + fs_utils::{loadnext_contract, TestContract}, + rng::{LoadtestRng, Random}, +}; + +/// An alias to [`zksync::Wallet`] with HTTP client. Wrapped in `Arc` since +/// the client cannot be cloned due to limitations in jsonrpsee. +pub type SyncWallet = Arc>; +pub type CorruptedSyncWallet = Arc>; + +/// Thread-safe pool of the addresses of accounts used in the loadtest. +#[derive(Debug, Clone)] +pub struct AddressPool { + addresses: Arc>, +} + +impl AddressPool { + pub fn new(addresses: Vec
) -> Self { + Self { + addresses: Arc::new(addresses), + } + } + + /// Randomly chooses one of the addresses stored in the pool. + pub fn random_address(&self, rng: &mut LoadtestRng) -> Address { + let index = rng.gen_range(0..self.addresses.len()); + self.addresses[index] + } +} + +/// Credentials for a test account. +/// Currently we support only EOA accounts. +#[derive(Debug, Clone)] +pub struct AccountCredentials { + /// Ethereum private key. + pub eth_pk: H256, + /// Ethereum address derived from the private key. + pub address: Address, +} + +impl Random for AccountCredentials { + fn random(rng: &mut LoadtestRng) -> Self { + let eth_pk = H256::random_using(rng); + let address = pk_to_address(ð_pk); + + Self { eth_pk, address } + } +} + +/// Type that contains the data required for the test wallet to operate. +#[derive(Debug, Clone)] +pub struct TestWallet { + /// Pre-initialized wallet object. + pub wallet: SyncWallet, + /// Wallet with corrupted signer. + pub corrupted_wallet: CorruptedSyncWallet, + /// Contract bytecode and calldata to be used for sending `Execute` transactions. + pub test_contract: TestContract, + /// Address of the deployed contract to be used for sending + /// `Execute` transaction. + pub deployed_contract_address: Arc>, + /// RNG object derived from a common loadtest seed and the wallet private key. + pub rng: LoadtestRng, +} + +/// Pool of accounts to be used in the test. +/// Each account is represented as `zksync::Wallet` in order to provide convenient interface of interation with zkSync. +#[derive(Debug)] +pub struct AccountPool { + /// Main wallet that will be used to initialize all the test wallets. + pub master_wallet: SyncWallet, + /// Collection of test wallets and their Ethereum private keys. + pub accounts: VecDeque, + /// Pool of addresses of the test accounts. + pub addresses: AddressPool, +} + +impl AccountPool { + /// Generates all the required test accounts and prepares `Wallet` objects. 
+ pub async fn new(config: &LoadtestConfig) -> anyhow::Result { + let l2_chain_id = L2ChainId(config.l2_chain_id); + // Create a client for pinging the rpc. + let client = HttpClientBuilder::default() + .build(&config.l2_rpc_address) + .unwrap(); + // Perform a health check: check whether zkSync server is alive. + let mut server_alive = false; + for _ in 0usize..3 { + if let Ok(Ok(_)) = timeout(Duration::from_secs(3), client.get_main_contract()).await { + server_alive = true; + break; + } + } + if !server_alive { + anyhow::bail!("zkSync server does not respond. Please check RPC address and whether server is launched"); + } + + let test_contract = loadnext_contract(&config.test_contracts_path)?; + + let master_wallet = { + let eth_pk = H256::from_str(&config.master_wallet_pk) + .expect("Can't parse master wallet private key"); + let eth_signer = PrivateKeySigner::new(eth_pk); + let address = pk_to_address(ð_pk); + let signer = Signer::new(eth_signer, address, l2_chain_id); + + Arc::new(Wallet::with_http_client(&config.l2_rpc_address, signer).unwrap()) + }; + + let mut rng = LoadtestRng::new_generic(config.seed.clone()); + vlog::info!("Using RNG with master seed: {}", rng.seed_hex()); + + let group_size = config.accounts_group_size; + let accounts_amount = config.accounts_amount; + anyhow::ensure!( + group_size <= accounts_amount, + "Accounts group size is expected to be less than or equal to accounts amount" + ); + + let mut accounts = VecDeque::with_capacity(accounts_amount); + let mut addresses = Vec::with_capacity(accounts_amount); + + for i in (0..accounts_amount).step_by(group_size) { + let range_end = (i + group_size).min(accounts_amount); + // The next group shares the contract address. 
+ let deployed_contract_address = Arc::new(OnceCell::new()); + + for _ in i..range_end { + let eth_credentials = AccountCredentials::random(&mut rng); + let eth_signer = PrivateKeySigner::new(eth_credentials.eth_pk); + let address = eth_credentials.address; + let signer = Signer::new(eth_signer, address, l2_chain_id); + + let corrupted_eth_signer = CorruptedSigner::new(address); + let corrupted_signer = Signer::new(corrupted_eth_signer, address, l2_chain_id); + + let wallet = Wallet::with_http_client(&config.l2_rpc_address, signer).unwrap(); + let corrupted_wallet = + Wallet::with_http_client(&config.l2_rpc_address, corrupted_signer).unwrap(); + + addresses.push(wallet.address()); + let account = TestWallet { + wallet: Arc::new(wallet), + corrupted_wallet: Arc::new(corrupted_wallet), + test_contract: test_contract.clone(), + deployed_contract_address: deployed_contract_address.clone(), + rng: rng.derive(eth_credentials.eth_pk), + }; + accounts.push_back(account); + } + } + + Ok(Self { + master_wallet, + accounts, + addresses: AddressPool::new(addresses), + }) + } +} + +fn pk_to_address(eth_pk: &H256) -> Address { + PackedEthSignature::address_from_private_key(eth_pk) + .expect("Can't get an address from the private key") +} diff --git a/core/tests/loadnext/src/all.rs b/core/tests/loadnext/src/all.rs new file mode 100644 index 000000000000..d845553927f1 --- /dev/null +++ b/core/tests/loadnext/src/all.rs @@ -0,0 +1,9 @@ +/// Trait that allows accessing all the possible variants of a sequence. +pub trait All: Sized { + fn all() -> &'static [Self]; +} + +/// Trait that extends `All` trait with the corresponding expected probability. 
+pub trait AllWeighted: Sized { + fn all_weighted() -> &'static [(Self, f32)]; +} diff --git a/core/tests/loadnext/src/command/api.rs b/core/tests/loadnext/src/command/api.rs new file mode 100644 index 000000000000..e865ab000318 --- /dev/null +++ b/core/tests/loadnext/src/command/api.rs @@ -0,0 +1,95 @@ +use num::Integer; +use rand::RngCore; + +use zksync::EthNamespaceClient; +use zksync_types::api; + +use crate::{ + account_pool::SyncWallet, + all::AllWeighted, + rng::{LoadtestRng, WeightedRandom}, +}; + +/// Helper enum for generating random block number. +#[derive(Debug, Copy, Clone)] +enum BlockNumber { + Committed, + Number, +} + +impl AllWeighted for BlockNumber { + fn all_weighted() -> &'static [(Self, f32)] { + const DEFAULT_WEIGHT: f32 = 1.0; + + &[ + (Self::Committed, DEFAULT_WEIGHT), + (Self::Number, DEFAULT_WEIGHT), + ] + } +} + +#[derive(Debug, Copy, Clone)] +pub enum ApiRequestType { + /// Requests block with full transactions list. + BlockWithTxs, + /// Requests account balance. + Balance, + /// Requests account-deployed contract events. + GetLogs, +} + +impl AllWeighted for ApiRequestType { + fn all_weighted() -> &'static [(Self, f32)] { + const DEFAULT_WEIGHT: f32 = 1.0; + + &[ + (Self::BlockWithTxs, DEFAULT_WEIGHT), + (Self::Balance, DEFAULT_WEIGHT), + (Self::GetLogs, DEFAULT_WEIGHT), + ] + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ApiRequest { + /// Type of the request to be performed. + pub request_type: ApiRequestType, + /// ZkSync block number, generated randomly. 
+ pub block_number: api::BlockNumber, +} + +impl ApiRequest { + pub async fn random(wallet: &SyncWallet, rng: &mut LoadtestRng) -> Self { + let block_number = random_block_number(wallet, rng).await; + let request_type = ApiRequestType::random(rng); + Self { + request_type, + block_number, + } + } +} + +async fn random_block_number(wallet: &SyncWallet, rng: &mut LoadtestRng) -> api::BlockNumber { + let block_number = BlockNumber::random(rng); + match block_number { + BlockNumber::Committed => api::BlockNumber::Committed, + BlockNumber::Number => { + // Choose a random block in the range [0, latest_committed_block_number). + match wallet + .provider + .get_block_by_number(api::BlockNumber::Committed, false) + .await + { + Ok(Some(block_number)) => { + let block_number = block_number.number.as_u64(); + let number = rng.next_u64().mod_floor(&block_number); + api::BlockNumber::Number(number.into()) + } + _ => { + // Fallback to the latest committed block. + api::BlockNumber::Committed + } + } + } + } +} diff --git a/core/tests/loadnext/src/command/explorer_api.rs b/core/tests/loadnext/src/command/explorer_api.rs new file mode 100644 index 000000000000..ed1c6fdf53aa --- /dev/null +++ b/core/tests/loadnext/src/command/explorer_api.rs @@ -0,0 +1,58 @@ +use crate::all::AllWeighted; +use crate::config::ExplorerApiRequestWeights; +use crate::rng::{LoadtestRng, WeightedRandom}; +use once_cell::sync::OnceCell; + +static WEIGHTS: OnceCell<[(ExplorerApiRequestType, f32); 9]> = OnceCell::new(); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ExplorerApiRequestType { + NetworkStats, + Blocks, + Block, + Transaction, + Transactions, + AccountTransactions, + Account, + Contract, + Token, +} + +impl ExplorerApiRequestType { + pub fn initialize_weights(weights: &ExplorerApiRequestWeights) { + WEIGHTS + .set([ + (ExplorerApiRequestType::NetworkStats, weights.network_stats), + (ExplorerApiRequestType::Blocks, weights.blocks), + (ExplorerApiRequestType::Block, 
weights.block), + (ExplorerApiRequestType::Transaction, weights.transaction), + (ExplorerApiRequestType::Transactions, weights.transactions), + ( + ExplorerApiRequestType::AccountTransactions, + weights.account_transactions, + ), + (ExplorerApiRequestType::Account, weights.account), + (ExplorerApiRequestType::Contract, weights.contract), + (ExplorerApiRequestType::Token, weights.token), + ]) + .unwrap(); + } +} +impl AllWeighted for ExplorerApiRequestType { + fn all_weighted() -> &'static [(Self, f32)] { + WEIGHTS.get().expect("Weights are not initialized") + } +} + +#[derive(Debug, Copy, Clone)] +pub struct ExplorerApiRequest { + /// Type of the request to be performed. + pub request_type: ExplorerApiRequestType, +} + +impl ExplorerApiRequest { + pub async fn random(rng: &mut LoadtestRng) -> Self { + let request_type = ExplorerApiRequestType::random(rng); + Self { request_type } + } +} diff --git a/core/tests/loadnext/src/command/mod.rs b/core/tests/loadnext/src/command/mod.rs new file mode 100644 index 000000000000..1b46acd00e35 --- /dev/null +++ b/core/tests/loadnext/src/command/mod.rs @@ -0,0 +1,11 @@ +pub use self::{ + api::{ApiRequest, ApiRequestType}, + explorer_api::{ExplorerApiRequest, ExplorerApiRequestType}, + pubsub::SubscriptionType, + tx_command::{ExpectedOutcome, IncorrectnessModifier, TxCommand, TxType}, +}; + +mod api; +mod explorer_api; +mod pubsub; +mod tx_command; diff --git a/core/tests/loadnext/src/command/pubsub.rs b/core/tests/loadnext/src/command/pubsub.rs new file mode 100644 index 000000000000..8dd13950ab9a --- /dev/null +++ b/core/tests/loadnext/src/command/pubsub.rs @@ -0,0 +1,32 @@ +use crate::all::AllWeighted; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum SubscriptionType { + /// Subscribes for new block headers. + BlockHeaders, + /// Subscribes for new transactions. + PendingTransactions, + /// Subscribes for new logs. 
+ Logs, +} + +impl AllWeighted for SubscriptionType { + fn all_weighted() -> &'static [(Self, f32)] { + const DEFAULT_WEIGHT: f32 = 1.0; + &[ + (Self::BlockHeaders, DEFAULT_WEIGHT), + (Self::PendingTransactions, DEFAULT_WEIGHT), + (Self::Logs, DEFAULT_WEIGHT), + ] + } +} + +impl SubscriptionType { + pub fn rpc_name(&self) -> &'static str { + match self { + Self::BlockHeaders => "newHeads", + Self::PendingTransactions => "newPendingTransactions", + Self::Logs => "logs", + } + } +} diff --git a/core/tests/loadnext/src/command/tx_command.rs b/core/tests/loadnext/src/command/tx_command.rs new file mode 100644 index 000000000000..945a7ca16bb5 --- /dev/null +++ b/core/tests/loadnext/src/command/tx_command.rs @@ -0,0 +1,195 @@ +use once_cell::sync::OnceCell; +use rand::Rng; +use static_assertions::const_assert; + +use zksync_types::{Address, U256}; + +use crate::{ + account_pool::AddressPool, + all::{All, AllWeighted}, + config::TransactionWeights, + rng::{LoadtestRng, WeightedRandom}, +}; + +static WEIGHTS: OnceCell<[(TxType, f32); 5]> = OnceCell::new(); + +/// Type of transaction. It doesn't copy the zkSync operation list, because +/// it divides some transactions in subcategories (e.g. 
to new account / to existing account; to self / to other; etc)/ +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum TxType { + Deposit, + WithdrawToSelf, + WithdrawToOther, + DeployContract, + L1Execute, + L2Execute, +} + +impl TxType { + pub fn initialize_weights(transaction_weights: &TransactionWeights) { + WEIGHTS + .set([ + (TxType::Deposit, transaction_weights.deposit), + (TxType::L2Execute, transaction_weights.l2_transactions), + (TxType::L1Execute, transaction_weights.l1_transactions), + (TxType::WithdrawToSelf, transaction_weights.withdrawal / 2.0), + ( + TxType::WithdrawToOther, + transaction_weights.withdrawal / 2.0, + ), + ]) + .unwrap(); + } +} + +impl All for TxType { + fn all() -> &'static [Self] { + Self::const_all() + } +} + +impl AllWeighted for TxType { + fn all_weighted() -> &'static [(Self, f32)] { + WEIGHTS.get().expect("Weights are not initialized") + } +} + +impl TxType { + const fn const_all() -> &'static [Self] { + &[ + Self::Deposit, + Self::WithdrawToSelf, + Self::WithdrawToOther, + Self::L1Execute, + Self::L2Execute, + ] + } + + fn is_target_self(self) -> bool { + matches!(self, Self::WithdrawToSelf) + } +} + +/// Modifier to be applied to the transaction in order to make it incorrect. +/// Incorrect transactions are a significant part of loadtest, because we want to ensure +/// that server is resilient for all the possible kinds of user input. +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum IncorrectnessModifier { + ZeroFee, + IncorrectSignature, + + // Last option goes for no modifier, + // since it's more convenient than dealing with `Option`. + None, +} + +impl IncorrectnessModifier { + // Have to implement this as a const function, since const functions in traits are not stabilized yet. 
+ const fn const_all() -> &'static [Self] { + &[Self::ZeroFee, Self::IncorrectSignature, Self::None] + } +} + +impl All for IncorrectnessModifier { + fn all() -> &'static [Self] { + Self::const_all() + } +} + +impl AllWeighted for IncorrectnessModifier { + fn all_weighted() -> &'static [(Self, f32)] { + const VARIANT_AMOUNTS: f32 = IncorrectnessModifier::const_all().len() as f32; + // No modifier is 9 times probable than all the other variants in sum. + // In other words, 90% probability of no modifier. + const NONE_PROBABILITY: f32 = (VARIANT_AMOUNTS - 1.0) * 9.0; + const DEFAULT_PROBABILITY: f32 = 1.0f32; + + const WEIGHTED: &[(IncorrectnessModifier, f32)] = &[ + (IncorrectnessModifier::ZeroFee, DEFAULT_PROBABILITY), + ( + IncorrectnessModifier::IncorrectSignature, + DEFAULT_PROBABILITY, + ), + (IncorrectnessModifier::None, NONE_PROBABILITY), + ]; + + const_assert!(WEIGHTED.len() == IncorrectnessModifier::const_all().len()); + WEIGHTED + } +} + +/// Expected outcome of transaction: +/// Since we may create erroneous transactions on purpose, +/// we may expect different outcomes for each transaction. +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum ExpectedOutcome { + /// Transactions was successfully executed. + TxSucceed, + /// Transaction sending should fail. + ApiRequestFailed, + /// Transaction should be accepted, but rejected at the + /// time of execution. + TxRejected, +} + +impl IncorrectnessModifier { + pub fn expected_outcome(self) -> ExpectedOutcome { + match self { + Self::None => ExpectedOutcome::TxSucceed, + Self::ZeroFee | Self::IncorrectSignature => ExpectedOutcome::ApiRequestFailed, + } + } +} + +/// Complete description of a transaction that must be executed by a test wallet. +#[derive(Debug, Clone)] +pub struct TxCommand { + /// Type of operation. + pub command_type: TxType, + /// Whether and how transaction should be corrupted. + pub modifier: IncorrectnessModifier, + /// Recipient address. 
+ pub to: Address, + /// Transaction amount (0 if not applicable). + pub amount: U256, +} + +impl TxCommand { + /// Generates a fully random transaction command. + pub fn random(rng: &mut LoadtestRng, own_address: Address, addresses: &AddressPool) -> Self { + let command_type = TxType::random(rng); + + Self::new_with_type(rng, own_address, addresses, command_type) + } + + fn new_with_type( + rng: &mut LoadtestRng, + own_address: Address, + addresses: &AddressPool, + command_type: TxType, + ) -> Self { + let mut command = Self { + command_type, + modifier: IncorrectnessModifier::random(rng), + to: addresses.random_address(rng), + amount: Self::random_amount(rng), + }; + + // Check whether we should use a self as a target. + if command.command_type.is_target_self() { + command.to = own_address; + } + + // Fix incorrectness modifier: + // L1 txs should always have `None` modifier. + if matches!(command.command_type, TxType::Deposit | TxType::L1Execute) { + command.modifier = IncorrectnessModifier::None; + } + + command + } + + fn random_amount(rng: &mut LoadtestRng) -> U256 { + rng.gen_range(0u64..2u64.pow(18)).into() + } +} diff --git a/core/tests/loadnext/src/config.rs b/core/tests/loadnext/src/config.rs new file mode 100644 index 000000000000..4156ba6f6a45 --- /dev/null +++ b/core/tests/loadnext/src/config.rs @@ -0,0 +1,376 @@ +use std::path::PathBuf; +use std::time::Duration; + +use serde::Deserialize; + +use zksync_types::network::Network; +use zksync_types::{Address, L2ChainId, H160}; + +use crate::fs_utils::read_tokens; +use zksync_utils::test_utils::LoadnextContractExecutionParams; + +/// Configuration for the loadtest. +/// +/// This structure is meant to provide the least possible amount of parameters: +/// By the ideology of the test, it is OK for it to be opinionated. Thus we don't provide +/// kinds of operations we want to perform, do not configure fail or pass criteria. 
+/// +/// It is expected that the user will provide the basic settings, and the loadtest will +/// take care of everything else. +#[derive(Debug, Clone, Deserialize)] +pub struct LoadtestConfig { + /// Address of the Ethereum web3 API. + #[serde(default = "default_l1_rpc_address")] + pub l1_rpc_address: String, + + /// Ethereum private key of the wallet that has funds to perform a test. + #[serde(default = "default_master_wallet_pk")] + pub master_wallet_pk: String, + + /// Amount of accounts to be used in test. + /// This option configures the "width" of the test: + /// how many concurrent operation flows will be executed. + #[serde(default = "default_accounts_amount")] + pub accounts_amount: usize, + + /// Duration of the test. + #[serde(default = "default_duration_sec")] + pub duration_sec: u64, + + /// Address of the ERC-20 token to be used in test. + /// + /// Token must satisfy two criteria: + /// - Be supported by zkSync. + /// - Have `mint` operation. + /// + /// Note that we use ERC-20 token since we can't easily mint a lot of ETH on + /// Rinkeby or Ropsten without caring about collecting it back. + #[serde(default = "default_main_token")] + pub main_token: Address, + + /// Path to test contracts bytecode and ABI required for sending + /// deploy and execute L2 transactions. Each folder in the path is expected + /// to have the following structure: + ///```ignore + /// . + /// ├── bytecode + /// └── abi.json + ///``` + /// Contract folder names names are not restricted. + /// + /// An example: + ///```ignore + /// . + /// ├── erc-20 + /// │   ├── bytecode + /// │   └── abi.json + /// └── simple-contract + /// ├── bytecode + /// └── abi.json + ///``` + #[serde(default = "default_test_contracts_path")] + pub test_contracts_path: PathBuf, + /// Limits the number of simultaneous API requests being performed at any moment of time. + /// + /// Setting it to: + /// - 0 turns off API requests. + /// - `accounts_amount` relieves the limit. 
+ #[serde(default = "default_sync_api_requests_limit")] + pub sync_api_requests_limit: usize, + + /// Limits the number of simultaneous active PubSub subscriptions at any moment of time. + /// + /// Setting it to: + /// - 0 turns off PubSub subscriptions. + #[serde(default = "default_sync_pubsub_subscriptions_limit")] + pub sync_pubsub_subscriptions_limit: usize, + + /// Time in seconds for a subscription to be active. Subscription will be closed after that time. + #[serde(default = "default_single_subscription_time_secs")] + pub single_subscription_time_secs: u64, + + /// Optional seed to be used in the test: normally you don't need to set the seed, + /// but you can re-use seed from previous run to reproduce the sequence of operations locally. + /// Seed must be represented as a hexadecimal string. + /// + /// Using the same seed doesn't guarantee reproducibility of API requests: unlike operations, these + /// are generated in flight by multiple accounts in parallel. + #[serde(default = "default_seed")] + pub seed: Option, + + /// Chain id of L2 node. + #[serde(default = "default_l2_chain_id")] + pub l2_chain_id: u16, + + /// RPC address of L2 node. + #[serde(default = "default_l2_rpc_address")] + pub l2_rpc_address: String, + + /// WS RPC address of L2 node. + #[serde(default = "default_l2_ws_rpc_address")] + pub l2_ws_rpc_address: String, + + /// Explorer api address of L2 node. + #[serde(default = "default_l2_explorer_api_address")] + pub l2_explorer_api_address: String, + + /// The maximum number of transactions per account that can be sent without waiting for confirmation + #[serde(default = "default_max_inflight_txs")] + pub max_inflight_txs: usize, + + /// All of test accounts get split into groups that share the + /// deployed contract address. This helps to emulate the behavior of + /// sending `Execute` to the same contract and reading its events by + /// single a group. This value should be less than or equal to `ACCOUNTS_AMOUNT`. 
+ #[serde(default = "default_accounts_group_size")] + pub accounts_group_size: usize, + + /// The expected number of the processed transactions during loadtest + /// that should be compared to the actual result. + #[serde(default = "default_expected_tx_count")] + pub expected_tx_count: Option, +} + +fn default_max_inflight_txs() -> usize { + let result = 5; + vlog::info!("Using default MAX_INFLIGHT_TXS: {}", result); + result +} + +fn default_l1_rpc_address() -> String { + // https://rinkeby.infura.io/v3/8934c959275444d480834ba1587c095f for rinkeby + let result = "http://127.0.0.1:8545".to_string(); + vlog::info!("Using default L1_RPC_ADDRESS: {}", result); + result +} + +fn default_l2_explorer_api_address() -> String { + let result = "http://127.0.0.1:3070".to_string(); + vlog::info!("Using default L2_EXPLORER_API_ADDRESS: {}", result); + result +} + +fn default_master_wallet_pk() -> String { + // Use this key only for localhost because it is compromised! + // Using this key for rinkeby will result in losing rinkeby ETH. + // Corresponding wallet is 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 + let result = "7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110".to_string(); + vlog::info!("Using default MASTER_WALLET_PK: {}", result); + result +} + +fn default_accounts_amount() -> usize { + let result = 80; + vlog::info!("Using default ACCOUNTS_AMOUNT: {}", result); + result +} + +fn default_duration_sec() -> u64 { + let result = 300; + vlog::info!("Using default DURATION_SEC: {}", result); + result +} + +fn default_accounts_group_size() -> usize { + let result = 40; + vlog::info!("Using default ACCOUNTS_GROUP_SIZE: {}", result); + result +} + +fn default_main_token() -> H160 { + // Read token addresses from `etc/tokens/localhost.json`. Use the first one + // as a main token since all of them are suitable. 
+ + // 0xeb8f08a975Ab53E34D8a0330E0D34de942C95926 for rinkeby + let tokens = read_tokens(Network::Localhost).expect("Failed to parse tokens file"); + let main_token = tokens.first().expect("Loaded tokens list is empty"); + vlog::info!("Main token: {:?}", main_token); + main_token.address +} + +fn default_test_contracts_path() -> PathBuf { + let test_contracts_path = { + let home = std::env::var("ZKSYNC_HOME").unwrap(); + let path = PathBuf::from(&home); + path.join("etc/contracts-test-data") + }; + + vlog::info!("Test contracts path: {}", test_contracts_path.display()); + + test_contracts_path +} + +fn default_sync_api_requests_limit() -> usize { + let result = 20; + vlog::info!("Using default SYNC_API_REQUESTS_LIMIT: {}", result); + result +} + +fn default_sync_pubsub_subscriptions_limit() -> usize { + let result = 150; + vlog::info!("Using default SYNC_PUBSUB_SUBSCRIPTIONS_LIMIT: {}", result); + result +} + +fn default_single_subscription_time_secs() -> u64 { + let result = 30; + vlog::info!("Using default SINGLE_SUBSCRIPTION_TIME_SECS: {}", result); + result +} + +fn default_seed() -> Option { + let result = None; + vlog::info!("Using default SEED: {:?}", result); + result +} + +fn default_l2_chain_id() -> u16 { + // 270 for rinkeby + let result = *L2ChainId::default(); + vlog::info!("Using default L2_CHAIN_ID: {}", result); + result +} + +pub fn get_default_l2_rpc_address() -> String { + "http://127.0.0.1:3050".to_string() +} + +fn default_l2_rpc_address() -> String { + // http://z2-dev-api.zksync.dev/ for stage2 + let result = get_default_l2_rpc_address(); + vlog::info!("Using default L2_RPC_ADDRESS: {}", result); + result +} + +fn default_l2_ws_rpc_address() -> String { + // ws://z2-dev-api.zksync.dev:80/ws for stage2 + let result = "ws://127.0.0.1:3051".to_string(); + vlog::info!("Using default L2_WS_RPC_ADDRESS: {}", result); + result +} + +fn default_expected_tx_count() -> Option { + let result = None; + vlog::info!("Using default EXPECTED_TX_COUNT: {:?}", 
result); + result +} + +impl LoadtestConfig { + pub fn from_env() -> envy::Result { + envy::from_env() + } + pub fn duration(&self) -> Duration { + Duration::from_secs(self.duration_sec) + } +} + +/// Configuration for the weights of loadtest operations +/// We use a random selection based on weight of operations. To perform some operations frequently, the developer must set the weight higher. +/// +/// This configuration is independent from the main config for preserving simplicity of the main config +/// and do not break the backward compatibility +#[derive(Debug)] +pub struct ExecutionConfig { + pub transaction_weights: TransactionWeights, + pub contract_execution_params: LoadnextContractExecutionParams, + pub explorer_api_config_weights: ExplorerApiRequestWeights, +} + +impl ExecutionConfig { + pub fn from_env() -> Self { + let transaction_weights = + TransactionWeights::from_env().unwrap_or_else(default_transaction_weights); + let contract_execution_params = LoadnextContractExecutionParams::from_env() + .unwrap_or_else(default_contract_execution_params); + let explorer_api_config_weights = ExplorerApiRequestWeights::from_env() + .unwrap_or_else(default_explorer_api_request_weights); + Self { + transaction_weights, + contract_execution_params, + explorer_api_config_weights, + } + } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct ExplorerApiRequestWeights { + pub network_stats: f32, + pub blocks: f32, + pub block: f32, + pub account_transactions: f32, + pub transaction: f32, + pub transactions: f32, + pub account: f32, + pub contract: f32, + pub token: f32, +} + +impl Default for ExplorerApiRequestWeights { + fn default() -> Self { + Self { + network_stats: 1.0, + blocks: 1.0, + block: 1.0, + transactions: 1.0, + account: 1.0, + token: 1.0, + contract: 1.0, + transaction: 1.0, + account_transactions: 1.0, + } + } +} + +impl ExplorerApiRequestWeights { + pub fn from_env() -> Option { + envy::prefixed("EXPLORER_API_REQUESTS_WEIGHTS_") + .from_env() + .ok() 
+ } +} + +#[derive(Debug, Clone, Deserialize)] +pub struct TransactionWeights { + pub deposit: f32, + pub withdrawal: f32, + pub l1_transactions: f32, + pub l2_transactions: f32, +} + +impl TransactionWeights { + pub fn from_env() -> Option { + envy::prefixed("TRANSACTION_WEIGHTS_").from_env().ok() + } +} + +impl Default for TransactionWeights { + fn default() -> Self { + Self { + deposit: 0.1, + withdrawal: 0.5, + l1_transactions: 0.1, + l2_transactions: 1.0, + } + } +} + +fn default_transaction_weights() -> TransactionWeights { + let result = TransactionWeights::default(); + vlog::info!("Using default TransactionWeights: {:?}", &result); + result +} + +fn default_contract_execution_params() -> LoadnextContractExecutionParams { + let result = LoadnextContractExecutionParams::default(); + vlog::info!( + "Using default LoadnextContractExecutionParams: {:?}", + &result + ); + result +} + +fn default_explorer_api_request_weights() -> ExplorerApiRequestWeights { + let result = ExplorerApiRequestWeights::default(); + vlog::info!("Using default ExplorerApiRequestWeights: {:?}", &result); + result +} diff --git a/core/tests/loadnext/src/constants.rs b/core/tests/loadnext/src/constants.rs new file mode 100644 index 000000000000..e4153d9bfb36 --- /dev/null +++ b/core/tests/loadnext/src/constants.rs @@ -0,0 +1,23 @@ +use std::time::Duration; + +/// Normally, block is committed on Ethereum every 15 seconds; however there are no guarantees that our transaction +/// will be included in the next block right after sending. +pub const ETH_CONFIRMATION_TIMEOUT: Duration = Duration::from_secs(300); + +/// Normally, block is committed on Ethereum every 10-15 seconds, so there is no need to poll the ETH node +/// any frequently than once in 10 seconds. 
+pub const ETH_POLLING_INTERVAL: Duration = Duration::from_secs(10); + +/// Loadtest assumes that blocks on the server will be created relatively quickly (without timeouts set in hours), +/// but nonetheless we want to provide some buffer in case we'll spam the server with way too many transactions +/// and some tx will have to wait in the mempool for a while. +pub const COMMIT_TIMEOUT: Duration = Duration::from_secs(600); +/// We don't want to overload the server with too many requests; given the fact that blocks are expected to be created +/// every couple of seconds, chosen value seems to be adequate to provide the result in one or two calls at average. +pub const POLLING_INTERVAL: Duration = Duration::from_secs(3); + +pub const MAX_OUTSTANDING_NONCE: usize = 50; + +/// Each account continuously sends API requests in addition to transactions. Such requests are considered failed +/// after this amount of time elapsed without any server response. +pub const API_REQUEST_TIMEOUT: Duration = Duration::from_secs(5); diff --git a/core/tests/loadnext/src/corrupted_tx.rs b/core/tests/loadnext/src/corrupted_tx.rs new file mode 100644 index 000000000000..c3ada60472e0 --- /dev/null +++ b/core/tests/loadnext/src/corrupted_tx.rs @@ -0,0 +1,148 @@ +use async_trait::async_trait; + +use zksync::signer::Signer; +use zksync_eth_signer::{error::SignerError, EthereumSigner}; +use zksync_types::{Address, EIP712TypedStructure, Eip712Domain, PackedEthSignature, H256}; + +use crate::command::IncorrectnessModifier; +use zksync_eth_signer::raw_ethereum_tx::TransactionParameters; +use zksync_types::fee::Fee; +use zksync_types::l2::L2Tx; + +/// Trait that exists solely to extend the signed zkSync transaction interface, providing the ability +/// to modify transaction in a way that will make it invalid. +/// +/// Loadtest is expected to simulate the user behavior, and it's not that uncommon of users to send incorrect +/// transactions. 
+#[async_trait] +pub trait Corrupted: Sized { + /// Creates a transaction without fee provided. + async fn zero_fee(self, signer: &Signer) -> Self; + + /// Resigns the transaction after the modification in order to make signatures correct (if applicable). + async fn resign(&mut self, signer: &Signer); + + /// Automatically chooses one of the methods of this trait based on the provided incorrectness modifier. + async fn apply_modifier(self, modifier: IncorrectnessModifier, signer: &Signer) -> Self { + match modifier { + IncorrectnessModifier::None => self, + IncorrectnessModifier::IncorrectSignature => self, // signature will be changed before submitting transaction + IncorrectnessModifier::ZeroFee => self.zero_fee(signer).await, + } + } +} + +#[async_trait] +impl Corrupted for L2Tx +where + S: EthereumSigner, +{ + async fn resign(&mut self, signer: &Signer) { + let signature = signer.sign_transaction(&*self).await.unwrap(); + self.set_signature(signature); + } + + async fn zero_fee(mut self, signer: &Signer) -> Self { + self.common_data.fee = Fee::default(); + self.resign(signer).await; + self + } +} + +#[derive(Debug, Clone)] +pub struct CorruptedSigner { + address: Address, +} + +impl CorruptedSigner { + fn bad_signature() -> PackedEthSignature { + let private_key = H256::random(); + let message = b"bad message"; + PackedEthSignature::sign(&private_key, message).unwrap() + } + + pub fn new(address: Address) -> Self { + Self { address } + } +} + +#[async_trait] +impl EthereumSigner for CorruptedSigner { + async fn sign_message(&self, _message: &[u8]) -> Result { + Ok(Self::bad_signature()) + } + + async fn sign_typed_data( + &self, + _domain: &Eip712Domain, + _typed_struct: &S, + ) -> Result { + Ok(Self::bad_signature()) + } + + async fn sign_transaction( + &self, + _raw_tx: TransactionParameters, + ) -> Result, SignerError> { + Ok(b"bad bytes".to_vec()) + } + + async fn get_address(&self) -> Result { + Ok(self.address) + } +} + +#[cfg(test)] +mod tests { + use 
super::*; + use zksync_eth_signer::PrivateKeySigner; + use zksync_types::fee::Fee; + use zksync_types::L2ChainId; + use zksync_types::{ + tokens::ETHEREUM_ADDRESS, tx::primitives::PackedEthSignature, Address, Nonce, H256, + }; + + const AMOUNT: u64 = 100; + const FEE: u64 = 100; + const NONCE: Nonce = Nonce(1); + + fn get_signer(chain_id: L2ChainId) -> Signer { + let eth_pk = H256::random(); + let eth_signer = PrivateKeySigner::new(eth_pk); + let address = PackedEthSignature::address_from_private_key(ð_pk) + .expect("Can't get an address from the private key"); + Signer::new(eth_signer, address, chain_id) + } + + async fn create_transfer(signer: &Signer) -> L2Tx { + let fee = Fee { + gas_limit: FEE.into(), + max_fee_per_gas: Default::default(), + max_priority_fee_per_gas: Default::default(), + gas_per_pubdata_limit: Default::default(), + }; + signer + .sign_transfer( + Address::repeat_byte(0x7e), + ETHEREUM_ADDRESS, + AMOUNT.into(), + fee, + NONCE, + Default::default(), + ) + .await + .unwrap() + } + + #[tokio::test] + async fn zero_fee() { + let chain_id = L2ChainId::default(); + let signer = get_signer(chain_id); + + let transfer = create_transfer(&signer).await; + + let modified_transfer = transfer.zero_fee(&signer).await; + + assert_eq!(modified_transfer.common_data.fee.gas_limit, 0u64.into()); + } +} diff --git a/core/tests/loadnext/src/executor.rs b/core/tests/loadnext/src/executor.rs new file mode 100644 index 000000000000..4a6d8abd617f --- /dev/null +++ b/core/tests/loadnext/src/executor.rs @@ -0,0 +1,651 @@ +use futures::{channel::mpsc, future::join_all}; +use std::ops::Add; +use tokio::task::JoinHandle; + +use zksync::ethereum::{PriorityOpHolder, DEFAULT_PRIORITY_FEE}; +use zksync::utils::{ + get_approval_based_paymaster_input, get_approval_based_paymaster_input_for_estimation, +}; +use zksync::web3::{contract::Options, types::TransactionReceipt}; +use zksync::{EthereumProvider, ZksNamespaceClient}; +use 
zksync_config::constants::MAX_L1_TRANSACTION_GAS_LIMIT; +use zksync_eth_client::EthInterface; +use zksync_eth_signer::PrivateKeySigner; +use zksync_types::api::{BlockNumber, U64}; +use zksync_types::{tokens::ETHEREUM_ADDRESS, Address, Nonce, U256}; + +use crate::{ + account::AccountLifespan, + account_pool::AccountPool, + config::{ExecutionConfig, LoadtestConfig}, + constants::*, + report_collector::{LoadtestResult, ReportCollector}, +}; + +pub const MAX_L1_TRANSACTIONS: u64 = 10; + +/// Executor is the entity capable of running the loadtest flow. +/// +/// It takes care of the following topics: +/// +/// - Minting the tokens on L1 for the main account. +/// - Depositing tokens to the main account in L2 and unlocking it. +/// - Spawning the report collector. +/// - Distributing the funds among the test wallets. +/// - Spawning account lifespan futures. +/// - Awaiting for all the account futures to complete. +/// - Getting the final test resolution from the report collector. +pub struct Executor { + config: LoadtestConfig, + execution_config: ExecutionConfig, + l2_main_token: Address, + pool: AccountPool, +} + +impl Executor { + /// Creates a new Executor entity. + pub async fn new( + config: LoadtestConfig, + execution_config: ExecutionConfig, + ) -> anyhow::Result { + let pool = AccountPool::new(&config).await?; + + // derive l2 main token address + let l2_main_token = pool + .master_wallet + .ethereum(&config.l1_rpc_address) + .await + .expect("Can't get Ethereum client") + .l2_token_address(config.main_token, None) + .await + .unwrap(); + + Ok(Self { + config, + execution_config, + pool, + l2_main_token, + }) + } + + /// Runs the loadtest until the completion. + pub async fn start(&mut self) -> LoadtestResult { + // If the error occurs during the main flow, we will consider it as a test failure. 
+ self.start_inner().await.unwrap_or_else(|err| { + vlog::error!("Loadtest was interrupted by the following error: {}", err); + LoadtestResult::TestFailed + }) + } + + /// Inner representation of `start` function which returns a `Result`, so it can conveniently use `?`. + async fn start_inner(&mut self) -> anyhow::Result { + vlog::info!("Initializing accounts"); + vlog::info!("Running for MASTER {:?}", self.pool.master_wallet.address()); + self.check_onchain_balance().await?; + self.mint().await?; + self.deposit_to_master().await?; + + // Top up paymaster for local env. + if self.config.l2_rpc_address == crate::config::get_default_l2_rpc_address() { + self.deposit_eth_to_paymaster().await?; + } + + let (executor_future, account_futures) = self.send_initial_transfers().await?; + self.wait_account_routines(account_futures).await; + + let final_resultion = executor_future.await.unwrap_or(LoadtestResult::TestFailed); + + Ok(final_resultion) + } + + /// Verifies that onchain ETH balance for the main account is sufficient to run the loadtest. + async fn check_onchain_balance(&mut self) -> anyhow::Result<()> { + vlog::info!("Master Account: Checking onchain balance..."); + let master_wallet = &mut self.pool.master_wallet; + let ethereum = master_wallet.ethereum(&self.config.l1_rpc_address).await?; + let eth_balance = ethereum.balance().await?; + if eth_balance < 2u64.pow(17).into() { + anyhow::bail!( + "ETH balance on {:x} is too low to safely perform the loadtest: {}", + ethereum.client().sender_account(), + eth_balance + ); + } + + vlog::info!("Master Account: Onchain balance is OK"); + Ok(()) + } + + /// Mints the ERC-20 token on the main wallet. 
+ async fn mint(&mut self) -> anyhow::Result<()> { + vlog::info!("Master Account: Minting ERC20 token..."); + let mint_amount = self.amount_to_deposit() + self.amount_for_l1_distribution(); + + let master_wallet = &self.pool.master_wallet; + let mut ethereum = master_wallet.ethereum(&self.config.l1_rpc_address).await?; + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + + let token = self.config.main_token; + + let eth_balance = ethereum + .erc20_balance(master_wallet.address(), token) + .await?; + + // Only send the mint transaction if it's necessary. + if eth_balance > U256::from(mint_amount) { + vlog::info!("There is already enough money on the master balance"); + return Ok(()); + } + + let mint_tx_hash = ethereum + .mint_erc20(token, U256::from(u128::MAX), master_wallet.address()) + .await; + + let mint_tx_hash = match mint_tx_hash { + Err(error) => { + let balance = ethereum.balance().await; + let gas_price = ethereum.client().get_gas_price("executor").await; + + anyhow::bail!( + "{:?}, Balance: {:?}, Gas Price: {:?}", + error, + balance, + gas_price + ); + } + Ok(value) => value, + }; + + vlog::info!("Mint tx with hash {:?}", mint_tx_hash); + let receipt = ethereum.wait_for_tx(mint_tx_hash).await?; + self.assert_eth_tx_success(&receipt).await; + + let erc20_balance = ethereum + .erc20_balance(master_wallet.address(), token) + .await?; + assert!( + erc20_balance >= mint_amount.into(), + "Minting didn't result in tokens added to balance" + ); + + vlog::info!("Master Account: Minting is OK (balance: {})", erc20_balance); + Ok(()) + } + + /// Deposits the ERC-20 token to main wallet in L2. 
+ async fn deposit_to_master(&mut self) -> anyhow::Result<()> { + vlog::info!("Master Account: Performing an ERC-20 deposit to master"); + + let balance = self + .pool + .master_wallet + .get_balance(BlockNumber::Latest, self.l2_main_token) + .await?; + let necessary_balance = + U256::from(self.erc20_transfer_amount() * self.config.accounts_amount as u128); + + if balance > necessary_balance { + vlog::info!( + "Master account has enough money on l2, nothing to deposit. Current balance {:?},\ + necessary balance for initial transfers {:?}", + balance, + necessary_balance + ); + return Ok(()); + } + + let mut ethereum = self + .pool + .master_wallet + .ethereum(&self.config.l1_rpc_address) + .await?; + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + + let main_token = self.config.main_token; + let deposits_allowed = ethereum.is_erc20_deposit_approved(main_token, None).await?; + if !deposits_allowed { + // Approve ERC20 deposits. 
+ let approve_tx_hash = ethereum + .approve_erc20_token_deposits(main_token, None) + .await?; + let receipt = ethereum.wait_for_tx(approve_tx_hash).await?; + self.assert_eth_tx_success(&receipt).await; + } + + vlog::info!("Approved ERC20 deposits"); + let receipt = deposit_with_attempts( + &ethereum, + self.pool.master_wallet.address(), + main_token, + U256::from(self.amount_to_deposit()), + 3, + ) + .await?; + + self.assert_eth_tx_success(&receipt).await; + let mut priority_op_handle = receipt + .priority_op_handle(&self.pool.master_wallet.provider) + .unwrap_or_else(|| { + panic!( + "Can't get the handle for the deposit operation: {:?}", + receipt + ); + }); + + priority_op_handle + .polling_interval(POLLING_INTERVAL) + .unwrap(); + priority_op_handle + .commit_timeout(COMMIT_TIMEOUT) + .wait_for_commit() + .await?; + + vlog::info!("Master Account: ERC-20 deposit is OK"); + Ok(()) + } + + async fn deposit_eth_to_paymaster(&mut self) -> anyhow::Result<()> { + vlog::info!("Master Account: Performing an ETH deposit to the paymaster"); + let deposit_amount = U256::from(10u32).pow(U256::from(20u32)); // 100 ETH + let mut ethereum = self + .pool + .master_wallet + .ethereum(&self.config.l1_rpc_address) + .await?; + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + + let paymaster_address = self + .pool + .master_wallet + .provider + .get_testnet_paymaster() + .await? + .expect("No testnet paymaster is set"); + + // Perform the deposit itself. 
+ let receipt = deposit_with_attempts( + &ethereum, + paymaster_address, + ETHEREUM_ADDRESS, + deposit_amount, + 3, + ) + .await?; + + self.assert_eth_tx_success(&receipt).await; + let mut priority_op_handle = receipt + .priority_op_handle(&self.pool.master_wallet.provider) + .unwrap_or_else(|| { + panic!( + "Can't get the handle for the deposit operation: {:?}", + receipt + ); + }); + + priority_op_handle + .polling_interval(POLLING_INTERVAL) + .unwrap(); + priority_op_handle + .commit_timeout(COMMIT_TIMEOUT) + .wait_for_commit() + .await?; + + vlog::info!("Master Account: ETH deposit to the paymaster is OK"); + Ok(()) + } + + async fn send_initial_transfers_inner(&self, accounts_to_process: usize) -> anyhow::Result<()> { + let eth_to_distribute = self.eth_amount_to_distribute().await?; + let master_wallet = &self.pool.master_wallet; + + let l1_transfer_amount = + self.amount_for_l1_distribution() / self.config.accounts_amount as u128; + let l2_transfer_amount = self.erc20_transfer_amount(); + + let weight_of_l1_txs = self.execution_config.transaction_weights.l1_transactions + + self.execution_config.transaction_weights.deposit; + + let paymaster_address = self + .pool + .master_wallet + .provider + .get_testnet_paymaster() + .await? + .expect("No testnet paymaster is set"); + + let mut ethereum = master_wallet + .ethereum(&self.config.l1_rpc_address) + .await + .expect("Can't get Ethereum client"); + ethereum.set_confirmation_timeout(ETH_CONFIRMATION_TIMEOUT); + ethereum.set_polling_interval(ETH_POLLING_INTERVAL); + + // We request nonce each time, so that if one iteration was failed, it will be repeated on the next iteration. + let mut nonce = Nonce(master_wallet.get_nonce().await?); + + // 2 txs per account (1 ERC-20 & 1 ETH transfer) + 1 fee tx. 
+ let txs_amount = accounts_to_process * 2 + 1; + let mut handles = Vec::with_capacity(txs_amount); + + let mut eth_txs = Vec::with_capacity((txs_amount + 1) * 2); + let mut eth_nonce = ethereum.client().pending_nonce("loadnext").await?; + + for account in self.pool.accounts.iter().take(accounts_to_process) { + let target_address = account.wallet.address(); + + // Prior to sending funds in L2, we will send funds in L1 for accounts + // to be able to perform priority operations. + // We don't actually care whether transactions will be successful or not; at worst we will not use + // priority operations in test. + + // If we don't need to send l1 txs we don't need to distribute the funds + if weight_of_l1_txs != 0.0 { + let balance = ethereum + .client() + .eth_balance(target_address, "loadnext") + .await?; + + if balance < eth_to_distribute { + let options = Options { + nonce: Some(eth_nonce), + ..Default::default() + }; + let res = ethereum + .transfer( + ETHEREUM_ADDRESS.to_owned(), + eth_to_distribute, + target_address, + Some(options), + ) + .await + .unwrap(); + eth_nonce = eth_nonce.add(U256::one()); + eth_txs.push(res); + } + + let ethereum_erc20_balance = ethereum + .erc20_balance(target_address, self.config.main_token) + .await?; + + if ethereum_erc20_balance < U256::from(l1_transfer_amount) { + let options = Options { + nonce: Some(eth_nonce), + ..Default::default() + }; + let res = ethereum + .transfer( + self.config.main_token, + U256::from(l1_transfer_amount), + target_address, + Some(options), + ) + .await?; + eth_nonce = eth_nonce.add(U256::one()); + eth_txs.push(res); + } + } + + // And then we will prepare an L2 transaction to send ERC20 token (for transfers and fees). 
+ let mut builder = master_wallet + .start_transfer() + .to(target_address) + .amount(l2_transfer_amount.into()) + .token(self.l2_main_token) + .nonce(nonce); + + let paymaster_params = get_approval_based_paymaster_input_for_estimation( + paymaster_address, + self.l2_main_token, + ); + + let fee = builder.estimate_fee(Some(paymaster_params)).await?; + builder = builder.fee(fee.clone()); + + let paymaster_params = get_approval_based_paymaster_input( + paymaster_address, + self.l2_main_token, + fee.max_total_fee(), + Vec::new(), + ); + builder = builder.fee(fee); + builder = builder.paymaster_params(paymaster_params); + + let handle_erc20 = builder.send().await?; + handles.push(handle_erc20); + + *nonce += 1; + } + + // Wait for transactions to be committed, if at least one of them fails, + // return error. + for mut handle in handles { + handle.polling_interval(POLLING_INTERVAL).unwrap(); + + let result = handle + .commit_timeout(COMMIT_TIMEOUT) + .wait_for_commit() + .await?; + if result.status == Some(U64::zero()) { + return Err(anyhow::format_err!("Transfer failed")); + } + } + + vlog::info!( + "Master account: Wait for ethereum txs confirmations, {:?}", + &eth_txs + ); + for eth_tx in eth_txs { + ethereum.wait_for_tx(eth_tx).await?; + } + + Ok(()) + } + + /// Returns the amount sufficient for wallets to perform many operations. + fn erc20_transfer_amount(&self) -> u128 { + let accounts_amount = self.config.accounts_amount; + let account_balance = self.amount_to_deposit(); + let for_fees = u64::MAX; // Leave some spare funds on the master account for fees. + let funds_to_distribute = account_balance - u128::from(for_fees); + funds_to_distribute / accounts_amount as u128 + } + + /// Initializes the loadtest by doing the following: + /// + /// - Spawning the `ReportCollector`. + /// - Distributing ERC-20 token in L2 among test wallets via `Transfer` operation. + /// - Distributing ETH in L1 among test wallets in order to make them able to perform priority operations. 
+ /// - Spawning test account routine futures. + /// - Collecting all the spawned tasks and returning them to the caller. + async fn send_initial_transfers( + &mut self, + ) -> anyhow::Result<(JoinHandle, Vec>)> { + vlog::info!("Master Account: Sending initial transfers"); + // How many times we will resend a batch. + const MAX_RETRIES: usize = 3; + + // Prepare channels for the report collector. + let (report_sender, report_receiver) = mpsc::channel(256); + + let report_collector = ReportCollector::new(report_receiver, self.config.expected_tx_count); + let report_collector_future = tokio::spawn(report_collector.run()); + + let config = &self.config; + let accounts_amount = config.accounts_amount; + let addresses = self.pool.addresses.clone(); + let paymaster_address = self + .pool + .master_wallet + .provider + .get_testnet_paymaster() + .await? + .expect("No testnet paymaster is set"); + + let mut retry_counter = 0; + let mut accounts_processed = 0; + + let mut account_futures = Vec::new(); + while accounts_processed != accounts_amount { + if retry_counter > MAX_RETRIES { + anyhow::bail!("Reached max amount of retries when sending initial transfers"); + } + + let accounts_left = accounts_amount - accounts_processed; + let max_accounts_per_iter = MAX_OUTSTANDING_NONCE / 2; // We send two transfers per account: ERC-20 and ETH. + let accounts_to_process = std::cmp::min(accounts_left, max_accounts_per_iter); + + if let Err(err) = self.send_initial_transfers_inner(accounts_to_process).await { + vlog::warn!( + "Iteration of the initial funds distribution failed: {}", + err + ); + retry_counter += 1; + continue; + } + + vlog::info!( + "[{}/{}] Accounts processed", + accounts_processed, + accounts_amount + ); + + retry_counter = 0; + accounts_processed += accounts_to_process; + + let contract_execution_params = self.execution_config.contract_execution_params.clone(); + // Spawn each account lifespan. 
+ let main_token = self.l2_main_token; + let new_account_futures = + self.pool + .accounts + .drain(..accounts_to_process) + .map(|wallet| { + let account = AccountLifespan::new( + config, + contract_execution_params.clone(), + addresses.clone(), + wallet, + report_sender.clone(), + main_token, + paymaster_address, + ); + tokio::spawn(account.run()) + }); + + account_futures.extend(new_account_futures); + } + + assert!( + self.pool.accounts.is_empty(), + "Some accounts were not drained" + ); + vlog::info!("All the initial transfers are completed"); + + Ok((report_collector_future, account_futures)) + } + + /// Calculates amount of ETH to be distributed per account in order to make them + /// able to perform priority operations. + async fn eth_amount_to_distribute(&self) -> anyhow::Result { + let ethereum = self + .pool + .master_wallet + .ethereum(&self.config.l1_rpc_address) + .await + .expect("Can't get Ethereum client"); + + // Assuming that gas prices on testnets are somewhat stable, we will consider it a constant. + let average_gas_price = ethereum.client().get_gas_price("executor").await?; + + Ok((average_gas_price + U256::from(DEFAULT_PRIORITY_FEE)) + * MAX_L1_TRANSACTION_GAS_LIMIT + * MAX_L1_TRANSACTIONS) + } + + /// Waits for all the test account futures to be completed. + async fn wait_account_routines(&self, account_futures: Vec>) { + vlog::info!("Waiting for the account futures to be completed..."); + join_all(account_futures).await; + vlog::info!("All the spawned tasks are completed"); + } + + /// Returns the amount of funds to be deposited on the main account in L2. + /// Amount is chosen to be big enough to not worry about precisely calculating the remaining balances on accounts, + /// but also to not be close to the supported limits in zkSync. + fn amount_to_deposit(&self) -> u128 { + u128::MAX >> 32 + } + + /// Returns the amount of funds to be distributed between accounts on l1. 
+ fn amount_for_l1_distribution(&self) -> u128 { + u128::MAX >> 32 + } + + /// Ensures that Ethereum transaction was successfully executed. + async fn assert_eth_tx_success(&self, receipt: &TransactionReceipt) { + if receipt.status != Some(1u64.into()) { + let master_wallet = &self.pool.master_wallet; + let ethereum = master_wallet + .ethereum(&self.config.l1_rpc_address) + .await + .expect("Can't get Ethereum client"); + let failure_reason = ethereum + .client() + .failure_reason(receipt.transaction_hash) + .await + .expect("Can't connect to the Ethereum node"); + panic!( + "Ethereum transaction unexpectedly failed.\nReceipt: {:#?}\nFailure reason: {:#?}", + receipt, failure_reason + ); + } + } +} + +async fn deposit_with_attempts( + ethereum: &EthereumProvider, + to: Address, + token: Address, + deposit_amount: U256, + max_attempts: usize, +) -> anyhow::Result { + let nonce = ethereum.client().current_nonce("loadtest").await.unwrap(); + for attempt in 1..=max_attempts { + let pending_block_base_fee_per_gas = ethereum + .client() + .get_pending_block_base_fee_per_gas("loadtest") + .await + .unwrap(); + + let max_priority_fee_per_gas = U256::from(DEFAULT_PRIORITY_FEE * 10 * attempt as u64); + let max_fee_per_gas = U256::from( + (pending_block_base_fee_per_gas.as_u64() as f64 * (1.0 + 0.1 * attempt as f64)) as u64, + ) + max_priority_fee_per_gas; + + let options = Options { + max_fee_per_gas: Some(max_fee_per_gas), + max_priority_fee_per_gas: Some(max_priority_fee_per_gas), + nonce: Some(nonce), + ..Default::default() + }; + let deposit_tx_hash = ethereum + .deposit(token, deposit_amount, to, None, None, Some(options)) + .await?; + + vlog::info!("Deposit with tx_hash {:?}", deposit_tx_hash); + + // Wait for the corresponding priority operation to be committed in zkSync. 
+ match ethereum.wait_for_tx(deposit_tx_hash).await { + Ok(eth_receipt) => { + return Ok(eth_receipt); + } + Err(err) => { + vlog::error!("Deposit error: {:?}", err); + } + }; + } + anyhow::bail!("Max attempts limits reached"); +} diff --git a/core/tests/loadnext/src/fs_utils.rs b/core/tests/loadnext/src/fs_utils.rs new file mode 100644 index 000000000000..b38b95556703 --- /dev/null +++ b/core/tests/loadnext/src/fs_utils.rs @@ -0,0 +1,109 @@ +//! Utilities used for reading tokens, contracts bytecode and ABI from the +//! filesystem. + +use std::fs::File; +use std::io::BufReader; +use std::path::Path; + +use serde::Deserialize; + +use zksync_types::network::Network; +use zksync_types::{ethabi::Contract, Address}; + +/// A token stored in `etc/tokens/{network}.json` files. +#[derive(Debug, Deserialize)] +pub struct Token { + pub name: String, + pub symbol: String, + pub decimals: u8, + pub address: Address, +} + +#[derive(Debug, Clone)] +pub struct TestContract { + /// Contract bytecode to be used for sending deploy transaction. + pub bytecode: Vec, + /// Contract ABI. + pub contract: Contract, + + pub factory_deps: Vec>, +} + +pub fn read_tokens(network: Network) -> anyhow::Result> { + let home = std::env::var("ZKSYNC_HOME")?; + let path = Path::new(&home); + let path = path.join(format!("etc/tokens/{}.json", network)); + + let file = File::open(path)?; + let reader = BufReader::new(file); + + Ok(serde_json::from_reader(reader)?) +} + +fn extract_bytecode(artifact: &serde_json::Value) -> anyhow::Result> { + let bytecode = artifact["bytecode"] + .as_str() + .ok_or_else(|| anyhow::anyhow!("Failed to parse contract bytecode from artifact",))?; + + if let Some(stripped) = bytecode.strip_prefix("0x") { + hex::decode(stripped) + } else { + hex::decode(bytecode) + } + .map_err(|e| e.into()) +} + +/// Reads test contract bytecode and its ABI. 
+fn read_contract_dir(path: &Path) -> anyhow::Result { + use serde_json::Value; + + let mut artifact: Value = + serde_json::from_reader(File::open(path.join("LoadnextContract.json"))?)?; + + let bytecode = extract_bytecode(&artifact)?; + + let abi = artifact["abi"].take(); + let contract: Contract = serde_json::from_value(abi)?; + + let factory_dep: Value = serde_json::from_reader(File::open(path.join("Foo.json"))?)?; + let factory_dep_bytecode = extract_bytecode(&factory_dep)?; + + anyhow::ensure!( + contract.functions().count() > 0, + "Invalid contract: no methods defined: {:?}", + path + ); + anyhow::ensure!( + contract.events().count() > 0, + "Invalid contract: no events defined: {:?}", + path + ); + + Ok(TestContract { + bytecode, + contract, + factory_deps: vec![factory_dep_bytecode], + }) +} + +pub fn loadnext_contract(path: &Path) -> anyhow::Result { + let path = path.join("artifacts-zk/contracts/loadnext/loadnext_contract.sol"); + read_contract_dir(&path) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::path::PathBuf; + + #[test] + fn check_read_test_contract() { + let test_contracts_path = { + let home = std::env::var("ZKSYNC_HOME").unwrap(); + let path = PathBuf::from(&home); + path.join("etc/contracts-test-data") + }; + + loadnext_contract(&test_contracts_path).unwrap(); + } +} diff --git a/core/tests/loadnext/src/lib.rs b/core/tests/loadnext/src/lib.rs new file mode 100644 index 000000000000..57a9fa32d74b --- /dev/null +++ b/core/tests/loadnext/src/lib.rs @@ -0,0 +1,14 @@ +#![allow(clippy::derive_partial_eq_without_eq)] + +pub mod account; +pub mod account_pool; +pub mod all; +pub mod command; +pub mod config; +pub mod constants; +pub mod corrupted_tx; +pub mod executor; +pub mod fs_utils; +pub mod report; +pub mod report_collector; +pub mod rng; diff --git a/core/tests/loadnext/src/main.rs b/core/tests/loadnext/src/main.rs new file mode 100644 index 000000000000..2a2ac778942b --- /dev/null +++ b/core/tests/loadnext/src/main.rs @@ -0,0 +1,47 
@@ +//! Loadtest: an utility to stress-test the zkSync server. +//! +//! In order to launch it, you must provide required environmental variables, for details see `README.md`. +//! Without required variables provided, test is launched in the localhost/development mode with some hard-coded +//! values to check the local zkSync deployment. + +use loadnext::{ + command::{ExplorerApiRequestType, TxType}, + config::{ExecutionConfig, LoadtestConfig}, + executor::Executor, + report_collector::LoadtestResult, +}; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + vlog::init(); + + let config = LoadtestConfig::from_env() + .expect("Config parameters should be loaded from env or from default values"); + let execution_config = ExecutionConfig::from_env(); + TxType::initialize_weights(&execution_config.transaction_weights); + ExplorerApiRequestType::initialize_weights(&execution_config.explorer_api_config_weights); + + vlog::info!( + "Run with tx weights: {:?}", + execution_config.transaction_weights + ); + + vlog::info!( + "Run explorer api weights: {:?}", + execution_config.explorer_api_config_weights + ); + + let mut executor = Executor::new(config, execution_config).await?; + let final_resolution = executor.start().await; + + match final_resolution { + LoadtestResult::TestPassed => { + vlog::info!("Test passed"); + Ok(()) + } + LoadtestResult::TestFailed => { + vlog::error!("Test failed"); + Err(anyhow::anyhow!("Test failed")) + } + } +} diff --git a/core/tests/loadnext/src/report.rs b/core/tests/loadnext/src/report.rs new file mode 100644 index 000000000000..9aab28ed3b96 --- /dev/null +++ b/core/tests/loadnext/src/report.rs @@ -0,0 +1,230 @@ +use std::time::Duration; + +use zksync_types::Address; + +use crate::account::ExecutionType; +use crate::{ + all::All, + command::{ + ApiRequest, ApiRequestType, ExplorerApiRequestType, SubscriptionType, TxCommand, TxType, + }, +}; + +/// Report for any operation done by loadtest. 
+/// +/// Reports are yielded by `Executor` or `AccountLifespan` and are collected +/// by the `ReportCollector`. +/// +/// Reports are expected to contain any kind of information useful for the analysis +/// and deciding whether the test was passed. +#[derive(Debug, Clone)] +pub struct Report { + /// Address of the wallet that performed the action. + pub reporter: Address, + /// Obtained outcome of action. + pub label: ReportLabel, + /// Type of the action. + pub action: ActionType, + /// Amount of retries that it took the wallet to finish the action. + pub retries: usize, + /// Duration of the latest execution attempt. + pub time: Duration, +} + +/// Builder structure for `Report`. +#[derive(Debug, Clone)] +pub struct ReportBuilder { + report: Report, +} + +impl Default for ReportBuilder { + fn default() -> Self { + Self::new() + } +} + +impl ReportBuilder { + pub fn new() -> Self { + Self { + report: Report { + reporter: Default::default(), + label: ReportLabel::done(), + action: ActionType::Tx(TxActionType::Execute(ExecutionType::L2)), + retries: 0, + time: Default::default(), + }, + } + } + + pub fn reporter(mut self, reporter: Address) -> Self { + self.report.reporter = reporter; + self + } + + pub fn label(mut self, label: ReportLabel) -> Self { + self.report.label = label; + self + } + + pub fn action(mut self, action: impl Into) -> Self { + self.report.action = action.into(); + self + } + + pub fn retries(mut self, retries: usize) -> Self { + self.report.retries = retries; + self + } + + pub fn time(mut self, time: Duration) -> Self { + self.report.time = time; + self + } + + pub fn finish(self) -> Report { + self.report + } +} + +/// Denotes the outcome of a performed action. 
+#[derive(Debug, Clone)] +pub enum ReportLabel { + ActionDone, + ActionSkipped { reason: String }, + ActionFailed { error: String }, +} + +impl ReportLabel { + pub fn done() -> Self { + Self::ActionDone + } + + pub fn skipped(reason: &str) -> Self { + Self::ActionSkipped { + reason: reason.into(), + } + } + + pub fn failed(error: &str) -> Self { + Self::ActionFailed { + error: error.into(), + } + } +} + +/// Denotes the type of executed transaction. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum TxActionType { + Withdraw, + Deposit, + DeployContract, + Execute(ExecutionType), +} + +impl All for TxActionType { + fn all() -> &'static [Self] { + const ALL: &[TxActionType] = &[ + TxActionType::Withdraw, + TxActionType::Deposit, + TxActionType::DeployContract, + TxActionType::Execute(ExecutionType::L2), + TxActionType::Execute(ExecutionType::L1), + ]; + + ALL + } +} + +impl From for TxActionType { + fn from(command: TxType) -> Self { + match command { + TxType::Deposit => Self::Deposit, + TxType::WithdrawToSelf | TxType::WithdrawToOther => Self::Withdraw, + TxType::L2Execute => Self::Execute(ExecutionType::L2), + TxType::L1Execute => Self::Execute(ExecutionType::L1), + TxType::DeployContract => Self::DeployContract, + } + } +} + +/// Denotes the type of the performed API action. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ApiActionType { + BlockWithTxs, + Balance, + GetLogs, +} + +impl All for ApiActionType { + fn all() -> &'static [Self] { + const ALL: &[ApiActionType] = &[ + ApiActionType::BlockWithTxs, + ApiActionType::Balance, + ApiActionType::GetLogs, + ]; + + ALL + } +} + +impl From for ApiActionType { + fn from(request: ApiRequest) -> Self { + match request.request_type { + ApiRequestType::BlockWithTxs => Self::BlockWithTxs, + ApiRequestType::Balance => Self::Balance, + ApiRequestType::GetLogs => Self::GetLogs, + } + } +} + +/// Generic wrapper of all the actions that can be done in loadtest. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ActionType { + Tx(TxActionType), + Api(ApiActionType), + ExplorerApi(ExplorerApiRequestType), + Subscription(SubscriptionType), +} + +impl From for ActionType { + fn from(action: ExplorerApiRequestType) -> Self { + Self::ExplorerApi(action) + } +} + +impl From for ActionType { + fn from(action: TxActionType) -> Self { + Self::Tx(action) + } +} + +impl From for ActionType { + fn from(action: ApiActionType) -> Self { + Self::Api(action) + } +} + +impl From for ActionType { + fn from(command: TxCommand) -> Self { + Self::Tx(command.command_type.into()) + } +} + +impl From for ActionType { + fn from(subscription_type: SubscriptionType) -> Self { + Self::Subscription(subscription_type) + } +} + +impl ActionType { + /// Returns the vector containing the list of all the supported actions. + /// May be useful in different collectors to initialize their internal states. + pub fn all() -> Vec { + TxActionType::all() + .iter() + .copied() + .map(Self::from) + .chain(ApiActionType::all().iter().copied().map(Self::from)) + .collect() + } +} diff --git a/core/tests/loadnext/src/report_collector/metrics_collector.rs b/core/tests/loadnext/src/report_collector/metrics_collector.rs new file mode 100644 index 000000000000..f4d5edee5117 --- /dev/null +++ b/core/tests/loadnext/src/report_collector/metrics_collector.rs @@ -0,0 +1,244 @@ +use std::{ + collections::{BTreeMap, HashMap}, + time::Duration, +}; + +use crate::report::ActionType; + +#[derive(Debug, Clone)] +pub struct TimeHistogram { + /// Supported time ranges. + ranges: Vec<(u64, u64)>, + /// Mapping from the (lower time range) to (amount of elements) + histogram: BTreeMap, + /// Total entries in the histogram. + total: usize, +} + +impl Default for TimeHistogram { + fn default() -> Self { + Self::new() + } +} + +impl TimeHistogram { + pub fn new() -> Self { + // Ranges from the 0 to 1000 ms with windows of 100 ms. 
+ let sub_sec_ranges = (0..10).map(|window_idx| Self::window(window_idx, 100)); + // Ranges from 1 second to 20 seconds with windows of 1 second. + let sec_ranges = (1..20).map(|window_idx| Self::window(window_idx, 1000)); + // Range for (20 sec; MAX). + let rest_range = std::iter::once((20_000u64, u64::max_value())); + + let ranges: Vec<_> = sub_sec_ranges.chain(sec_ranges).chain(rest_range).collect(); + let mut histogram = BTreeMap::new(); + + for &(start, _) in ranges.iter() { + histogram.insert(start, 0); + } + + Self { + ranges, + histogram, + total: 0, + } + } + + pub fn add_metric(&mut self, time: Duration) { + let range = self.range_for(time); + + self.histogram.entry(range).and_modify(|count| *count += 1); + self.total += 1; + } + + pub fn is_empty(&self) -> bool { + self.total == 0 + } + + /// Returns the time range for the requested distribution percentile. + pub fn percentile(&self, percentile: u64) -> (Duration, Duration) { + let lower_gap_float = self.total as f64 * percentile as f64 / 100.0; + let lower_gap = lower_gap_float.round() as usize; + debug_assert!(lower_gap <= self.total); + + let mut amount = 0; + for (range_start, current_amount) in self.histogram.iter() { + amount += current_amount; + + if amount >= lower_gap { + let (range_start, range_end) = self.full_range_for(*range_start); + return ( + Duration::from_millis(range_start), + Duration::from_millis(range_end), + ); + } + } + + unreachable!("Range for {} percentile was not found", percentile); + } + + /// Returns the histogram entry key for the provided duration. + fn range_for(&self, time: Duration) -> u64 { + let duration_millis = time.as_millis() as u64; + + self.full_range_for(duration_millis).0 + } + + /// Returns the full time range for the provided duration. 
+    fn full_range_for(&self, duration_millis: u64) -> (u64, u64) {
+        debug_assert!(self.ranges[0].0 == 0, "Ranges don't start at 0");
+
+        for &(range_start, range_end) in self.ranges.iter().rev() {
+            if duration_millis >= range_start {
+                return (range_start, range_end);
+            }
+        }
+
+        // First range starts from 0, and negative ranges are prohibited.
+        unreachable!("Range for duration {} was not found", duration_millis);
+    }
+
+    fn window(window_idx: u64, window_size: u64) -> (u64, u64) {
+        let start = window_idx * window_size;
+        let end = start + window_size - 1;
+
+        (start, end)
+    }
+}
+
+/// Collector for the execution time metrics.
+///
+/// It builds a distribution histogram for each type of action, thus reported results are represented
+/// by a range window rather than a single concrete number.
+#[derive(Debug, Clone)]
+pub struct MetricsCollector {
+    pub action_stats: HashMap<ActionType, TimeHistogram>,
+}
+
+impl Default for MetricsCollector {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl MetricsCollector {
+    pub fn new() -> Self {
+        Self {
+            action_stats: ActionType::all()
+                .into_iter()
+                .map(|action| (action, TimeHistogram::new()))
+                .collect(),
+        }
+    }
+
+    pub fn add_metric(&mut self, action: ActionType, time: Duration) {
+        self.action_stats
+            .entry(action)
+            .and_modify(|hist| hist.add_metric(time));
+    }
+
+    pub fn report(&self) {
+        vlog::info!("Action: [10 percentile, 50 percentile, 90 percentile]");
+        for (action, histogram) in self.action_stats.iter() {
+            // Only report data that was actually gathered.
+            if !histogram.is_empty() {
+                vlog::info!(
+                    "{:?}: [>{}ms >{}ms >{}ms]",
+                    action,
+                    histogram.percentile(10).0.as_millis(),
+                    histogram.percentile(50).0.as_millis(),
+                    histogram.percentile(90).0.as_millis(),
+                );
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn histogram_window_size() {
+        // Vector of ((window_idx, window_size), expected_range).
+ let test_vector = [ + ((0, 100), (0, 99)), + ((1, 100), (100, 199)), + ((2, 1000), (2000, 2999)), + ]; + + for &((window_idx, window_size), expected_result) in test_vector.iter() { + assert_eq!( + TimeHistogram::window(window_idx, window_size), + expected_result + ); + } + } + + /// Checks that the whole diapason of u64 is covered by histogram windows. + #[test] + fn histogram_ranges() { + let histogram = TimeHistogram::new(); + // Check that we start at 0 and end at max. + assert_eq!(histogram.ranges[0].0, 0); + assert_eq!(histogram.ranges.last().unwrap().1, u64::max_value()); + + // Check that we go through all the range without gaps. + for idx in 0..(histogram.ranges.len() - 1) { + assert_eq!(histogram.ranges[idx].1, histogram.ranges[idx + 1].0 - 1); + } + } + + #[test] + fn histogram_item_addition() { + let mut histogram = TimeHistogram::new(); + + let (first_range_start, first_range_end) = histogram.ranges[0]; + let (second_range_start, _) = histogram.ranges[1]; + let (last_range_start, last_range_end) = *histogram.ranges.last().unwrap(); + + histogram.add_metric(Duration::from_millis(first_range_start)); + histogram.add_metric(Duration::from_millis(first_range_end)); + histogram.add_metric(Duration::from_millis(second_range_start)); + histogram.add_metric(Duration::from_millis(last_range_end)); + + assert_eq!(histogram.histogram[&first_range_start], 2); + assert_eq!(histogram.histogram[&second_range_start], 1); + assert_eq!(histogram.histogram[&last_range_start], 1); + } + + #[test] + fn histogram_percentile() { + let mut histogram = TimeHistogram::new(); + let first_range = ( + Duration::from_millis(histogram.ranges[0].0), + Duration::from_millis(histogram.ranges[0].1), + ); + let second_range = ( + Duration::from_millis(histogram.ranges[1].0), + Duration::from_millis(histogram.ranges[1].1), + ); + let third_range = ( + Duration::from_millis(histogram.ranges[2].0), + Duration::from_millis(histogram.ranges[2].1), + ); + + 
histogram.add_metric(Duration::from_millis(0)); + for percentile in &[0, 10, 50, 90, 100] { + assert_eq!(histogram.percentile(*percentile), first_range); + } + + histogram.add_metric(second_range.0); + for percentile in &[0, 10] { + assert_eq!(histogram.percentile(*percentile), first_range); + } + for percentile in &[90, 100] { + assert_eq!(histogram.percentile(*percentile), second_range); + } + + histogram.add_metric(third_range.0); + assert_eq!(histogram.percentile(0), first_range); + assert_eq!(histogram.percentile(50), second_range); + assert_eq!(histogram.percentile(100), third_range); + } +} diff --git a/core/tests/loadnext/src/report_collector/mod.rs b/core/tests/loadnext/src/report_collector/mod.rs new file mode 100644 index 000000000000..5285f51d0fd8 --- /dev/null +++ b/core/tests/loadnext/src/report_collector/mod.rs @@ -0,0 +1,112 @@ +use futures::{channel::mpsc::Receiver, StreamExt}; +use operation_results_collector::OperationResultsCollector; + +use crate::{ + report::{Report, ReportLabel}, + report_collector::metrics_collector::MetricsCollector, +}; + +mod metrics_collector; +mod operation_results_collector; + +/// Decision on whether loadtest considered passed or failed. +#[derive(Debug, Clone, Copy)] +pub enum LoadtestResult { + TestPassed, + TestFailed, +} + +/// ReportCollector is an entity capable of analyzing everything that happens in the loadtest. +/// +/// It is designed to be separated from the actual execution, so that logic of the execution does not +/// interfere with the logic of analyzing, reporting and decision making. +/// +/// Report collector by its nature only receives reports and uses different collectors in order to analyze them. +/// Currently, only the following collectors are used: +/// +/// - MetricsCollector, which builds time distribution histograms for each kind of performed action. +/// - OperationResultsCollector, a primitive collector that counts the amount of failures and decides whether +/// test is passed. 
+///
+/// Other possible collectors that can be implemented:
+///
+/// - ScriptCollector, which records all the actions (including wallet private keys and signatures), which makes it
+///   possible to reproduce test once again.
+/// - RetryCollector, which analyzes the average amount of retries that have to be made in order to make operation
+///   succeed.
+/// - PrometheusCollector, which exposes the ongoing loadtest results to grafana via prometheus.
+///
+/// Note that if you'll decide to implement a new collector, be sure that adding an entry in it is cheap: if you want
+/// to do some IO (file or web), it's better to divide collector in two parts: an actor which receives commands through
+/// a channel, and a cheap collector interface which just sends commands to the channel. This is strongly recommended,
+/// since there are many reports generated during the loadtest, and otherwise it will result in the report channel
+/// queue uncontrollable growth.
+#[derive(Debug)]
+pub struct ReportCollector {
+    reports_stream: Receiver<Report>,
+    metrics_collector: MetricsCollector,
+    operations_results_collector: OperationResultsCollector,
+    expected_tx_count: Option<usize>,
+}
+
+impl ReportCollector {
+    pub fn new(reports_stream: Receiver<Report>, expected_tx_count: Option<usize>) -> Self {
+        Self {
+            reports_stream,
+            metrics_collector: MetricsCollector::new(),
+            operations_results_collector: OperationResultsCollector::new(),
+            expected_tx_count,
+        }
+    }
+
+    pub async fn run(mut self) -> LoadtestResult {
+        while let Some(report) = self.reports_stream.next().await {
+            vlog::trace!("Report: {:?}", &report);
+
+            if matches!(&report.label, ReportLabel::ActionDone) {
+                // We only count successfully created statistics.
+                self.metrics_collector
+                    .add_metric(report.action, report.time);
+            }
+
+            self.operations_results_collector
+                .add_status(&report.label, report.action);
+
+            // Report failure, if it exists.
+ if let ReportLabel::ActionFailed { error } = &report.label { + vlog::warn!("Operation failed: {}", error); + } + } + + // All the receivers are gone, it's likely the end of the test. + // Now we can output the statistics. + self.metrics_collector.report(); + self.operations_results_collector.report(); + + self.final_resolution() + } + + fn final_resolution(&self) -> LoadtestResult { + let is_tx_count_acceptable = if let Some(expected_tx_count) = self.expected_tx_count { + const MIN_ACCEPTABLE_DELTA: f64 = -10.0; + const MAX_ACCEPTABLE_DELTA: f64 = 100.0; + + let actual_count = self.operations_results_collector.tx_results.successes() as f64; + let delta = + 100.0 * (actual_count - expected_tx_count as f64) / (expected_tx_count as f64); + vlog::info!("Expected number of processed txs: {}", expected_tx_count); + vlog::info!("Actual number of processed txs: {}", actual_count); + vlog::info!("Delta: {:.1}%", delta); + + (MIN_ACCEPTABLE_DELTA..=MAX_ACCEPTABLE_DELTA).contains(&delta) + } else { + true + }; + + if !is_tx_count_acceptable || self.operations_results_collector.tx_results.failures() > 0 { + LoadtestResult::TestFailed + } else { + LoadtestResult::TestPassed + } + } +} diff --git a/core/tests/loadnext/src/report_collector/operation_results_collector.rs b/core/tests/loadnext/src/report_collector/operation_results_collector.rs new file mode 100644 index 000000000000..8d066005de4a --- /dev/null +++ b/core/tests/loadnext/src/report_collector/operation_results_collector.rs @@ -0,0 +1,91 @@ +use crate::report::{ActionType, ReportLabel}; + +/// Collector that analyzes the outcomes of the performed operations. +/// Currently it's solely capable of deciding whether test was failed or not. +/// API requests are counted separately. 
+#[derive(Debug, Clone, Default)] +pub struct OperationResultsCollector { + pub(super) tx_results: ResultCollector, + api_requests_results: ResultCollector, + subscriptions_results: ResultCollector, + explorer_api_requests_results: ResultCollector, +} + +#[derive(Debug, Clone, Default)] +pub(super) struct ResultCollector { + successes: u64, + skipped: u64, + failures: u64, +} + +impl ResultCollector { + pub fn add_status(&mut self, status: &ReportLabel) { + match status { + ReportLabel::ActionDone => self.successes += 1, + ReportLabel::ActionSkipped { .. } => self.skipped += 1, + ReportLabel::ActionFailed { .. } => self.failures += 1, + } + } + + pub fn successes(&self) -> u64 { + self.successes + } + + pub fn skipped(&self) -> u64 { + self.skipped + } + + pub fn failures(&self) -> u64 { + self.failures + } + + pub fn total(&self) -> u64 { + self.successes + self.skipped + self.failures + } +} + +impl OperationResultsCollector { + pub fn new() -> Self { + Self::default() + } + + pub fn add_status(&mut self, status: &ReportLabel, action_type: ActionType) { + match action_type { + ActionType::Tx(_) => self.tx_results.add_status(status), + ActionType::Api(_) => self.api_requests_results.add_status(status), + ActionType::Subscription(_) => self.subscriptions_results.add_status(status), + ActionType::ExplorerApi(_) => self.explorer_api_requests_results.add_status(status), + } + } + + pub fn report(&self) { + vlog::info!( + "Loadtest status: {} successful operations, {} skipped, {} failures. {} actions total.", + self.tx_results.successes(), + self.tx_results.skipped(), + self.tx_results.failures(), + self.tx_results.total() + ); + vlog::info!( + "API requests stats: {} successful, {} skipped, {} failures. {} total. ", + self.api_requests_results.successes(), + self.api_requests_results.skipped(), + self.api_requests_results.failures(), + self.api_requests_results.total() + ); + vlog::info!( + "Subscriptions stats: {} successful, {} skipped, {} failures. {} total. 
", + self.subscriptions_results.successes(), + self.subscriptions_results.skipped(), + self.subscriptions_results.failures(), + self.subscriptions_results.total() + ); + vlog::info!( + "Explorer api stats: {} successful, {} skipped, {} failures. {} total. ", + self.explorer_api_requests_results.successes(), + self.explorer_api_requests_results.skipped(), + self.explorer_api_requests_results.failures(), + self.explorer_api_requests_results.total() + ); + } +} diff --git a/core/tests/loadnext/src/rng.rs b/core/tests/loadnext/src/rng.rs new file mode 100644 index 000000000000..4d5ab84c714b --- /dev/null +++ b/core/tests/loadnext/src/rng.rs @@ -0,0 +1,103 @@ +use std::convert::TryInto; + +use rand::{rngs::SmallRng, seq::SliceRandom, thread_rng, RngCore, SeedableRng}; + +use zksync::web3::signing::keccak256; +use zksync_types::H256; + +use crate::all::AllWeighted; + +// SmallRng seed type is [u8; 32]. +const SEED_SIZE: usize = 32; + +#[derive(Debug, Clone)] +pub struct LoadtestRng { + pub seed: [u8; SEED_SIZE], + rng: SmallRng, +} + +impl LoadtestRng { + pub fn new_generic(seed_hex: Option) -> Self { + let seed: [u8; SEED_SIZE] = seed_hex + .map(|seed_str| { + let mut output = [0u8; SEED_SIZE]; + let decoded_seed = hex::decode(seed_str).expect("Incorrect seed hex"); + output.copy_from_slice(decoded_seed.as_ref()); + output + }) + .unwrap_or_else(|| { + let rng = &mut thread_rng(); + let mut output = [0u8; SEED_SIZE]; + rng.fill_bytes(&mut output); + + output + }); + + let rng = SmallRng::from_seed(seed); + + Self { seed, rng } + } + + pub fn seed_hex(&self) -> String { + hex::encode(self.seed) + } + + pub fn derive(&self, eth_pk: H256) -> Self { + // We chain the current seed bytes and the Ethereum private key together, + // and then calculate the hash of this data. + // This way we obtain a derived seed, unique for each wallet, which will result in + // an uniques set of operations for each account. 
+        let input_bytes: Vec<u8> = self
+            .seed
+            .iter()
+            .flat_map(|val| val.to_be_bytes().to_vec())
+            .chain(eth_pk.as_bytes().iter().copied())
+            .collect();
+        let data_hash = keccak256(input_bytes.as_ref());
+        let new_seed = data_hash[..SEED_SIZE].try_into().unwrap();
+
+        let rng = SmallRng::from_seed(new_seed);
+        Self {
+            seed: new_seed,
+            rng,
+        }
+    }
+}
+
+impl RngCore for LoadtestRng {
+    fn next_u32(&mut self) -> u32 {
+        self.rng.next_u32()
+    }
+
+    fn next_u64(&mut self) -> u64 {
+        self.rng.next_u64()
+    }
+
+    fn fill_bytes(&mut self, dest: &mut [u8]) {
+        self.rng.fill_bytes(dest)
+    }
+
+    fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand::Error> {
+        self.rng.try_fill_bytes(dest)
+    }
+}
+
+pub trait Random {
+    fn random(rng: &mut LoadtestRng) -> Self;
+}
+
+pub trait WeightedRandom {
+    fn random(rng: &mut LoadtestRng) -> Self;
+}
+
+impl<V> WeightedRandom for V
+where
+    V: 'static + AllWeighted + Sized + Copy,
+{
+    fn random(rng: &mut LoadtestRng) -> Self {
+        V::all_weighted()
+            .choose_weighted(rng, |item| item.1)
+            .unwrap()
+            .0
+    }
+}
diff --git a/core/tests/revert-test/package.json b/core/tests/revert-test/package.json
new file mode 100644
index 000000000000..275b61a2b6ba
--- /dev/null
+++ b/core/tests/revert-test/package.json
@@ -0,0 +1,35 @@
+{
+    "name": "revert-test",
+    "version": "1.0.0",
+    "license": "MIT",
+    "mocha": {
+        "timeout": 240000,
+        "exit": true,
+        "color": false,
+        "slow": 0,
+        "require": [
+            "ts-node/register",
+            "mocha-steps"
+        ]
+    },
+    "scripts": {
+        "revert-and-restart-test": "zk f mocha tests/revert-and-restart.test.ts"
+    },
+    "devDependencies": {
+        "@types/chai": "^4.2.21",
+        "@types/mocha": "^8.2.3",
+        "@types/mocha-steps": "^1.3.0",
+        "@types/node": "^14.14.5",
+        "@types/node-fetch": "^2.5.7",
+        "chai": "^4.3.4",
+        "chai-as-promised": "^7.1.1",
+        "ethereumjs-abi": "^0.6.8",
+        "ethers": "~5.7.0",
+        "mocha": "^9.0.2",
+        "mocha-steps": "^1.3.0",
+        "node-fetch": "^2.6.1",
+        "ts-node": "^10.1.0",
+        "typescript": "^4.3.5",
"zksync-web3": "link:../../../sdk/zksync-web3.js" + } +} diff --git a/core/tests/revert-test/tests/revert-and-restart.test.ts b/core/tests/revert-test/tests/revert-and-restart.test.ts new file mode 100644 index 000000000000..304bf2257b3c --- /dev/null +++ b/core/tests/revert-test/tests/revert-and-restart.test.ts @@ -0,0 +1,204 @@ +import * as utils from 'zk/build/utils'; +import { Tester } from './tester'; +import * as zkweb3 from 'zksync-web3'; +import { BigNumber, Contract, ethers } from 'ethers'; +import { expect } from 'chai'; + +// Parses output of "print-suggested-values" command of the revert block tool. +function parseSuggestedValues(suggestedValuesString: string) { + let result = suggestedValuesString.match(/(?<=l1 batch number: |nonce: |priority fee: )[0-9]*/g)!; + return { lastL1BatchNumber: parseInt(result[0]), nonce: parseInt(result[1]), priorityFee: parseInt(result[2]) }; +} + +async function killServerAndWaitForShutdown(tester: Tester) { + await utils.exec('pkill zksync_server'); + // Wait until it's really stopped. + let iter = 0; + while (iter < 30) { + try { + await tester.syncWallet.provider.getBlockNumber(); + await utils.sleep(5); + iter += 1; + } catch (_) { + // When exception happens, we assume that server died. + return; + } + } + // It's going to panic anyway, since the server is a singleton entity, so better to exit early. + throw new Error("Server didn't stop after a kill request"); +} + +const depositAmount = ethers.utils.parseEther('0.001'); + +describe('Block reverting test', function () { + let tester: Tester; + let alice: zkweb3.Wallet; + let mainContract: Contract; + let blocksCommittedBeforeRevert: number; + + before('create test wallet', async () => { + tester = await Tester.init(process.env.CHAIN_ETH_NETWORK || 'localhost'); + alice = tester.emptyWallet(); + }); + + step('run server and execute some transactions', async () => { + // Make sure server isn't running. 
+ try { + await killServerAndWaitForShutdown(tester); + } catch (_) {} + + // Set 1000 seconds deadline for `ExecuteBlocks` operation. + process.env.CHAIN_STATE_KEEPER_AGGREGATED_BLOCK_EXECUTE_DEADLINE = '1000'; + + // Run server in background. + utils.background(`zk server --components api,tree,tree_lightweight,eth,data_fetcher,state_keeper`); + // Server may need some time to recompile if it's a cold run, so wait for it. + let iter = 0; + while (iter < 30 && !mainContract) { + try { + mainContract = await tester.syncWallet.getMainContract(); + } catch (_) { + await utils.sleep(5); + iter += 1; + } + } + if (!mainContract) { + throw new Error('Server did not start'); + } + + // Seal 2 L1 batches. + // One is not enough to test the reversion of sk cache because + // it gets updated with some batch logs only at the start of the next batch. + const initialL1BatchNumber = await tester.web3Provider.getL1BatchNumber(); + + const firstDepositHandle = await tester.syncWallet.deposit({ + token: zkweb3.utils.ETH_ADDRESS, + amount: depositAmount, + to: alice.address + }); + await firstDepositHandle.wait(); + while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber) { + await utils.sleep(1); + } + + const secondDepositHandle = await tester.syncWallet.deposit({ + token: zkweb3.utils.ETH_ADDRESS, + amount: depositAmount, + to: alice.address + }); + await secondDepositHandle.wait(); + while ((await tester.web3Provider.getL1BatchNumber()) <= initialL1BatchNumber + 1) { + await utils.sleep(1); + } + + const balance = await alice.getBalance(); + expect(balance.eq(depositAmount.mul(2)), 'Incorrect balance after deposits').to.be.true; + + // Check L1 committed and executed blocks. 
+ let blocksCommitted = await mainContract.getTotalBlocksCommitted(); + let blocksExecuted = await mainContract.getTotalBlocksExecuted(); + let tryCount = 0; + while (blocksCommitted.eq(blocksExecuted) && tryCount < 10) { + blocksCommitted = await mainContract.getTotalBlocksCommitted(); + blocksExecuted = await mainContract.getTotalBlocksExecuted(); + tryCount += 1; + await utils.sleep(1); + } + expect(blocksCommitted.gt(blocksExecuted), 'There is no committed but not executed block').to.be.true; + blocksCommittedBeforeRevert = blocksCommitted; + + // Stop server. + await killServerAndWaitForShutdown(tester); + }); + + step('revert blocks', async () => { + let suggestedValuesOutput = ( + await utils.exec(`cd $ZKSYNC_HOME && cargo run --bin block_reverter --release -- print-suggested-values`) + ).stdout; + let { lastL1BatchNumber, nonce, priorityFee } = parseSuggestedValues(suggestedValuesOutput); + expect(lastL1BatchNumber < blocksCommittedBeforeRevert, 'There should be at least one block for revert').to.be + .true; + + console.log( + `Reverting with parameters: last unreverted L1 batch number: ${lastL1BatchNumber}, nonce: ${nonce}, priorityFee: ${priorityFee}` + ); + + console.log('Sending ETH transaction..'); + await utils.spawn( + `cd $ZKSYNC_HOME && cargo run --bin block_reverter --release -- send-eth-transaction --l1-batch-number ${lastL1BatchNumber} --nonce ${nonce} --priority-fee-per-gas ${priorityFee}` + ); + + console.log('Rolling back DB..'); + await utils.spawn( + `cd $ZKSYNC_HOME && cargo run --bin block_reverter --release -- rollback-db --l1-batch-number ${lastL1BatchNumber} --rollback-postgres --rollback-tree --rollback-sk-cache` + ); + + let blocksCommitted = await mainContract.getTotalBlocksCommitted(); + expect(blocksCommitted.eq(lastL1BatchNumber), 'Revert on contract was unsuccessful').to.be.true; + }); + + step('execute transaction after revert', async () => { + // Set 1 second deadline for `ExecuteBlocks` operation. 
+ process.env.CHAIN_STATE_KEEPER_AGGREGATED_BLOCK_EXECUTE_DEADLINE = '1'; + + // Run server. + utils.background(`zk server --components api,tree,tree_lightweight,eth,data_fetcher,state_keeper`); + await utils.sleep(10); + + const balanceBefore = await alice.getBalance(); + expect(balanceBefore.eq(depositAmount.mul(2)), 'Incorrect balance after revert').to.be.true; + + // Execute a transaction + const depositHandle = await tester.syncWallet.deposit({ + token: zkweb3.utils.ETH_ADDRESS, + amount: depositAmount, + to: alice.address + }); + let receipt = await depositHandle.waitFinalize(); + expect(receipt.status).to.be.eql(1); + + const balanceAfter = await alice.getBalance(); + expect(balanceAfter.eq(BigNumber.from(depositAmount).mul(3)), 'Incorrect balance after another deposit').to.be + .true; + }); + + step('execute transactions after simple restart', async () => { + // Execute an L2 transaction + await checkedRandomTransfer(alice, BigNumber.from(1)); + + // Stop server. + await killServerAndWaitForShutdown(tester); + + // Run again. 
+ utils.background(`zk server --components=api,tree,tree_lightweight,eth,data_fetcher,state_keeper`); + await utils.sleep(10); + + // Trying to send a transaction from the same address again + await checkedRandomTransfer(alice, BigNumber.from(1)); + }); + + after('Try killing server', async () => { + try { + await utils.exec('pkill zksync_server'); + } catch (_) {} + }); +}); + +async function checkedRandomTransfer(sender: zkweb3.Wallet, amount: BigNumber) { + const senderBalanceBefore = await sender.getBalance(); + const receiver = zkweb3.Wallet.createRandom().connect(sender.provider); + const transferHandle = await sender.sendTransaction({ + to: receiver.address, + value: amount + }); + const txReceipt = await transferHandle.wait(); + + const senderBalance = await sender.getBalance(); + const receiverBalance = await receiver.getBalance(); + + expect(receiverBalance.eq(amount), 'Failed updated the balance of the receiver').to.be.true; + + const spentAmount = txReceipt.gasUsed.mul(transferHandle.gasPrice!).add(amount); + expect(senderBalance.add(spentAmount).eq(senderBalanceBefore), 'Failed to update the balance of the sender').to.be + .true; +} diff --git a/core/tests/revert-test/tests/tester.ts b/core/tests/revert-test/tests/tester.ts new file mode 100644 index 000000000000..ce1581212a68 --- /dev/null +++ b/core/tests/revert-test/tests/tester.ts @@ -0,0 +1,103 @@ +import { expect } from 'chai'; +import * as ethers from 'ethers'; +import * as zkweb3 from 'zksync-web3'; +import * as fs from 'fs'; +import * as path from 'path'; + +type Network = string; + +export class Tester { + public runningFee: Map; + constructor( + public network: Network, + public ethProvider: ethers.providers.Provider, + public ethWallet: ethers.Wallet, + public syncWallet: zkweb3.Wallet, + public web3Provider: zkweb3.Provider + ) { + this.runningFee = new Map(); + } + + // prettier-ignore + static async init(network: Network) { + const ethProvider = new 
ethers.providers.JsonRpcProvider(process.env.L1_RPC_ADDRESS || process.env.ETH_CLIENT_WEB3_URL); + + let ethWallet; + if (network == 'localhost') { + ethProvider.pollingInterval = 100; + + const testConfigPath = path.join(process.env.ZKSYNC_HOME!, `etc/test_config/constant`); + const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' })); + ethWallet = ethers.Wallet.fromMnemonic( + ethTestConfig.test_mnemonic as string, + "m/44'/60'/0'/0/0" + ) + } + else { + ethWallet = new ethers.Wallet(process.env.MASTER_WALLET_PK!); + } + ethWallet = ethWallet.connect(ethProvider); + const web3Provider = new zkweb3.Provider(process.env.ZKSYNC_WEB3_API_URL || "http://localhost:3050"); + web3Provider.pollingInterval = 100; // It's OK to keep it low even on stage. + const syncWallet = new zkweb3.Wallet(ethWallet.privateKey, web3Provider, ethProvider); + + + // Since some tx may be pending on stage, we don't want to get stuck because of it. + // In order to not get stuck transactions, we manually cancel all the pending txs. + const latestNonce = await ethWallet.getTransactionCount('latest'); + const pendingNonce = await ethWallet.getTransactionCount('pending'); + const cancellationTxs = []; + for (let nonce = latestNonce; nonce != pendingNonce; nonce++) { + // For each transaction to override it, we need to provide greater fee. + // We would manually provide a value high enough (for a testnet) to be both valid + // and higher than the previous one. It's OK as we'll only be charged for the bass fee + // anyways. We will also set the miner's tip to 5 gwei, which is also much higher than the normal one. 
+ const maxFeePerGas = ethers.utils.parseEther("0.00000025"); // 250 gwei + const maxPriorityFeePerGas = ethers.utils.parseEther("0.000000005"); // 5 gwei + cancellationTxs.push(ethWallet.sendTransaction({ to: ethWallet.address, nonce, maxFeePerGas, maxPriorityFeePerGas }).then((tx) => tx.wait())); + } + if (cancellationTxs.length > 0) { + await Promise.all(cancellationTxs); + console.log(`Canceled ${cancellationTxs.length} pending transactions`); + } + + return new Tester(network, ethProvider, ethWallet, syncWallet, web3Provider); + } + + async fundedWallet( + ethAmount: ethers.BigNumberish, + l1Token: zkweb3.types.Address, + tokenAmount: ethers.BigNumberish + ) { + const newWallet = zkweb3.Wallet.createRandom().connect(this.web3Provider).connectToL1(this.ethProvider); + + let ethBalance = await this.syncWallet.getBalanceL1(); + expect(ethBalance.gt(ethAmount), 'Insufficient eth balance to create funded wallet').to.be.true; + + // To make the wallet capable of requesting priority operations, + // send ETH to L1. + + const tx1 = await this.syncWallet.ethWallet().sendTransaction({ + to: newWallet.address, + value: ethAmount + }); + await tx1.wait(); + + // Funds the wallet with L1 token. 
+ + let tokenBalance = await this.syncWallet.getBalanceL1(l1Token); + expect(tokenBalance.gt(tokenAmount), 'Insufficient token balance to create funded wallet').to.be.true; + + const erc20ABI = ['function transfer(address to, uint256 amount)']; + const erc20Contract = new ethers.Contract(l1Token, erc20ABI, this.ethWallet); + + const tx2 = await erc20Contract.transfer(newWallet.address, tokenAmount); + await tx2.wait(); + + return newWallet; + } + + emptyWallet() { + return zkweb3.Wallet.createRandom().connect(this.web3Provider).connectToL1(this.ethProvider); + } +} diff --git a/core/tests/revert-test/tsconfig.json b/core/tests/revert-test/tsconfig.json new file mode 100644 index 000000000000..6c8907a86016 --- /dev/null +++ b/core/tests/revert-test/tsconfig.json @@ -0,0 +1,9 @@ +{ + "compilerOptions": { + "target": "es2019", + "module": "commonjs", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true + } +} diff --git a/core/tests/test_account/Cargo.toml b/core/tests/test_account/Cargo.toml new file mode 100644 index 000000000000..78a1a2e08a86 --- /dev/null +++ b/core/tests/test_account/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "zksync_test_account" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +# This Crate must be published, as it's a dev-dependency of some our library crates. 
+
+[dependencies]
+zksync_types = { path = "../../lib/types", version = "1.0" }
+zksync_utils = { path = "../../lib/utils", version = "1.0" }
+zksync_crypto = { path = "../../lib/crypto", version = "1.0" }
+zksync_basic_types = { path = "../../lib/basic_types", version = "1.0" }
+
+num = { version = "0.3.1", features = ["serde"] }
diff --git a/core/tests/test_account/src/lib.rs b/core/tests/test_account/src/lib.rs
new file mode 100644
index 000000000000..8c203e0bdc8c
--- /dev/null
+++ b/core/tests/test_account/src/lib.rs
@@ -0,0 +1,163 @@
+#![allow(clippy::upper_case_acronyms)]
+
+// Built-in imports
+use std::{fmt, sync::Mutex};
+
+// Workspace uses
+use zksync_crypto::rand::{thread_rng, Rng};
+use zksync_types::{
+    fee::Fee, l2::L2Tx, tx::primitives::PackedEthSignature, Address, Execute, L2ChainId, Nonce,
+    CONTRACT_DEPLOYER_ADDRESS, H256, U256,
+};
+use zksync_utils::bytecode::hash_bytecode;
+
+/// Structure used to sign ZKSync transactions, keeps tracks of its nonce internally
+pub struct ZkSyncAccount {
+    pub private_key: H256,
+    pub address: Address,
+    nonce: Mutex<Nonce>,
+}
+
+impl Clone for ZkSyncAccount {
+    fn clone(&self) -> Self {
+        Self {
+            private_key: self.private_key,
+            address: self.address,
+            nonce: Mutex::new(*self.nonce.lock().unwrap()),
+        }
+    }
+}
+
+impl fmt::Debug for ZkSyncAccount {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // It is OK to disclose the private key contents for a testkit account.
+        f.debug_struct("ZkSyncAccount")
+            .field("private_key", &self.private_key)
+            .field("address", &self.address)
+            .field("nonce", &self.nonce)
+            .finish()
+    }
+}
+
+impl ZkSyncAccount {
+    /// Note: probably not secure, use for testing.
+    pub fn rand() -> Self {
+        let rng = &mut thread_rng();
+        let private_key = rng.gen::<[u8; 32]>().into();
+
+        Self::new(private_key, Nonce(0))
+    }
+
+    pub fn new(private_key: H256, nonce: Nonce) -> Self {
+        let address = PackedEthSignature::address_from_private_key(&private_key).unwrap();
+        Self {
+            address,
+            private_key,
+            nonce: Mutex::new(nonce),
+        }
+    }
+
+    pub fn nonce(&self) -> Nonce {
+        let n = self.nonce.lock().unwrap();
+        *n
+    }
+
+    pub fn set_nonce(&self, new_nonce: Nonce) {
+        *self.nonce.lock().unwrap() = new_nonce;
+    }
+
+    pub fn sign_withdraw(
+        &self,
+        _token: Address,
+        _amount: U256,
+        _fee: Fee,
+        _to: Address,
+        _nonce: Option<Nonce>,
+        _increment_nonce: bool,
+    ) -> L2Tx {
+        // let mut stored_nonce = self.nonce.lock().unwrap();
+        // let withdraw = GenericL2Tx::<Withdraw>::new_signed(
+        //     token,
+        //     amount,
+        //     to,
+        //     nonce.unwrap_or(*stored_nonce),
+        //     fee,
+        //     L2ChainId(270),
+        //     &self.private_key,
+        // )
+        // .expect("should create a signed transfer transaction");
+
+        // if increment_nonce {
+        //     **stored_nonce += 1;
+        // }
+
+        // withdraw.into()
+        unimplemented!()
+    }
+
+    pub fn sign_deploy_contract(
+        &self,
+        bytecode: Vec<u8>,
+        calldata: Vec<u8>,
+        fee: Fee,
+        nonce: Option<Nonce>,
+        increment_nonce: bool,
+    ) -> L2Tx {
+        let mut stored_nonce = self.nonce.lock().unwrap();
+        let bytecode_hash = hash_bytecode(&bytecode);
+
+        let execute_calldata =
+            Execute::encode_deploy_params_create(Default::default(), bytecode_hash, calldata);
+
+        let deploy_contract = L2Tx::new_signed(
+            CONTRACT_DEPLOYER_ADDRESS,
+            execute_calldata,
+            nonce.unwrap_or(*stored_nonce),
+            fee,
+            U256::zero(),
+            L2ChainId(270),
+            &self.private_key,
+            Some(vec![bytecode]),
+            Default::default(),
+        )
+        .expect("should create a signed transfer transaction");
+
+        if increment_nonce {
+            **stored_nonce += 1;
+        }
+
+        deploy_contract
+    }
+
+    pub fn sign_execute(
+        &self,
+        contract_address: Address,
+        calldata: Vec<u8>,
+        fee: Fee,
+        nonce: Option<Nonce>,
+        increment_nonce: bool,
+    ) -> L2Tx {
+        let mut stored_nonce =
self.nonce.lock().unwrap(); + let execute = L2Tx::new_signed( + contract_address, + calldata, + nonce.unwrap_or(*stored_nonce), + fee, + U256::zero(), + L2ChainId(270), + &self.private_key, + None, + Default::default(), + ) + .expect("should create a signed transfer transaction"); + + if increment_nonce { + **stored_nonce += 1; + } + + execute + } + + pub fn get_private_key(&self) -> H256 { + self.private_key + } +} diff --git a/core/tests/testkit/Cargo.toml b/core/tests/testkit/Cargo.toml new file mode 100644 index 000000000000..710533aeb597 --- /dev/null +++ b/core/tests/testkit/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "zksync_testkit" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync-2" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # We don't want to publish our tests. + +[dependencies] +zksync_core = { path = "../../bin/zksync_core", version = "1.0" } +zksync_state = { path = "../../lib/state", version = "1.0" } +zksync_storage = { path = "../../lib/storage", version = "1.0" } +zksync_eth_client = { path = "../../lib/eth_client", version = "1.0" } +zksync_eth_signer = { path = "../../lib/eth_signer", version = "1.0" } +zksync_mempool = { path = "../../lib/mempool", version = "1.0" } +zksync_test_account = { path = "../test_account", version = "1.0" } +zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_crypto = { path = "../../lib/crypto", version = "1.0" } +zksync_utils = { path = "../../lib/utils", version = "1.0" } +zksync_config = { path = "../../lib/config", version = "1.0" } +zksync_contracts = {path = "../../lib/contracts", version = "1.0" } +zksync_prover_utils = {path = "../../lib/prover_utils", version = "1.0" } +zksync_dal = {path = "../../lib/dal", version = "1.0" } +vm = {path = "../../lib/vm", version="0.1.0" } + +tokio = { version = "1", features = 
["full"] } +futures = "0.3" +serde = "1.0" +serde_json = "1.0" +structopt = "0.3.21" +once_cell = "1.7" +anyhow = "1.0" +rand = { version = "0.7" } +num = { version = "0.3", features = ["serde", "rand"] } +tempfile = "3.0.2" +vlog = { path = "../../lib/vlog", version = "1.0" } diff --git a/core/tests/testkit/src/commands/gas_price/mod.rs b/core/tests/testkit/src/commands/gas_price/mod.rs new file mode 100644 index 000000000000..c976af6585f4 --- /dev/null +++ b/core/tests/testkit/src/commands/gas_price/mod.rs @@ -0,0 +1,216 @@ +//! Gas price test is used to calculate costs of user transactions in terms of gas price. +//! It should be used as a fast benchmark tool for optimizations of our smart contracts, and +//! as a sanity check after contract refactoring. +//! +//! It is important for several reasons: +//! * Transfer cost determines maximum possible TPS of our network in larger block size limit. +//! * Cost of operations in the verify functions could stop block verification because of the block gas limit. +//! * It is useful to calculate cost of the "griefing" attack. +//! We don't take fees for deposit and full exit, but we must process them, so it is possible to spam us and force us to spend money. 
+ +use std::time::Instant; + +use rand::thread_rng; +use zksync_core::genesis::ensure_genesis_state; + +use crate::commands::gas_price::utils::{ + commit_cost_of_add_tokens, commit_cost_of_deploys, commit_cost_of_deposits, + commit_cost_of_executes, commit_cost_of_n_empty_blocks, commit_cost_of_transfers, + commit_cost_of_transfers_to_new, commit_cost_of_withdrawals, +}; +use crate::commands::utils::{ + create_first_block, create_test_accounts, get_root_hash, get_test_config, TestDatabaseManager, +}; +use crate::external_commands::{deploy_contracts, deploy_erc20_tokens}; +use crate::tester::Tester; +use crate::types::ETHEREUM_ADDRESS; +use crate::utils::load_test_bytecode_and_calldata; + +mod types; +mod utils; + +pub async fn test_gas_price() { + const APPROX_TRANSFER_COMMIT_COST: usize = 5850; + let config = get_test_config(); + + let test_db_manager = TestDatabaseManager::new().await; + let mut storage = test_db_manager.connect_to_postgres().await; + { + ensure_genesis_state(&mut storage, config.clone()).await; + } + + println!("deploying contracts"); + let deploy_timer = Instant::now(); + + let root_hash = get_root_hash(&mut storage); + let contracts = deploy_contracts(false, root_hash); + + println!( + "contracts deployed {:#?}, {} secs", + contracts, + deploy_timer.elapsed().as_secs() + ); + + let (operator_account, _) = create_test_accounts(&config, &contracts); + + let mut tester = Tester::new(test_db_manager, operator_account.clone(), config.clone()).await; + + let token = contracts.test_erc20_address; + + create_first_block(&mut tester, config.clone()).await; + + let rng = &mut thread_rng(); + + let base_cost = + commit_cost_of_n_empty_blocks(&mut tester, 1, config.clone(), contracts.zk_sync).await; + + // Aggregated blocks amortization info + let n_blocks = 20; + { + let config_for_aggregated_block = { + let mut config_for_aggregated_block = config.clone(); + config_for_aggregated_block + .eth_sender + .sender + .max_aggregated_blocks_to_commit = 
n_blocks; + config_for_aggregated_block + .eth_sender + .sender + .max_aggregated_blocks_to_execute = n_blocks; + config_for_aggregated_block + .eth_sender + .sender + .aggregated_proof_sizes = vec![n_blocks as usize]; + config_for_aggregated_block + }; + let base_cost_n_blocks = commit_cost_of_n_empty_blocks( + &mut tester, + n_blocks, + config_for_aggregated_block.clone(), + contracts.zk_sync, + ) + .await; + let commit_cost_per_block = (base_cost_n_blocks.base_commit_cost + - base_cost.base_commit_cost.clone()) + / (n_blocks - 1) + - APPROX_TRANSFER_COMMIT_COST; + let commit_base_cost = + &base_cost.base_commit_cost - &commit_cost_per_block - APPROX_TRANSFER_COMMIT_COST; + let prove_cost_per_block = (base_cost_n_blocks.base_verify_cost + - base_cost.base_verify_cost.clone()) + / (n_blocks - 1); + let prove_base_cost = &base_cost.base_verify_cost - &prove_cost_per_block; + let execute_cost_per_block = (base_cost_n_blocks.base_execute_cost + - base_cost.base_execute_cost.clone()) + / (n_blocks - 1); + let execute_base_cost = &base_cost.base_execute_cost - &execute_cost_per_block; + println!("Cost of block operations (base_cost, cost_per_block):"); + println!("NOTE: aggregated blocks(n) cost of tx = base_cost + cost_per_block*n"); + println!( + "commit: ({}, {})\nprove: ({}, {})\nexecute: ({}, {})", + commit_base_cost, + commit_cost_per_block, + prove_base_cost, + prove_cost_per_block, + execute_base_cost, + execute_cost_per_block + ); + println!(); + } + + commit_cost_of_deposits( + &mut tester, + 20, + ETHEREUM_ADDRESS, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "deposit ETH", true); + + commit_cost_of_deposits( + &mut tester, + 20, + token, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "deposit ERC20", true); + + commit_cost_of_transfers( + &mut tester, + 50, + token, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "transfer", false); + + 
commit_cost_of_transfers_to_new( + &mut tester, + 50, + token, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "transfer to new", false); + + commit_cost_of_withdrawals( + &mut tester, + 20, + ETHEREUM_ADDRESS, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "withdrawals ETH", false); + + commit_cost_of_withdrawals( + &mut tester, + 20, + token, + config.clone(), + contracts.zk_sync, + rng, + ) + .await + .report(&base_cost, "withdrawals ERC20", false); + + let (bytecode, constructor_calldata, calldata) = load_test_bytecode_and_calldata(); + + commit_cost_of_deploys( + &mut tester, + 20, + bytecode.clone(), + constructor_calldata.clone(), + config.clone(), + rng, + ) + .await + .report(&base_cost, "deploys", false); + + commit_cost_of_executes( + &mut tester, + 20, + bytecode, + constructor_calldata, + calldata, + config.clone(), + rng, + ) + .await + .report(&base_cost, "executes", false); + + let tokens = deploy_erc20_tokens(); + commit_cost_of_add_tokens(&mut tester, tokens, config.clone()) + .await + .report(&base_cost, "add_tokens", false); +} diff --git a/core/tests/testkit/src/commands/gas_price/types.rs b/core/tests/testkit/src/commands/gas_price/types.rs new file mode 100644 index 000000000000..5473283003ab --- /dev/null +++ b/core/tests/testkit/src/commands/gas_price/types.rs @@ -0,0 +1,177 @@ +use num::{rational::Ratio, BigInt, Zero}; +use zksync_types::{web3::types::TransactionReceipt, U256}; +use zksync_utils::{u256_to_biguint, UnsignedRatioSerializeAsDecimal}; + +#[derive(Debug, Clone)] +pub struct BlockExecutionResult { + pub commit_result: TransactionReceipt, + pub verify_result: TransactionReceipt, + pub execute_result: TransactionReceipt, +} + +impl BlockExecutionResult { + pub fn new( + commit_result: TransactionReceipt, + verify_result: TransactionReceipt, + execute_result: TransactionReceipt, + ) -> Self { + Self { + commit_result, + verify_result, + execute_result, + } + } +} + 
+/// Base cost of commit of one operation, we determine it by executing empty block. (with 2 noops) +#[derive(Debug, Clone)] +pub struct BaseCost { + pub base_commit_cost: BigInt, + pub base_verify_cost: BigInt, + pub base_execute_cost: BigInt, +} + +impl BaseCost { + pub fn from_block_execution_result(block_execution_res: BlockExecutionResult) -> Self { + Self { + base_commit_cost: block_execution_res + .commit_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("commit gas used empty"), + base_verify_cost: block_execution_res + .verify_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("verify gas used empty"), + base_execute_cost: block_execution_res + .execute_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("execute gas used empty"), + } + } +} + +/// Gas cost data from one test in one test we process `samples` number of operations in one block. +#[derive(Debug, Clone)] +pub struct CostsSample { + /// number of operations in the test + samples: usize, + /// Total gas that user spent in this test + users_gas_cost: BigInt, + /// Operator commit gas cost + commit_cost: BigInt, + /// Operator verify gas cost + verify_cost: BigInt, + /// Operator execute gas cost + execute_cost: BigInt, +} + +impl CostsSample { + pub fn new(samples: usize, users_gas_cost: U256, block_result: BlockExecutionResult) -> Self { + Self { + samples, + users_gas_cost: u256_to_biguint(users_gas_cost).into(), + commit_cost: block_result + .commit_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("commit gas used"), + verify_cost: block_result + .verify_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("verify gas used"), + execute_cost: block_result + .execute_result + .gas_used + .map(|cost| u256_to_biguint(cost).into()) + .expect("execute gas used"), + } + } + + fn sub_per_operation(&self, base_cost: &BaseCost) -> CostPerOperation { + let samples = self.samples; + + let 
user_gas_cost = &self.users_gas_cost / samples; + + let commit_cost = (&self.commit_cost - &base_cost.base_commit_cost) / samples; + let verify_cost = (&self.verify_cost - &base_cost.base_verify_cost) / samples; + let execute_cost = (&self.execute_cost - &base_cost.base_execute_cost) / samples; + let total = &commit_cost + &verify_cost + &execute_cost; + + CostPerOperation { + user_gas_cost, + commit_cost, + verify_cost, + execute_cost, + total, + } + } + + pub fn report(&self, base_cost: &BaseCost, description: &str, report_grief: bool) { + let per_operation_cost = self.sub_per_operation(base_cost); + per_operation_cost.report(description, report_grief); + } +} + +/// User gas cost of performing one operation and additional gas cost +/// that operator spends in each of the processing step. +/// +/// # Note +/// +/// * Operation cost can be negative, because some operations reclaims storage slots. +/// * Operation gas cost for some operations (e.g. Deposit) depends on sample size +#[derive(Debug, Clone)] +struct CostPerOperation { + user_gas_cost: BigInt, + commit_cost: BigInt, + verify_cost: BigInt, + execute_cost: BigInt, + total: BigInt, +} + +impl CostPerOperation { + /// Grief factor when we neglect base commit/verify cost (when blocks are big) + fn asymptotic_grief_factor(&self) -> String { + let operator_total_cost_per_op = &self.commit_cost + &self.verify_cost + &self.execute_cost; + + if operator_total_cost_per_op.is_zero() { + "0".to_string() + } else { + UnsignedRatioSerializeAsDecimal::serialize_to_str_with_dot( + &Ratio::new( + self.user_gas_cost + .to_biguint() + .expect("user gas cost is negative"), + operator_total_cost_per_op + .to_biguint() + .expect("operator total cost is negative"), + ), + 4, + ) + } + } + + pub fn report(&self, description: &str, report_grief: bool) { + let grief_info = if report_grief { + let mut info = String::from("\nuser gas cost over operator cost: "); + info.push_str(&self.asymptotic_grief_factor()); + info + } else { + 
String::new() + }; + println!( + "Gas cost of {}:\nuser_gas_cost: {}\ncommit: {}\nprove: {}\nexecute: {}\ntotal: {}{}", + description, + self.user_gas_cost, + self.commit_cost, + self.verify_cost, + self.execute_cost, + self.total, + grief_info + ); + println!() + } +} diff --git a/core/tests/testkit/src/commands/gas_price/utils.rs b/core/tests/testkit/src/commands/gas_price/utils.rs new file mode 100644 index 000000000000..889b10859425 --- /dev/null +++ b/core/tests/testkit/src/commands/gas_price/utils.rs @@ -0,0 +1,336 @@ +use num::{bigint::RandBigInt, BigUint}; + +use zksync_config::ZkSyncConfig; +use zksync_types::{Address, U256}; + +use crate::commands::gas_price::types::{BaseCost, BlockExecutionResult, CostsSample}; +use crate::tester::Tester; +use crate::types::{AccountHandler, ETHEREUM_ADDRESS}; + +async fn process_blocks( + tester: &mut Tester, + expected_aggregated_blocks: u32, +) -> BlockExecutionResult { + // Expect that there are enough transactions for the block to close. + tester + .load_operations(expected_aggregated_blocks as usize) + .await; + + let (commit_block_range, commit_result) = tester + .commit_blocks() + .await + .expect("Commit blocks op failed"); + let (verify_block_range, verify_result) = tester + .verify_blocks() + .await + .expect("Verify blocks op failed"); + let (execute_block_range, execute_result) = tester + .execute_blocks() + .await + .expect("Execute blocks op failed"); + + assert_eq!( + commit_block_range, verify_block_range, + "Not the same blocks are committed and verified, committed: {:?}, verified: {:?}", + commit_block_range, verify_block_range + ); + assert_eq!( + verify_block_range, execute_block_range, + "Not the same blocks are verified and executed, verified: {:?}, executed: {:?}", + commit_block_range, execute_block_range + ); + assert_eq!( + *execute_block_range.1 - *execute_block_range.0 + 1, + expected_aggregated_blocks, + "The number of aggregated blocks is not as expected: real: {}, expected: {}", + 
*execute_block_range.1 - *execute_block_range.0 + 1, + expected_aggregated_blocks + ); + + BlockExecutionResult::new( + commit_result.expect_success(), + verify_result.expect_success(), + execute_result.expect_success(), + ) +} + +pub async fn commit_cost_of_n_empty_blocks( + tester: &mut Tester, + n: u32, + mut config: ZkSyncConfig, + zksync_contract: Address, +) -> BaseCost { + let operator_account = tester.operator_account.clone(); + + config.chain.state_keeper.transaction_slots = 1; + tester.change_config(config.clone()).await; + + for _ in 0..n { + let random_account = AccountHandler::rand(&config, zksync_contract); + // In order for the block to close, we should initialize the transaction, + // So far, we will use one transfer operation, but in the future it will need to be replaced with noop. + tester + .transfer( + &operator_account, + &random_account, + ETHEREUM_ADDRESS, + BigUint::from(10u32).pow(17), + BigUint::from(u32::MAX), + ) + .await; + } + tester.operator_account = operator_account; + let block_execution_res = process_blocks(tester, n).await; + + BaseCost::from_block_execution_result(block_execution_res) +} + +pub async fn commit_cost_of_deposits( + tester: &mut Tester, + n_operations: usize, + token: Address, + mut config: ZkSyncConfig, + zksync_contract: Address, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = n_operations; + tester.change_config(config.clone()).await; + + let mut user_gas_cost = U256::zero(); + let operator_account = tester.operator_account.clone(); + for _ in 0..n_operations { + let random_account = AccountHandler::rand(&config, zksync_contract); + let deposit_tx_receipt = tester + .deposit( + &operator_account, + &random_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(17), &BigUint::from(10u32).pow(18)), + ) + .await; + user_gas_cost += deposit_tx_receipt.gas_used.expect("deposit gas used"); + } + + let deposits_execute_result = process_blocks(tester, 
1).await; + + CostsSample::new(n_operations, user_gas_cost, deposits_execute_result) +} + +pub async fn commit_cost_of_transfers_to_new( + tester: &mut Tester, + n_operations: usize, + token: Address, + mut config: ZkSyncConfig, + zksync_contract: Address, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = n_operations; + tester.change_config(config.clone()).await; + + let operator_account = tester.operator_account.clone(); + for _ in 0..n_operations { + let random_account = AccountHandler::rand(&config, zksync_contract); + tester + .transfer( + &operator_account, + &random_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(17), &BigUint::from(10u32).pow(18)), + rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + ) + .await; + } + tester.operator_account = operator_account; + + let transfers_execute_result = process_blocks(tester, 1).await; + CostsSample::new(n_operations, U256::zero(), transfers_execute_result) +} + +pub async fn commit_cost_of_transfers( + tester: &mut Tester, + n_operations: usize, + token: Address, + mut config: ZkSyncConfig, + zksync_contract: Address, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = n_operations; + tester.change_config(config.clone()).await; + + let operator_account = tester.operator_account.clone(); + + let mut accounts = Vec::with_capacity(n_operations); + for _ in 0..n_operations { + let random_account = AccountHandler::rand(&config, zksync_contract); + tester + .deposit( + &operator_account, + &random_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(17), &BigUint::from(10u32).pow(18)), + ) + .await; + + accounts.push(random_account); + } + process_blocks(tester, 1).await; + + for test_account in accounts { + tester + .transfer( + &operator_account, + &test_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(17), &BigUint::from(10u32).pow(18)), + 
rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + ) + .await; + } + tester.operator_account = operator_account; + + let transfers_execute_result = process_blocks(tester, 1).await; + + CostsSample::new(n_operations, U256::zero(), transfers_execute_result) +} + +pub async fn commit_cost_of_withdrawals( + tester: &mut Tester, + n_operations: usize, + token: Address, + mut config: ZkSyncConfig, + zksync_contract: Address, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = n_operations; + // It is needed to avoid closing block because of the gas limit. + // Constant for executing ERC20 withdrawals is quite high. + config.chain.state_keeper.max_single_tx_gas = 5000000; + tester.change_config(config.clone()).await; + + let operator_account = tester.operator_account.clone(); + + let mut accounts = Vec::with_capacity(n_operations); + for _ in 0..n_operations { + let random_account = AccountHandler::rand(&config, zksync_contract); + tester + .deposit( + &operator_account, + &random_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(17), &BigUint::from(10u32).pow(18)), + ) + .await; + + accounts.push(random_account); + } + process_blocks(tester, 1).await; + + for test_account in accounts { + tester + .withdraw( + &operator_account, + &test_account, + token, + rng.gen_biguint_range(&BigUint::from(10u32).pow(16), &BigUint::from(10u32).pow(17)), + rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + ) + .await; + } + tester.operator_account = operator_account; + + let withdrawals_execute_result = process_blocks(tester, 1).await; + + CostsSample::new(n_operations, U256::zero(), withdrawals_execute_result) +} + +pub async fn commit_cost_of_deploys( + tester: &mut Tester, + n_operations: usize, + bytecode: Vec, + constructor_calldata: Vec, + mut config: ZkSyncConfig, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = 
n_operations; + tester.change_config(config.clone()).await; + + let operator_account = tester.operator_account.clone(); + for n_accounts in 0..n_operations { + tester + .deploy_contract( + &operator_account, + bytecode.clone(), + constructor_calldata.clone(), + rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + n_accounts.into(), + ) + .await; + } + tester.operator_account = operator_account; + + let deploys_execute_result = process_blocks(tester, 1).await; + CostsSample::new(n_operations, U256::zero(), deploys_execute_result) +} + +#[allow(clippy::too_many_arguments)] +pub async fn commit_cost_of_executes( + tester: &mut Tester, + n_operations: usize, + bytecode: Vec, + constructor_calldata: Vec, + calldata: Vec, + mut config: ZkSyncConfig, + rng: &mut impl RandBigInt, +) -> CostsSample { + config.chain.state_keeper.transaction_slots = 1; + tester.change_config(config.clone()).await; + let operator_account = tester.operator_account.clone(); + + let address = tester + .deploy_contract( + &operator_account, + bytecode.clone(), + constructor_calldata.clone(), + rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + 0.into(), + ) + .await; + process_blocks(tester, 1).await; + + config.chain.state_keeper.transaction_slots = n_operations; + tester.change_config(config.clone()).await; + + for _ in 0..n_operations { + tester + .execute_contract( + &operator_account, + address, + calldata.clone(), + rng.gen_biguint_range(&BigUint::from(u32::MAX / 2), &BigUint::from(u32::MAX)), + ) + .await; + } + tester.operator_account = operator_account; + + let executes_execute_result = process_blocks(tester, 1).await; + CostsSample::new(n_operations, U256::zero(), executes_execute_result) +} + +pub async fn commit_cost_of_add_tokens( + tester: &mut Tester, + addresses: Vec
, + mut config: ZkSyncConfig, +) -> CostsSample { + let n_operations = addresses.len(); + config.chain.state_keeper.transaction_slots = n_operations; + tester.change_config(config.clone()).await; + + let operator_account = tester.operator_account.clone(); + for address in addresses { + tester.add_token(&operator_account, address).await; + } + tester.operator_account = operator_account; + + let add_tokens_execute_result = process_blocks(tester, 1).await; + CostsSample::new(n_operations, U256::zero(), add_tokens_execute_result) +} diff --git a/core/tests/testkit/src/commands/mod.rs b/core/tests/testkit/src/commands/mod.rs new file mode 100644 index 000000000000..99b665b57765 --- /dev/null +++ b/core/tests/testkit/src/commands/mod.rs @@ -0,0 +1,4 @@ +pub mod gas_price; +// pub mod revert_block; +// pub mod upgrade_contract; +pub mod utils; diff --git a/core/tests/testkit/src/commands/revert_block.rs b/core/tests/testkit/src/commands/revert_block.rs new file mode 100644 index 000000000000..0640dc72d333 --- /dev/null +++ b/core/tests/testkit/src/commands/revert_block.rs @@ -0,0 +1,68 @@ +use num::BigUint; +use std::time::Instant; + +use zksync_core::genesis::ensure_genesis_state; + +use crate::commands::utils::{ + create_first_block, create_test_accounts, get_root_hashes, get_test_config, + perform_transactions, TestDatabaseManager, +}; +use crate::external_commands::deploy_contracts; +use crate::tester::Tester; +use crate::types::{BlockProcessing, ETHEREUM_ADDRESS}; + +pub async fn test_revert_blocks() { + let config = get_test_config(); + + let test_db_manager = TestDatabaseManager::new().await; + let db = test_db_manager.get_db(); + ensure_genesis_state(db.clone(), config.clone()); + + println!("deploying contracts"); + let deploy_timer = Instant::now(); + + let (root_hash, porter_root_hash) = get_root_hashes(db.clone()); + let contracts = deploy_contracts(false, root_hash, porter_root_hash); + + println!( + "contracts deployed {:#?}, {} secs", + contracts, + 
deploy_timer.elapsed().as_secs() + ); + + let (operator_account, rich_account) = create_test_accounts(&config, &contracts); + + let mut tester = Tester::new(db.clone(), operator_account.clone(), config.clone()); + + let token = ETHEREUM_ADDRESS; + let fee_token = contracts.test_erc20_address; + let deposit_amount = BigUint::from(10u32).pow(18u32); + + create_first_block(&mut tester, fee_token, config.clone()).await; + + perform_transactions( + &mut tester, + rich_account.clone(), + token, + fee_token, + deposit_amount.clone(), + contracts.zk_sync, + config.clone(), + BlockProcessing::CommitAndRevert, + ) + .await; + + perform_transactions( + &mut tester, + rich_account.clone(), + token, + fee_token, + deposit_amount.clone(), + contracts.zk_sync, + config.clone(), + BlockProcessing::CommitAndExecute, + ) + .await; + + tester.assert_balances_correctness().await; +} diff --git a/core/tests/testkit/src/commands/upgrade_contract.rs b/core/tests/testkit/src/commands/upgrade_contract.rs new file mode 100644 index 000000000000..80835e616f84 --- /dev/null +++ b/core/tests/testkit/src/commands/upgrade_contract.rs @@ -0,0 +1,73 @@ +use num::BigUint; +use std::time::Instant; + +use zksync_core::genesis::ensure_genesis_state; + +use crate::commands::utils::{ + create_first_block, create_test_accounts, get_root_hashes, get_test_config, + perform_transactions, TestDatabaseManager, +}; +use crate::external_commands::{deploy_contracts, run_upgrade_contract}; +use crate::tester::Tester; +use crate::types::{BlockProcessing, ETHEREUM_ADDRESS}; + +pub async fn test_upgrade_contract() { + let config = get_test_config(); + + let test_db_manager = TestDatabaseManager::new().await; + let db = test_db_manager.get_db(); + + ensure_genesis_state(db.clone(), config.clone()); + + println!("deploying contracts"); + let deploy_timer = Instant::now(); + + let (root_hash, porter_root_hash) = get_root_hashes(db.clone()); + let contracts = deploy_contracts(false, root_hash, porter_root_hash); + + 
println!( + "contracts deployed {:#?}, {} secs", + contracts, + deploy_timer.elapsed().as_secs() + ); + + let (operator_account, rich_account) = create_test_accounts(&config, &contracts); + + let mut tester = Tester::new(db.clone(), operator_account.clone(), config.clone()); + + let token = ETHEREUM_ADDRESS; + let fee_token = contracts.test_erc20_address; + let deposit_amount = BigUint::from(10u32).pow(18u32); + + create_first_block(&mut tester, fee_token, config.clone()).await; + + perform_transactions( + &mut tester, + rich_account.clone(), + token, + fee_token, + deposit_amount.clone(), + contracts.zk_sync, + config.clone(), + BlockProcessing::CommitAndExecute, + ) + .await; + + let start_upgrade = Instant::now(); + run_upgrade_contract(contracts.zk_sync, contracts.upgrade_gatekeeper); + println!("Upgrade done in {:?}", start_upgrade.elapsed()); + + perform_transactions( + &mut tester, + rich_account.clone(), + token, + fee_token, + deposit_amount.clone(), + contracts.zk_sync, + config.clone(), + BlockProcessing::CommitAndExecute, + ) + .await; + + tester.assert_balances_correctness().await; +} diff --git a/core/tests/testkit/src/commands/utils.rs b/core/tests/testkit/src/commands/utils.rs new file mode 100644 index 000000000000..48d4bf4dd8ea --- /dev/null +++ b/core/tests/testkit/src/commands/utils.rs @@ -0,0 +1,340 @@ +use num::BigUint; +use std::convert::TryFrom; +use tempfile::TempDir; +use zksync_storage::RocksDB; +use zksync_types::web3::{transports::Http, types::FilterBuilder}; + +use crate::external_commands::{get_test_accounts, Contracts}; +use crate::tester::Tester; +use crate::types::{AccountHandler, BlockProcessing}; +use crate::utils::load_test_bytecode_and_calldata; +use zksync_config::ZkSyncConfig; +use zksync_contracts::zksync_contract; +use zksync_dal::{ConnectionPool, StorageProcessor}; +use zksync_storage::db::Database; +use zksync_types::{l1::L1Tx, tokens::ETHEREUM_ADDRESS, Address, L1BatchNumber, H256}; + +pub fn get_test_config() -> 
ZkSyncConfig { + let mut config = ZkSyncConfig::from_env(); + config.chain.operations_manager.delay_interval = 0; + config.chain.state_keeper.block_commit_deadline_ms = u64::MAX; + config.eth_sender.sender.aggregated_block_commit_deadline = u64::MAX; + config.eth_sender.sender.aggregated_block_prove_deadline = u64::MAX; + config.eth_sender.sender.aggregated_block_execute_deadline = u64::MAX; + config.eth_sender.sender.max_aggregated_blocks_to_commit = 1; + config.eth_sender.sender.max_aggregated_blocks_to_execute = 1; + config.eth_sender.sender.aggregated_proof_sizes = vec![1]; + + config +} + +pub struct TestDatabaseManager { + temp_dir: Option, + state_keeper_temp_dir: Option, +} + +impl TestDatabaseManager { + pub async fn new() -> Self { + let temp_dir = TempDir::new().expect("failed get temporary directory for RocksDb"); + let state_keeper_temp_dir = + TempDir::new().expect("failed get temporary directory for RocksDb"); + Self { + temp_dir: Some(temp_dir), + state_keeper_temp_dir: Some(state_keeper_temp_dir), + } + } + + pub fn get_db_path(&self) -> String { + self.temp_dir + .as_ref() + .unwrap() + .path() + .to_str() + .unwrap() + .to_string() + } + + pub fn get_state_keeper_db(&self) -> RocksDB { + RocksDB::new( + Database::StateKeeper, + self.state_keeper_temp_dir.as_ref().unwrap(), + false, + ) + } + + pub async fn connect_to_postgres(&self) -> StorageProcessor<'static> { + // This method is currently redundant, but it was created so that if some tweaks would be required, + // it'll be easier to introduce them through `TestDatabaseManager`. + StorageProcessor::establish_connection(true).await + } + + pub fn create_pool(&self) -> ConnectionPool { + // This method is currently redundant, but it was created so that if some tweaks would be required, + // it'll be easier to introduce them through `TestDatabaseManager`. 
+ ConnectionPool::new(Some(1), true) + } +} + +impl Drop for TestDatabaseManager { + fn drop(&mut self) { + let temp_dir = self.temp_dir.take(); + if let Some(temp_dir) = temp_dir { + temp_dir + .close() + .expect("failed close temporary file for RocksDb"); + } + } +} + +pub fn get_root_hash(storage: &mut StorageProcessor<'static>) -> H256 { + let metadata = storage + .blocks_dal() + .get_block_metadata(L1BatchNumber(0)) + .unwrap(); + metadata.metadata.root_hash +} + +/// Return Operator account and rich zksync account with balance in L1. +pub fn create_test_accounts( + config: &ZkSyncConfig, + contracts: &Contracts, +) -> (AccountHandler, AccountHandler) { + let transport = Http::new(&config.eth_client.web3_url).expect("http transport start"); + let (test_accounts_info, operator_account_info) = get_test_accounts(); + + let operator_account = AccountHandler::new( + operator_account_info.private_key, + transport.clone(), + config, + contracts.zk_sync, + ); + let rich_account = test_accounts_info + .last() + .map(|test_eth_account| { + AccountHandler::new( + test_eth_account.private_key, + transport.clone(), + config, + contracts.zk_sync, + ) + }) + .expect("can't use testkit without rich test account"); + + (operator_account, rich_account) +} + +/// Eth is added to the contract as a first class citizen token during deployment, +/// so the priority operation of adding a token must be processed separately. 
+async fn add_eth_token(tester: &mut Tester, account: &AccountHandler, contract_address: Address) { + let new_priority_req_event_topic = zksync_contract() + .event("NewPriorityRequest") + .expect("zkSync contract abi error") + .signature(); + let filter = FilterBuilder::default() + .address(vec![contract_address]) + .from_block(0.into()) + .topics(Some(vec![new_priority_req_event_topic]), None, None, None) + .build(); + let logs = account + .eth_provider + .main_contract_eth_client + .logs(filter, "utils") + .await + .unwrap(); + + let tx = logs + .iter() + .find_map(|op| L1Tx::try_from(op.clone()).ok()) + .expect("failed get L1 tx from logs"); + + tester.add_tx_to_mempool(tx.into()).await; +} + +pub async fn create_first_block(tester: &mut Tester, mut config: ZkSyncConfig) { + println!("Create block with add tokens"); + config.chain.state_keeper.transaction_slots = 4; + tester.change_config(config).await; + + let operator_account = tester.operator_account.clone(); + + tester + .add_custom_token( + &operator_account, + ETHEREUM_ADDRESS, + "ETH".to_string(), + "ETH".to_string(), + 18, + ) + .await; + tester + .deposit( + &operator_account, + &operator_account, + ETHEREUM_ADDRESS, + BigUint::from(10u32).pow(20), + ) + .await; + + tester.load_operations(1).await; + + // Commit blocks + let (block_range, exec_result) = tester.commit_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Commit blocks, block range: {:?} tx hash: {:?}", + block_range, tx_receipt.transaction_hash + ); + + // Verify blocks + let (block_range, exec_result) = tester.verify_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Verify blocks, block range: {:?} tx hash: {:?}", + block_range, tx_receipt.transaction_hash + ); + + // Execute blocks + let (block_range, exec_result) = tester.execute_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Execute blocks, block range: {:?} tx hash: 
{:?}", + block_range, tx_receipt.transaction_hash + ); +} + +#[allow(clippy::too_many_arguments)] +pub async fn perform_transactions( + tester: &mut Tester, + rich_account: AccountHandler, + token: Address, + fee_token: Address, + deposit_amount: BigUint, + zksync_contract: Address, + mut config: ZkSyncConfig, + block_proccesing: BlockProcessing, +) { + config.chain.state_keeper.transaction_slots = 8; + tester.change_config(config.clone()).await; + + let fee_amount = std::cmp::min( + &deposit_amount / BigUint::from(100u32), + BigUint::from(u32::MAX), + ); + + let alice = AccountHandler::rand(&config, zksync_contract); + let bob = AccountHandler::rand(&config, zksync_contract); + + tester + .deposit(&rich_account, &alice, fee_token, deposit_amount.clone()) + .await; + println!( + "Deposit to other account test success, token address: {}", + token + ); + tester + .deposit(&rich_account, &alice, token, deposit_amount.clone()) + .await; + println!( + "Deposit to other account test success, token address: {}", + fee_token + ); + + tester + .transfer( + &alice, + &alice, + token, + fee_amount.clone(), + fee_amount.clone(), + ) + .await; + println!("Transfer to self test success"); + + tester + .transfer( + &alice, + &bob, + token, + &deposit_amount / BigUint::from(2u32), + fee_amount.clone(), + ) + .await; + tester + .transfer( + &alice, + &bob, + ETHEREUM_ADDRESS, + fee_amount.clone(), + fee_amount.clone(), + ) + .await; + println!("Transfer to other test success"); + + tester + .withdraw( + &bob, + &bob, + ETHEREUM_ADDRESS, + fee_amount.clone(), + fee_amount.clone(), + ) + .await; + println!("Withdraw to self test success"); + + let (bytecode, constructor_calldata, calldata) = load_test_bytecode_and_calldata(); + + let contract_address = tester + .deploy_contract( + &alice, + bytecode, + constructor_calldata, + fee_amount.clone(), + 0.into(), + ) + .await; + println!("Deploy contract test success"); + + tester + .execute_contract(&alice, contract_address, calldata, 
fee_amount.clone()) + .await; + println!("Execute contract test success"); + + tester.load_operations(1).await; + + // Commit blocks + let (block_range, exec_result) = tester.commit_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Commit blocks, block range: {:?}, tx hash: {:#x}", + block_range, tx_receipt.transaction_hash + ); + + match block_proccesing { + BlockProcessing::CommitAndExecute => { + // Verify blocks + let (block_range, exec_result) = tester.verify_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Verify blocks, block range: {:?}, tx hash: {:#x}", + block_range, tx_receipt.transaction_hash + ); + + // Execute blocks + let (block_range, exec_result) = tester.execute_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Execute blocks, block range: {:?}, tx hash: {:#x}", + block_range, tx_receipt.transaction_hash + ); + } + BlockProcessing::CommitAndRevert => { + // Revert blocks + let (block_range, exec_result) = tester.revert_blocks().await.unwrap(); + let tx_receipt = exec_result.expect_success(); + println!( + "Revert blocks,block range: {:?}, tx hash: {:#x}", + block_range, tx_receipt.transaction_hash + ); + } + } +} diff --git a/core/tests/testkit/src/eth_provider.rs b/core/tests/testkit/src/eth_provider.rs new file mode 100644 index 000000000000..4f548824a49d --- /dev/null +++ b/core/tests/testkit/src/eth_provider.rs @@ -0,0 +1,662 @@ +use anyhow::format_err; +use num::BigUint; +use zksync_eth_client::clients::http_client::{Error, EthInterface}; +use zksync_types::ethabi; +use zksync_types::web3::{ + contract::{tokens::Tokenize, Options}, + transports::Http, + types::{TransactionReceipt, H256, U256, U64}, +}; +use zksync_types::L1ChainId; + +use zksync_contracts::{erc20_contract, zksync_contract}; +use zksync_eth_client::ETHDirectClient; +use zksync_eth_signer::PrivateKeySigner; +use zksync_types::aggregated_operations::{ + 
BlocksCommitOperation, BlocksExecuteOperation, BlocksProofOperation, +}; +use zksync_types::{ + l1::{OpProcessingType, PriorityQueueType}, + tx::primitives::PackedEthSignature, + Address, +}; +use zksync_utils::{biguint_to_u256, u256_to_biguint}; + +const DEFAULT_PRIORITY_FEE: usize = 5; // 5 wei, doesn't really matter + +/// Used to sign and post ETH transactions for the zkSync contracts. +#[derive(Debug, Clone)] +pub struct EthereumProvider { + pub main_contract_eth_client: ETHDirectClient, + pub erc20_abi: ethabi::Contract, + pub address: Address, +} + +impl EthereumProvider { + pub fn new( + private_key: H256, + transport: Http, + contract_address: Address, + chain_id: L1ChainId, + ) -> Self { + let erc20_abi = erc20_contract(); + let address = PackedEthSignature::address_from_private_key(&private_key) + .expect("failed get address from private key"); + + let eth_signer = PrivateKeySigner::new(private_key); + let main_contract_eth_client = ETHDirectClient::new( + transport, + zksync_contract(), + address, + eth_signer, + contract_address, + DEFAULT_PRIORITY_FEE.into(), + chain_id, + ); + + Self { + main_contract_eth_client, + erc20_abi, + address, + } + } + + pub async fn eth_block_number(&self) -> Result { + self.main_contract_eth_client + .block_number("provider") + .await + .map(|num| num.as_u64()) + } + + pub async fn eth_balance(&self, account_address: Option
) -> Result { + let account_address = account_address.unwrap_or(self.address); + self.main_contract_eth_client + .eth_balance(account_address, "provider") + .await + .map(u256_to_biguint) + } + + pub async fn get_layer_1_base_cost( + &self, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let get_base_cost_func_name = match tx_id { + // TransactionID::Deposit => "depositBaseCost", + // TransactionID::AddToken => "addTokenBaseCost", + // TransactionID::Withdraw => "withdrawBaseCost", + // TransactionID::Execute => unimplemented!(), + // }; + + // let gas_price = self + // .main_contract_eth_client + // .get_gas_price("provider") + // .await?; + + // let layer_1_base_fee = self + // .main_contract_eth_client + // .call_main_contract_function( + // get_base_cost_func_name, + // (gas_price, queue_type as u8, processing_type as u8), + // None, + // default_tx_options(), + // None, + // ) + // .await + // .map_err(|e| format_err!("Contract query fail: {}", e))?; + + // biguint_to_u256(layer_2_tip_fee) + // .checked_add(layer_1_base_fee) + // .ok_or_else(|| { + // format_err!("overflow when adding layer 1 base cost and layer 2 tip fee") + // }) + } + + pub async fn erc20_balance( + &self, + token_contract: &Address, + account_address: Option
, + ) -> anyhow::Result { + let account_address = account_address.unwrap_or(self.address); + self.main_contract_eth_client + .call_contract_function( + "balanceOf", + account_address, + None, + Options::default(), + None, + *token_contract, + erc20_contract(), + ) + .await + .map(u256_to_biguint) + .map_err(|e| format_err!("Contract query fail: {}", e)) + } + + pub async fn balances_to_withdraw( + &self, + token_address: Address, + account_address: Option
, + ) -> anyhow::Result { + let account_address = account_address.unwrap_or(self.address); + self.main_contract_eth_client + .call_main_contract_function( + "getPendingBalance", + (account_address, token_address), + None, + default_tx_options(), + None, + ) + .await + .map(u256_to_biguint) + .map_err(|e| format_err!("Contract query fail: {}", e)) + } + + pub async fn total_blocks_committed(&self) -> anyhow::Result { + self.main_contract_eth_client + .call_main_contract_function( + "getTotalBlocksCommitted", + (), + None, + default_tx_options(), + None, + ) + .await + .map_err(|e| format_err!("Contract query fail: {}", e)) + } + + pub async fn total_blocks_verified(&self) -> anyhow::Result { + self.main_contract_eth_client + .call_main_contract_function( + "getTotalBlocksVerified", + (), + None, + default_tx_options(), + None, + ) + .await + .map_err(|e| format_err!("Contract query fail: {}", e)) + } + + pub async fn total_blocks_executed(&self) -> anyhow::Result { + self.main_contract_eth_client + .call_main_contract_function( + "getTotalBlocksExecuted", + (), + None, + default_tx_options(), + None, + ) + .await + .map_err(|e| format_err!("Contract query fail: {}", e)) + } + + #[allow(clippy::too_many_arguments)] + pub async fn add_custom_token( + &self, + _token_address: Address, + _name: String, + _symbol: String, + _decimals: u8, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let value = self + // .get_layer_1_base_cost( + // TransactionID::AddToken, + // queue_type, + // processing_type, + // layer_2_tip_fee, + // ) + // .await?; + + // let data = self.main_contract_eth_client.encode_tx_data( + // "addCustomToken", + // ( + // token_address, + // name, + // symbol, + // decimals, + // queue_type as u8, + // processing_type as u8, + // ), + // ); + + // let signed_tx = self + // .main_contract_eth_client + // .sign_prepared_tx( + // data, + // Options::with(|opt| { + // opt.value 
= Some(value); + // opt.gas = Some(500_000.into()); + // }), + // "provider", + // ) + // .await + // .map_err(|e| format_err!("Add token send err: {}", e))?; + + // let receipt = + // send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + // Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn add_token( + &self, + _token_address: Address, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let value = self + // .get_layer_1_base_cost( + // TransactionID::AddToken, + // queue_type, + // processing_type, + // layer_2_tip_fee, + // ) + // .await?; + + // let data = self.main_contract_eth_client.encode_tx_data( + // "addToken", + // (token_address, queue_type as u8, processing_type as u8), + // ); + + // let signed_tx = self + // .main_contract_eth_client + // .sign_prepared_tx( + // data, + // Options::with(|opt| { + // opt.value = Some(value); + // opt.gas = Some(500_000.into()); + // }), + // "provider", + // ) + // .await + // .map_err(|e| format_err!("Add token send err: {}", e))?; + + // let receipt = + // send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + // Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn request_withdraw( + &self, + _token: Address, + _amount: BigUint, + _to: Address, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let value = self + // .get_layer_1_base_cost( + // TransactionID::Withdraw, + // queue_type, + // processing_type, + // layer_2_tip_fee, + // ) + // .await?; + + // let data = self.main_contract_eth_client.encode_tx_data( + // "requestWithdraw", + // ( + // token, + // biguint_to_u256(amount), + // to, + // queue_type as u8, + // processing_type as u8, + // ), + // ); + + // let signed_tx = self + // 
.main_contract_eth_client + // .sign_prepared_tx( + // data, + // Options::with(|opt| { + // opt.value = Some(value); + // opt.gas = Some(500_000.into()); + // }), + // "provider", + // ) + // .await + // .map_err(|e| format_err!("Request withdraw send err: {}", e))?; + + // let receipt = + // send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + // Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn deposit_eth( + &self, + _amount: BigUint, + _to: &Address, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let value = self + // .get_layer_1_base_cost( + // TransactionID::Deposit, + // queue_type, + // processing_type, + // layer_2_tip_fee + &amount, + // ) + // .await?; + + // let data = self.main_contract_eth_client.encode_tx_data( + // "depositETH", + // ( + // biguint_to_u256(amount), + // *to, + // queue_type as u8, + // processing_type as u8, + // ), + // ); + + // let signed_tx = self + // .main_contract_eth_client + // .sign_prepared_tx( + // data, + // Options::with(|opt| { + // opt.value = Some(value); + // opt.gas = Some(500_000.into()); + // }), + // "provider", + // ) + // .await + // .map_err(|e| format_err!("Deposit eth send err: {}", e))?; + + // let receipt = + // send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + // Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn send_eth(&self, to: Address, value: BigUint) -> anyhow::Result { + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx_for_addr( + Vec::new(), + to, + Options::with(|opt| { + opt.value = Some(biguint_to_u256(value)); + opt.gas = Some(500_000.into()); + }), + "provider", + ) + .await + .map_err(|e| format_err!("Send err: {}", e))?; + + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, 
signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn allowance(&self, token_address: Address) -> Result { + self.main_contract_eth_client + .allowance(token_address, self.erc20_abi.clone()) + .await + } + + pub async fn approve_erc20( + &self, + token_address: Address, + amount: BigUint, + ) -> anyhow::Result { + let contract_function = self + .erc20_abi + .function("approve") + .expect("failed to get function parameters"); + let params = ( + self.main_contract_eth_client.contract_addr(), + biguint_to_u256(amount), + ); + let data = contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx_for_addr(data, token_address, default_tx_options(), "provider") + .await + .map_err(|e| format_err!("Approve send err: {}", e))?; + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn deposit_erc20( + &self, + _token_contract: Address, + _amount: BigUint, + _to: &Address, + _queue_type: PriorityQueueType, + _processing_type: OpProcessingType, + _layer_2_tip_fee: BigUint, + ) -> anyhow::Result { + // let value = self + // .get_layer_1_base_cost( + // TransactionID::Deposit, + // queue_type, + // processing_type, + // layer_2_tip_fee, + // ) + // .await?; + + // let data = self.main_contract_eth_client.encode_tx_data( + // "depositERC20", + // ( + // token_contract, + // biguint_to_u256(amount.clone()), + // *to, + // queue_type as u8, + // processing_type as u8, + // ), + // ); + // let signed_tx = self + // .main_contract_eth_client + // .sign_prepared_tx( + // data, + // Options::with(|opt| { + // opt.value = Some(value); + // opt.gas = Some(500_000.into()); + // }), + // "provider", + // ) + // .await + // .map_err(|e| format_err!("Deposit erc20 send err: 
{}", e))?; + + // let receipt = + // send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + // Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn commit_blocks( + &self, + commit_operation: &BlocksCommitOperation, + ) -> anyhow::Result { + let data = self.main_contract_eth_client.encode_tx_data( + "commitBlocks", + commit_operation.get_eth_tx_args().as_slice(), + ); + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx( + data, + Options::with(|f| f.gas = Some(U256::from(9 * 10u64.pow(6)))), + "provider", + ) + .await + .map_err(|e| format_err!("Commit block send err: {}", e))?; + + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + // Verifies block using provided proof or empty proof if None is provided. (`DUMMY_VERIFIER` should be enabled on the contract). + pub async fn verify_blocks( + &self, + proof_operation: &BlocksProofOperation, + ) -> anyhow::Result { + let data = self + .main_contract_eth_client + .encode_tx_data("proveBlocks", proof_operation.get_eth_tx_args().as_slice()); + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx( + data, + Options::with(|f| f.gas = Some(U256::from(10 * 10u64.pow(6)))), + "provider", + ) + .await + .map_err(|e| format_err!("Verify block send err: {}", e))?; + + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn execute_blocks( + &self, + execute_operation: &BlocksExecuteOperation, + ) -> anyhow::Result { + let data = self.main_contract_eth_client.encode_tx_data( + "executeBlocks", + execute_operation.get_eth_tx_args().as_slice(), + ); + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx( + data, + Options::with(|f| 
f.gas = Some(U256::from(9 * 10u64.pow(6)))), + "provider", + ) + .await + .map_err(|e| format_err!("Execute block send err: {}", e))?; + + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } + + pub async fn revert_blocks(&self, last_committed_block: u32) -> anyhow::Result { + let data = self + .main_contract_eth_client + .encode_tx_data("revertBlocks", last_committed_block); + let signed_tx = self + .main_contract_eth_client + .sign_prepared_tx( + data, + Options::with(|f| f.gas = Some(U256::from(9 * 10u64.pow(6)))), + "provider", + ) + .await + .map_err(|e| format_err!("Revert blocks send err: {}", e))?; + + let receipt = + send_raw_tx_wait_confirmation(&self.main_contract_eth_client, signed_tx.raw_tx).await?; + + Ok(EthExecResult::new(receipt, &self.main_contract_eth_client).await) + } +} + +#[derive(Debug, Clone)] +pub struct EthExecResult { + receipt: TransactionReceipt, + revert_reason: Option, +} + +impl EthExecResult { + pub async fn new( + receipt: TransactionReceipt, + client: ÐDirectClient, + ) -> Self { + let revert_reason = if receipt.status == Some(U64::from(1)) { + None + } else { + let reason = client + .failure_reason(receipt.transaction_hash) + .await + .expect("Failed to get revert reason") + .unwrap() + .revert_reason; + + Some(reason) + }; + + Self { + receipt, + revert_reason, + } + } + + pub fn expect_success(self) -> TransactionReceipt { + assert!( + self.revert_reason.is_none(), + "Expected transaction success: revert_reason: {}, tx: 0x{:x}", + self.revert_reason.unwrap(), + self.receipt.transaction_hash + ); + + self.receipt + } + + pub fn expect_reverted(self, code: &str) -> TransactionReceipt { + if let Some(revert_reason) = self.revert_reason { + assert_eq!( + revert_reason, + code, + "Transaction reverted with incorrect revert reason expected: {:?}, found: {:?}, tx: 0x{:x}", + code, + revert_reason, + 
self.receipt.transaction_hash + ); + } else { + panic!( + "Expected transaction reverted: expected code: {:?}, tx: 0x{:x}", + code, self.receipt.transaction_hash + ); + } + + self.receipt + } +} + +async fn send_raw_tx_wait_confirmation( + client: ÐDirectClient, + raw_tx: Vec, +) -> Result { + let tx_hash = client + .send_raw_tx(raw_tx) + .await + .map_err(|e| format_err!("Failed to send raw tx: {}", e))?; + loop { + if let Some(receipt) = client + .tx_receipt(tx_hash, "provider") + .await + .map_err(|e| format_err!("Failed to get receipt from eth node: {}", e))? + { + return Ok(receipt); + } + } +} + +fn default_tx_options() -> Options { + // Set the gas limit, so `eth_client` won't complain about it. + Options { + gas: Some(500_000.into()), + ..Default::default() + } +} diff --git a/core/tests/testkit/src/external_commands.rs b/core/tests/testkit/src/external_commands.rs new file mode 100644 index 000000000000..201d5476ae86 --- /dev/null +++ b/core/tests/testkit/src/external_commands.rs @@ -0,0 +1,156 @@ +//! Run external commands from the zk toolkit +//! `zk` script should be in path. 
+ +use std::collections::HashMap; +use std::fs::read_to_string; +use std::path::PathBuf; +use std::process::Command; +use std::str::FromStr; +use zksync_types::{Address, H256}; +use zksync_utils::parse_env; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone)] +pub struct Contracts { + pub verifier: Address, + pub zk_sync: Address, + pub test_erc20_address: Address, + pub fail_on_receive: Address, +} + +fn get_contract_address(deploy_script_out: &str) -> Option<(String, Address)> { + if let Some(output) = deploy_script_out.strip_prefix("CONTRACTS_PROXY_ADDR=0x") { + Some(( + String::from("CONTRACTS_PROXY_ADDR"), + Address::from_str(output).expect("can't parse contract address"), + )) + } else if let Some(output) = deploy_script_out.strip_prefix("CONTRACTS_VERIFIER_ADDR=0x") { + Some(( + String::from("CONTRACTS_VERIFIER_ADDR"), + Address::from_str(output).expect("can't parse contract address"), + )) + } else if let Some(output) = deploy_script_out.strip_prefix("CONTRACTS_TEST_ERC20=0x") { + Some(( + String::from("CONTRACTS_TEST_ERC20"), + Address::from_str(output).expect("can't parse contract address"), + )) + } else { + deploy_script_out + .strip_prefix("CONTRACTS_FAIL_ON_RECEIVE=0x") + .map(|output| { + ( + String::from("CONTRACTS_FAIL_ON_RECEIVE"), + Address::from_str(output).expect("can't parse contract address"), + ) + }) + } +} + +/// Runs external command and returns stdout output +fn run_external_command(command: &str, args: &[&str]) -> String { + let result = Command::new(command) + .args(args) + .output() + .unwrap_or_else(|e| panic!("failed to execute command: {}, err: {}", command, e)); + + let stdout = String::from_utf8(result.stdout).expect("stdout is not valid utf8"); + let stderr = String::from_utf8(result.stderr).expect("stderr is not valid utf8"); + + if !result.status.success() { + panic!( + "failed to run external command {}:\nstdout: {}\nstderr: {}", + command, stdout, stderr + ); + } + stdout +} + +pub fn 
deploy_contracts(use_prod_contracts: bool, genesis_root: H256) -> Contracts { + let mut args = vec!["run", "deploy-testkit"]; + args.push("--genesis-root"); + let genesis_root = format!("0x{:x}", genesis_root); + args.push(genesis_root.as_str()); + + if use_prod_contracts { + args.push("--prod-contracts"); + } + let stdout = run_external_command("zk", &args); + + let mut contracts = HashMap::new(); + for std_out_line in stdout.split_whitespace().collect::>() { + if let Some((name, address)) = get_contract_address(std_out_line) { + contracts.insert(name, address); + } + } + + Contracts { + verifier: contracts + .remove("CONTRACTS_VERIFIER_ADDR") + .expect("VERIFIER_ADDR missing"), + zk_sync: contracts + .remove("CONTRACTS_PROXY_ADDR") + .expect("CONTRACT_ADDR missing"), + test_erc20_address: contracts + .remove("CONTRACTS_TEST_ERC20") + .expect("TEST_ERC20 missing"), + fail_on_receive: contracts + .remove("CONTRACTS_FAIL_ON_RECEIVE") + .expect("FAIL_ON_RECEIVE missing"), + } +} + +pub fn deploy_erc20_tokens() -> Vec
{ + let args = vec!["run", "deploy-erc20", "dev"]; + run_external_command("zk", &args); + + let mut path = parse_env::("ZKSYNC_HOME"); + path.push("etc"); + path.push("tokens"); + path.push("localhost.json"); + let text = read_to_string(path).expect("Unable to read file"); + let value: serde_json::Value = serde_json::from_str(&text).expect("Unable to parse"); + + let mut addresses = Vec::new(); + for token in value.as_array().expect("Incorrect file format") { + let address = token["address"].clone().as_str().unwrap().to_string(); + let address = address.strip_prefix("0x").unwrap(); + addresses.push(Address::from_str(address).unwrap()); + } + addresses +} + +pub fn run_upgrade_contract(zksync_address: Address, upgrade_gatekeeper_address: Address) { + run_external_command( + "zk", + &[ + "run", + "test-upgrade", + &format!("0x{:x}", zksync_address), + &format!("0x{:x}", upgrade_gatekeeper_address), + ], + ); +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct EthAccountInfo { + pub address: Address, + pub private_key: H256, +} + +/// First is vec of test accounts, second is operator account +pub fn get_test_accounts() -> (Vec, EthAccountInfo) { + let stdout = run_external_command("zk", &["run", "test-accounts"]); + + let mut parsed = serde_json::from_str::>(&stdout) + .expect("print test accounts script output is not parsed correctly"); + + let commit_account = parsed.remove(0); + assert!( + !parsed.is_empty(), + "can't use testkit without test accounts" + ); + + (parsed, commit_account) +} diff --git a/core/tests/testkit/src/main.rs b/core/tests/testkit/src/main.rs new file mode 100644 index 000000000000..7864664fe26e --- /dev/null +++ b/core/tests/testkit/src/main.rs @@ -0,0 +1,72 @@ +#![allow(clippy::derive_partial_eq_without_eq)] +#![allow(dead_code)] +use structopt::StructOpt; + +// use crate::commands::gas_price::test_gas_price; +// use crate::commands::revert_block::test_revert_blocks; +// use 
crate::commands::upgrade_contract::test_upgrade_contract; + +mod commands; +mod eth_provider; +mod external_commands; +mod server_handler; +mod tester; +mod types; +mod utils; + +#[derive(Debug, StructOpt)] +enum Command { + All, + RevertBlock, + UpgradeContract, + GasPrice, +} + +#[derive(Debug, StructOpt)] +#[structopt(name = "testkit", author = "Matter Labs")] +struct Opt { + #[structopt(subcommand)] + command: Command, + + #[structopt(short, long)] + debug: bool, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let opt = Opt::from_args(); + + if opt.debug { + vlog::init(); + } + + match opt.command { + Command::All => { + // println!("Start contract upgrade test"); + // test_upgrade_contract().await; + // println!(); + + // println!("Start gas price test"); + // test_gas_price().await; + // println!(); + + // println!("Start revert blocks test"); + // test_revert_blocks().await; + // println!(); + } + Command::RevertBlock => { + // println!("Start revert blocks test"); + // test_revert_blocks().await; + } + Command::UpgradeContract => { + // println!("Start contract upgrade test"); + // test_upgrade_contract().await; + } + Command::GasPrice => { + // println!("Start gas price test"); + // test_gas_price().await; + } + } + + Ok(()) +} diff --git a/core/tests/testkit/src/server_handler.rs b/core/tests/testkit/src/server_handler.rs new file mode 100644 index 000000000000..bfe67ea7e8df --- /dev/null +++ b/core/tests/testkit/src/server_handler.rs @@ -0,0 +1,42 @@ +use tokio::sync::watch; +use tokio::task::JoinHandle; + +use zksync_config::ZkSyncConfig; +use zksync_dal::ConnectionPool; +use zksync_storage::RocksDB; + +struct ProcessHandler(Option<(watch::Sender, JoinHandle<()>)>); + +impl Drop for ProcessHandler { + fn drop(&mut self) { + let stop_sender_and_thread_handle = self.0.take(); + + if let Some((stop_sender, _thread_handle)) = stop_sender_and_thread_handle { + // We don't actually need to await `_thread_handle` + // since sending stop signal 
must finish this task almost immediately + let _ = stop_sender.send(true); + } + } +} + +pub struct ServerHandler { + process_handler: ProcessHandler, +} + +impl ServerHandler { + pub fn spawn_server( + _db_path: String, + _state_keeper_db: RocksDB, + _config: ZkSyncConfig, + _state_keeper_pool: ConnectionPool, + _metadata_calculator_pool: ConnectionPool, + _mempool_pool: ConnectionPool, + ) -> Self { + } + + pub fn empty() -> Self { + Self { + process_handler: ProcessHandler(None), + } + } +} diff --git a/core/tests/testkit/src/tester.rs b/core/tests/testkit/src/tester.rs new file mode 100644 index 000000000000..52e29ea45668 --- /dev/null +++ b/core/tests/testkit/src/tester.rs @@ -0,0 +1,632 @@ +use num::{bigint::Sign, BigUint}; +use std::time::Instant; +use zksync_config::ZkSyncConfig; +use zksync_types::{ + web3::types::TransactionReceipt, Address, ExecuteTransactionCommon, L1BatchNumber, Transaction, + H256, U256, +}; +use zksync_utils::{biguint_to_u256, u256_to_biguint}; + +use crate::commands::utils::TestDatabaseManager; +use crate::eth_provider::EthExecResult; +use crate::server_handler::ServerHandler; +use crate::types::{ + AccountBalances, AccountHandler, BalanceUpdate, LayerType, OperationsQueue, ETHEREUM_ADDRESS, + VERY_BIG_BLOCK_NUMBER, +}; +use crate::utils::{get_executed_tx_fee, is_token_eth, l1_tx_from_logs}; +use zksync_contracts::erc20_contract; +use zksync_dal::StorageProcessor; +use zksync_storage::RocksDB; +use zksync_types::{ + aggregated_operations::BlocksCommitOperation, + api::web3::contract::tokens::Tokenize, + fee::Fee, + l1::{L1Tx, OpProcessingType, PriorityQueueType}, + l2::L2Tx, + utils::deployed_address_create, +}; + +#[derive(Clone, Debug, Eq, PartialEq, Default)] +pub struct State { + pub last_committed_block: L1BatchNumber, + pub last_executed_block: L1BatchNumber, + pub last_proved_block: L1BatchNumber, +} + +pub struct Tester { + pub db_manager: TestDatabaseManager, + pub storage: StorageProcessor<'static>, + pub operator_account: 
AccountHandler, + operations_queue: OperationsQueue, + account_balances: AccountBalances, + server_handler: ServerHandler, + state: State, +} + +impl Tester { + pub async fn new( + db_manager: TestDatabaseManager, + operator_account: AccountHandler, + config: ZkSyncConfig, + ) -> Self { + let state = State::default(); + let server_handler = ServerHandler::spawn_server( + db_manager.get_db_path(), + db_manager.get_state_keeper_db(), + config, + db_manager.create_pool(), + db_manager.create_pool(), + db_manager.create_pool(), + ); + + let storage = db_manager.connect_to_postgres().await; + Self { + db_manager, + storage, + operator_account: operator_account.clone(), + account_balances: AccountBalances::new(operator_account), + operations_queue: Default::default(), + server_handler, + state, + } + } + + pub async fn change_config(&mut self, config: ZkSyncConfig) { + // Server handler should be dropped firstly before spawning new server. + self.server_handler = ServerHandler::empty(); + RocksDB::await_rocksdb_termination(); + self.server_handler = ServerHandler::spawn_server( + self.db_manager.get_db_path(), + self.db_manager.get_state_keeper_db(), + config, + self.db_manager.create_pool(), + self.db_manager.create_pool(), + self.db_manager.create_pool(), + ); + } + + pub async fn add_tx_to_mempool(&mut self, tx: Transaction) { + let Transaction { + common_data, + execute, + received_timestamp_ms, + } = tx; + match common_data { + ExecuteTransactionCommon::L2(mut common_data) => { + common_data.set_input(H256::random().0.to_vec(), H256::random()); + self.storage.transactions_dal().insert_transaction_l2( + L2Tx { + execute, + common_data, + received_timestamp_ms, + }, + Default::default(), + ); + } + ExecuteTransactionCommon::L1(common_data) => { + self.storage.transactions_dal().insert_transaction_l1( + L1Tx { + execute, + common_data, + received_timestamp_ms, + }, + 0.into(), + ); + } + } + } + + async fn approve_erc20_if_need( + &mut self, + from: &AccountHandler, + 
token: Address, + amount: BigUint, + ) { + let allowance = from + .eth_provider + .allowance(token) + .await + .map(u256_to_biguint) + .unwrap(); + + if allowance < amount { + self.account_balances + .setup_balances(&mut self.storage, &[(from.clone(), ETHEREUM_ADDRESS)]) + .await; + + let execution_result = from + .eth_provider + .approve_erc20(token, u256_to_biguint(U256::max_value())) + .await + .expect("erc20 approve should not fail"); + let tx_receipt = execution_result.expect_success(); + let fee = get_executed_tx_fee(&from.eth_provider.main_contract_eth_client, &tx_receipt) + .await + .unwrap(); + + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + BalanceUpdate::new(LayerType::Ethereum, Sign::Minus, fee), + ); + } + } + + pub async fn deposit( + &mut self, + from: &AccountHandler, + to: &AccountHandler, + token: Address, + amount: BigUint, + ) -> TransactionReceipt { + self.account_balances + .setup_balances( + &mut self.storage, + &[ + (from.clone(), token), + (to.clone(), token), + (from.clone(), ETHEREUM_ADDRESS), + ], + ) + .await; + + let execution_result = if is_token_eth(token) { + from.eth_provider + .deposit_eth( + amount.clone(), + &to.address(), + PriorityQueueType::Deque, + OpProcessingType::Common, + Default::default(), + ) + .await + .expect("eth deposit should not fail") + } else { + self.approve_erc20_if_need(from, token, amount.clone()) + .await; + from.eth_provider + .deposit_erc20( + token, + amount.clone(), + &to.address(), + PriorityQueueType::Deque, + OpProcessingType::Common, + Default::default(), + ) + .await + .expect("erc20 deposit should not fail") + }; + let tx_receipt = execution_result.expect_success(); + let fee = get_executed_tx_fee(&from.eth_provider.main_contract_eth_client, &tx_receipt) + .await + .unwrap(); + let deposit = l1_tx_from_logs(&tx_receipt); + self.add_tx_to_mempool(deposit.into()).await; + + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + 
BalanceUpdate::new(LayerType::Ethereum, Sign::Minus, fee), + ); + self.account_balances.update_balance( + from.address(), + token, + BalanceUpdate::new(LayerType::Ethereum, Sign::Minus, amount.clone()), + ); + self.account_balances.update_balance( + to.address(), + token, + BalanceUpdate::new(LayerType::Zksync, Sign::Plus, amount), + ); + + tx_receipt + } + + pub async fn add_custom_token( + &mut self, + from: &AccountHandler, + token_address: Address, + name: String, + symbol: String, + decimals: u8, + ) -> TransactionReceipt { + let execution_result = from + .eth_provider + .add_custom_token( + token_address, + name, + symbol, + decimals, + PriorityQueueType::Deque, + OpProcessingType::Common, + Default::default(), + ) + .await + .expect("add token should not fail"); + let tx_receipt = execution_result.expect_success(); + let add_token = l1_tx_from_logs(&tx_receipt); + self.add_tx_to_mempool(add_token.into()).await; + + tx_receipt + } + + pub async fn add_token( + &mut self, + from: &AccountHandler, + token_address: Address, + ) -> TransactionReceipt { + let execution_result = from + .eth_provider + .add_token( + token_address, + PriorityQueueType::Deque, + OpProcessingType::Common, + Default::default(), + ) + .await + .expect("add token should not fail"); + let tx_receipt = execution_result.expect_success(); + let add_token = l1_tx_from_logs(&tx_receipt); + self.add_tx_to_mempool(add_token.into()).await; + + tx_receipt + } + + pub async fn transfer( + &mut self, + from: &AccountHandler, + to: &AccountHandler, + token: Address, + amount: BigUint, + fee: BigUint, + ) { + self.account_balances + .setup_balances( + &mut self.storage, + &[ + (from.clone(), token), + (to.clone(), token), + (from.clone(), ETHEREUM_ADDRESS), + (to.clone(), ETHEREUM_ADDRESS), + ], + ) + .await; + + let data = + create_transfer_calldata(to.sync_account.address, biguint_to_u256(amount.clone())); + + let fee = Fee { + gas_limit: biguint_to_u256(fee.clone()), + max_fee_per_gas: 1u32.into(), 
+ max_priority_fee_per_gas: 1u32.into(), + gas_per_pubdata_limit: Default::default(), + }; + + let signed_tx = from + .sync_account + .sign_execute(token, data, fee.clone(), None, true); + + self.add_tx_to_mempool(signed_tx.into()).await; + + self.account_balances.update_balance( + from.address(), + token, + BalanceUpdate::new(LayerType::Zksync, Sign::Minus, amount.clone()), + ); + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + BalanceUpdate::new( + LayerType::Zksync, + Sign::Minus, + u256_to_biguint(fee.max_total_fee()), + ), + ); + self.account_balances.update_balance( + to.address(), + token, + BalanceUpdate::new(LayerType::Zksync, Sign::Plus, amount), + ); + } + + pub async fn withdraw( + &mut self, + from: &AccountHandler, + to: &AccountHandler, + token: Address, + amount: BigUint, + fee: BigUint, + ) { + self.account_balances + .setup_balances( + &mut self.storage, + &[ + (from.clone(), token), + (to.clone(), token), + (from.clone(), ETHEREUM_ADDRESS), + ], + ) + .await; + let fee = Fee { + gas_limit: biguint_to_u256(fee.clone()), + max_fee_per_gas: 1u32.into(), + max_priority_fee_per_gas: 1u32.into(), + gas_per_pubdata_limit: Default::default(), + }; + let signed_tx = from.sync_account.sign_withdraw( + token, + biguint_to_u256(amount.clone()), + fee.clone(), + to.address(), + None, + true, + ); + self.add_tx_to_mempool(signed_tx.into()).await; + + self.account_balances.update_balance( + from.address(), + token, + BalanceUpdate::new(LayerType::Zksync, Sign::Minus, amount.clone()), + ); + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + BalanceUpdate::new( + LayerType::Zksync, + Sign::Minus, + u256_to_biguint(fee.max_total_fee()), + ), + ); + self.account_balances.update_balance( + to.address(), + token, + BalanceUpdate::new(LayerType::Ethereum, Sign::Plus, amount), + ); + } + + pub async fn deploy_contract( + &mut self, + from: &AccountHandler, + bytecode: Vec, + calldata: Vec, + fee: BigUint, + 
amount_of_contracts_deployed_by_address: U256, + ) -> Address { + self.account_balances + .setup_balances(&mut self.storage, &[(from.clone(), ETHEREUM_ADDRESS)]) + .await; + + let fee = Fee { + gas_limit: biguint_to_u256(fee.clone()), + max_fee_per_gas: 1u32.into(), + max_priority_fee_per_gas: 1u32.into(), + gas_per_pubdata_limit: Default::default(), + }; + let signed_tx = + from.sync_account + .sign_deploy_contract(bytecode, calldata, fee.clone(), None, true); + + let contract_address = deployed_address_create( + signed_tx.initiator_account(), + amount_of_contracts_deployed_by_address, + ); + self.add_tx_to_mempool(signed_tx.into()).await; + + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + BalanceUpdate::new( + LayerType::Zksync, + Sign::Minus, + u256_to_biguint(fee.max_total_fee()), + ), + ); + + contract_address + } + + pub async fn execute_contract( + &mut self, + from: &AccountHandler, + contract_address: Address, + calldata: Vec, + fee: BigUint, + ) { + self.account_balances + .setup_balances(&mut self.storage, &[(from.clone(), ETHEREUM_ADDRESS)]) + .await; + + let fee = Fee { + gas_limit: biguint_to_u256(fee.clone()), + max_fee_per_gas: 1u32.into(), + max_priority_fee_per_gas: 1u32.into(), + gas_per_pubdata_limit: Default::default(), + }; + let signed_tx = + from.sync_account + .sign_execute(contract_address, calldata, fee.clone(), None, true); + self.add_tx_to_mempool(signed_tx.into()).await; + self.account_balances.update_balance( + from.address(), + ETHEREUM_ADDRESS, + BalanceUpdate::new( + LayerType::Zksync, + Sign::Minus, + u256_to_biguint(fee.max_total_fee()), + ), + ); + } + + pub async fn load_operations(&mut self, limit: usize) { + let start = Instant::now(); + let blocks = loop { + if start.elapsed().as_secs() > 20 { + panic!("Expect load new operation"); + } + let all_blocks = self + .storage + .blocks_dal() + .get_ready_for_commit_blocks(VERY_BIG_BLOCK_NUMBER.0 as usize); + let blocks: Vec<_> = all_blocks + 
.into_iter() + .filter(|block| block.header.number > self.state.last_committed_block) + .take(limit) + .collect(); + if blocks.len() == limit { + break blocks; + } + }; + let last_committed_block = self + .storage + .blocks_dal() + .get_block_metadata(self.state.last_committed_block) + .unwrap(); + let commit_op = BlocksCommitOperation { + blocks, + last_committed_block, + }; + self.operations_queue.add_commit_op(commit_op); + } + + pub async fn commit_blocks( + &mut self, + ) -> Option<((L1BatchNumber, L1BatchNumber), EthExecResult)> { + let block_commit_op = self.operations_queue.get_commit_op(); + if let Some(block_commit_op) = block_commit_op { + let block_range = block_commit_op.block_range(); + let exec_result = self + .operator_account + .eth_provider + .commit_blocks(&block_commit_op) + .await + .expect("commit block tx"); + self.state.last_committed_block = block_range.1; + + Some((block_range, exec_result)) + } else { + None + } + } + + pub async fn verify_blocks( + &mut self, + ) -> Option<((L1BatchNumber, L1BatchNumber), EthExecResult)> { + let block_verify_op = self.operations_queue.get_verify_op(); + if let Some(block_verify_op) = block_verify_op { + let block_range = block_verify_op.block_range(); + let exec_result = self + .operator_account + .eth_provider + .verify_blocks(&block_verify_op) + .await + .expect("verify block tx"); + self.state.last_proved_block = block_range.1; + + Some((block_range, exec_result)) + } else { + None + } + } + + pub async fn execute_blocks( + &mut self, + ) -> Option<((L1BatchNumber, L1BatchNumber), EthExecResult)> { + let block_execute_op = self.operations_queue.get_execute_op(); + if let Some(block_execute_op) = block_execute_op { + let block_range = block_execute_op.block_range(); + let exec_result = self + .operator_account + .eth_provider + .execute_blocks(&block_execute_op) + .await + .expect("execute block tx"); + self.state.last_executed_block = block_range.1; + + Some((block_range, exec_result)) + } else { + 
None + } + } + + pub async fn revert_blocks(&mut self) -> Option<(L1BatchNumber, EthExecResult)> { + let blocks = self.operations_queue.revert_blocks(); + if let Some(blocks) = blocks { + let last_committed_block = blocks[0].header.number - 1; + + let exec_result = self + .operator_account + .eth_provider + .revert_blocks(*last_committed_block) + .await + .expect("execute block tx"); + + Some((last_committed_block, exec_result)) + } else { + None + } + } + + pub async fn assert_balances_correctness(&mut self) { + let mut checks_failed = false; + for ((eth_account, token_address), expected_balance) in + &self.account_balances.eth_accounts_balances + { + let real_balance = self + .account_balances + .get_real_balance( + &mut self.storage, + *eth_account, + *token_address, + LayerType::Ethereum, + ) + .await; + + if expected_balance != &real_balance { + println!( + "eth acc: {:x}, token address: {:x}", + *eth_account, token_address + ); + println!("expected: {}", expected_balance); + println!("real: {}", real_balance); + checks_failed = true; + } + } + + for ((zksync_account, token_address), expected_balance) in + &self.account_balances.sync_accounts_balances + { + let real_balance = self + .account_balances + .get_real_balance( + &mut self.storage, + *zksync_account, + *token_address, + LayerType::Zksync, + ) + .await; + + if &real_balance != expected_balance { + println!( + "zkSync acc: {:x}, token address: {:x}", + zksync_account, token_address + ); + println!("expected: {}", expected_balance); + println!("real: {}", real_balance); + checks_failed = true; + } + } + + assert!(!checks_failed, "failed check balances correctness"); + } +} +pub fn create_transfer_calldata(to: Address, amount: U256) -> Vec { + let contract = erc20_contract(); + let contract_function = contract + .function("transfer") + .expect("failed to get function parameters"); + let params = (to, amount); + contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters") 
+} diff --git a/core/tests/testkit/src/types.rs b/core/tests/testkit/src/types.rs new file mode 100644 index 000000000000..b03bd3f3506e --- /dev/null +++ b/core/tests/testkit/src/types.rs @@ -0,0 +1,316 @@ +//! Common primitives used within testkit. +#![allow(clippy::map_entry)] // Clippy doesn't take `async` code in block into account. + +use std::collections::{HashMap, VecDeque}; +use std::default::Default; + +use num::{bigint::Sign, BigUint, Zero}; +use zksync_types::web3::transports::Http; +use zksync_types::L1ChainId; + +use zksync_config::ZkSyncConfig; +// use zksync_crypto::franklin_crypto::bellman::pairing::{ +// bn256::Fr, +// ff::{PrimeField, PrimeFieldRepr}, +// }; +// use zksync_prover_utils::precomputed::load_precomputed_proofs; +use zksync_test_account::ZkSyncAccount; +use zksync_types::aggregated_operations::{ + BlocksCommitOperation, BlocksExecuteOperation, BlocksProofOperation, +}; +use zksync_types::{ + api, commitment::BlockWithMetadata, AccountTreeId, Address, L1BatchNumber, Nonce, H256, +}; +use zksync_utils::u256_to_biguint; + +use crate::eth_provider::EthereumProvider; +use crate::utils::is_token_eth; +use zksync_dal::StorageProcessor; + +pub static ETHEREUM_ADDRESS: Address = Address::zero(); +pub const VERY_BIG_BLOCK_NUMBER: L1BatchNumber = L1BatchNumber(10000000); + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub enum BlockProcessing { + CommitAndExecute, + CommitAndRevert, +} + +#[derive(Debug, Clone)] +pub struct AccountHandler { + pub sync_account: ZkSyncAccount, + pub eth_provider: EthereumProvider, +} + +impl AccountHandler { + pub fn new( + private_key: H256, + transport: Http, + config: &ZkSyncConfig, + zksync_contract: Address, + ) -> Self { + let sync_account = ZkSyncAccount::new(private_key, Nonce(0)); + let eth_provider = EthereumProvider::new( + sync_account.private_key, + transport, + zksync_contract, + L1ChainId(config.eth_client.chain_id), + ); + + Self { + sync_account, + eth_provider, + } + } + + pub fn rand(config: 
&ZkSyncConfig, zksync_contract: Address) -> Self { + let sync_account = ZkSyncAccount::rand(); + let eth_provider = { + let transport = Http::new(&config.eth_client.web3_url).expect("http transport start"); + EthereumProvider::new( + sync_account.private_key, + transport, + zksync_contract, + L1ChainId(config.eth_client.chain_id), + ) + }; + + Self { + sync_account, + eth_provider, + } + } + + pub fn address(&self) -> Address { + self.eth_provider.address + } +} + +#[derive(PartialEq)] +pub enum LayerType { + Ethereum, + Zksync, +} + +pub struct BalanceUpdate { + layer_type: LayerType, + sign: Sign, + amount: BigUint, +} + +impl BalanceUpdate { + pub fn new(layer_type: LayerType, sign: Sign, amount: BigUint) -> Self { + Self { + layer_type, + sign, + amount, + } + } +} + +// Struct used to keep expected balance changes after transactions execution. +#[derive(Debug)] +pub struct AccountBalances { + operator_account: AccountHandler, + + /// (Account address, Token address) -> balance + pub eth_accounts_balances: HashMap<(Address, Address), BigUint>, + /// (Account address, Token address) -> balance + pub sync_accounts_balances: HashMap<(Address, Address), BigUint>, +} + +impl AccountBalances { + pub fn new(operator_account: AccountHandler) -> Self { + Self { + operator_account, + eth_accounts_balances: Default::default(), + sync_accounts_balances: Default::default(), + } + } + + pub async fn get_real_balance( + &self, + storage: &mut StorageProcessor<'static>, + account: Address, + token: Address, + layer_type: LayerType, + ) -> BigUint { + if layer_type == LayerType::Ethereum { + let balances_to_withdraw = self + .operator_account + .eth_provider + .balances_to_withdraw(token, Some(account)) + .await + .unwrap(); + + let main_balance = if is_token_eth(token) { + self.operator_account + .eth_provider + .eth_balance(Some(account)) + .await + .unwrap() + } else { + self.operator_account + .eth_provider + .erc20_balance(&token, Some(account)) + .await + .unwrap() + }; + 
+ main_balance + balances_to_withdraw + } else { + let balance = storage + .storage_web3_dal() + .standard_token_historical_balance( + AccountTreeId::new(token), + AccountTreeId::new(account), + api::BlockId::Number(api::BlockNumber::Pending), + ) + .unwrap() + .unwrap(); + u256_to_biguint(balance) + } + } + + pub async fn setup_balances( + &mut self, + storage: &mut StorageProcessor<'static>, + accounts_and_tokens: &[(AccountHandler, Address)], + ) { + for (account_handler, token_address) in accounts_and_tokens { + if account_handler.address() == self.operator_account.address() { + continue; + } + + // Add info for L1 balance + if !self + .eth_accounts_balances + .contains_key(&(account_handler.address(), *token_address)) + { + let balance = self + .get_real_balance( + storage, + account_handler.address(), + *token_address, + LayerType::Ethereum, + ) + .await; + + self.eth_accounts_balances + .insert((account_handler.address(), *token_address), balance); + } + + // Add info for L2 balance + if !self + .sync_accounts_balances + .contains_key(&(account_handler.address(), *token_address)) + { + let balance = self + .get_real_balance( + storage, + account_handler.address(), + *token_address, + LayerType::Zksync, + ) + .await; + + self.sync_accounts_balances + .insert((account_handler.address(), *token_address), balance); + } + } + } + + pub fn update_balance( + &mut self, + account: Address, + token: Address, + balance_update: BalanceUpdate, + ) { + if account != self.operator_account.address() { + let balance = match balance_update.layer_type { + LayerType::Ethereum => self.eth_accounts_balances.get_mut(&(account, token)), + LayerType::Zksync => self.sync_accounts_balances.get_mut(&(account, token)), + } + .unwrap(); + match balance_update.sign { + Sign::Plus => { + *balance += balance_update.amount; + } + Sign::Minus => { + *balance -= balance_update.amount; + } + Sign::NoSign => { + assert!(balance_update.amount.is_zero()); + } + } + } + } +} + +#[derive(Default)] 
+pub struct OperationsQueue { + commit_queue: VecDeque, + verify_queue: VecDeque, + execute_queue: VecDeque, +} + +impl OperationsQueue { + pub fn add_commit_op(&mut self, commit_op: BlocksCommitOperation) { + self.commit_queue.push_back(commit_op); + } + + pub fn get_commit_op(&mut self) -> Option { + let commit_op = self.commit_queue.pop_front(); + if let Some(ref commit_op) = commit_op { + // let mut proof = load_precomputed_proofs().unwrap().aggregated_proof; + // proof.individual_vk_inputs = Vec::new(); + // for block in &commit_op.blocks { + // let mut block_commitment = block.block_commitment.as_bytes().to_vec(); + // block_commitment[0] &= 0xffu8 >> 3; + // + // //convert from bytes to fr + // let mut fr_repr = ::Repr::default(); + // fr_repr.read_be(&*block_commitment).unwrap(); + // let block_commitment = Fr::from_repr(fr_repr).unwrap(); + // + // proof.individual_vk_inputs.push(block_commitment); + // proof.individual_vk_idxs.push(0); + // } + + let verify_op = BlocksProofOperation { + // This should be changed if testkit is be restored as it is not a previous block. + prev_block: commit_op.blocks.first().unwrap().clone(), + blocks: commit_op.blocks.clone(), + proofs: Vec::default(), + should_verify: false, + }; + + self.verify_queue.push_back(verify_op); + } + commit_op + } + + pub fn get_verify_op(&mut self) -> Option { + let verify_op = self.verify_queue.pop_front(); + if let Some(ref verify_op) = verify_op { + let execute_op = BlocksExecuteOperation { + blocks: verify_op.blocks.clone(), + }; + + self.execute_queue.push_back(execute_op); + } + verify_op + } + + pub fn get_execute_op(&mut self) -> Option { + self.execute_queue.pop_front() + } + + pub fn revert_blocks(&mut self) -> Option> { + // the block that needs to be reverted is already committed, + // so it is in the queue for verification. 
+ self.verify_queue + .pop_front() + .map(|verify_op| verify_op.blocks) + } +} diff --git a/core/tests/testkit/src/utils.rs b/core/tests/testkit/src/utils.rs new file mode 100644 index 000000000000..5339f259df7e --- /dev/null +++ b/core/tests/testkit/src/utils.rs @@ -0,0 +1,69 @@ +use anyhow::format_err; +use num::BigUint; +use std::convert::TryFrom; +use std::fs::File; +use std::io::Read; +use std::path::PathBuf; +use zksync_utils::parse_env; + +use zksync_contracts::read_sys_contract_bytecode; +use zksync_eth_client::ETHDirectClient; +use zksync_eth_signer::PrivateKeySigner; +use zksync_types::{l1::L1Tx, web3::types::TransactionReceipt, Address}; +use zksync_utils::u256_to_biguint; + +use crate::types::ETHEREUM_ADDRESS; + +pub fn load_test_bytecode_and_calldata() -> (Vec, Vec, Vec) { + let mut dir_path = parse_env::("ZKSYNC_HOME"); + dir_path.push("etc/contracts-test-data/e"); + let bytecode = read_sys_contract_bytecode("", "Emitter"); + + let mut dir_path = parse_env::("ZKSYNC_HOME"); + dir_path.push("etc"); + dir_path.push("contracts-test-data"); + dir_path.push("events"); + let calldata = { + let mut calldata_path = dir_path; + calldata_path.push("sample-calldata"); + + let mut calldata_file = File::open(calldata_path).unwrap(); + let mut calldata = Vec::new(); + calldata_file.read_to_end(&mut calldata).unwrap(); + calldata + }; + + (bytecode, Vec::new(), calldata) +} + +pub fn l1_tx_from_logs(receipt: &TransactionReceipt) -> L1Tx { + receipt + .logs + .iter() + .find_map(|op| L1Tx::try_from(op.clone()).ok()) + .expect("failed get L1 tx from logs") +} + +pub fn is_token_eth(token_address: Address) -> bool { + token_address == ETHEREUM_ADDRESS +} + +/// Get fee paid in wei for tx execution +pub async fn get_executed_tx_fee( + client: ÐDirectClient, + receipt: &TransactionReceipt, +) -> anyhow::Result { + let gas_used = receipt.gas_used.ok_or_else(|| { + format_err!( + "Not used gas in the receipt: 0x{:x?}", + receipt.transaction_hash + ) + })?; + + let tx = 
client + .get_tx(receipt.transaction_hash, "utils") + .await? + .ok_or_else(|| format_err!("Transaction not found: 0x{:x?}", receipt.transaction_hash))?; + + Ok(u256_to_biguint(gas_used * tx.gas_price.unwrap_or_default())) +} diff --git a/core/tests/ts-integration/README.md b/core/tests/ts-integration/README.md new file mode 100644 index 000000000000..1e86d023befc --- /dev/null +++ b/core/tests/ts-integration/README.md @@ -0,0 +1,189 @@ +# NFTF -- New Fancy Test Framework + +This folder contains a framework for writing integration tests for zkSync Era, as well as set of integration test +suites. + +This framework is built atop of [jest](https://jestjs.io/). It is _highly recommended_ to familiarize yourself with its +documentation to be able to write tests. + +## Features + +- Separated framework/tests logic. +- Parallel test suite execution. +- Ability to run tests concurrently within one suite. +- Custom matchers & helpers. +- Automated used funds recollection. + +### Test run lifecycle + +Before starting any test, framework would analyze `tests` folder to pick up all the test suites we have, and then will +prepare the context for tests. Context initialization consists of: + +- Waiting for server to start. +- Creating personal accounts for each test suite. +- Providing funds to these accounts. + +Basically, during initialization, everything is prepared for writing tests that interact with zkSync. + +After that, each test suite is ran _in parallel_. Each test suite can claim its own account and be sure that this +account has funds on it and is not used by any other suite. + +After all the test suites are completed, funds from all the used accounts are recollected and sent back to the main +account used to setup the test. + +### Sample test suite + +To create a new test suite, you just need to create a file named `.test.ts` in the `tests` directory. 
+ +Sample test suite would look like this: + +```typescript +/** + * This suite contains tests checking our handling of Ether (such as depositing, checking `msg.value`, etc). + */ + +import { TestMaster } from '../src/index'; +import { shouldChangeETHBalances } from '../src/matchers/transaction-modifiers'; + +import * as zksync from 'zksync-web3'; +import { BigNumber } from 'ethers'; + +describe('ETH token checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let bob: zksync.Wallet; + + beforeAll(() => { + // Test master is an interface to access prefunded account. + testMaster = TestMaster.getInstance(__filename); + // ...through it you can obtain a personal funded account. + alice = testMaster.mainAccount(); + // ...and an unlimited amount of empty ones. + bob = testMaster.newEmptyAccount(); + }); + + test('Can perform a transfer', async () => { + const value = BigNumber.from(200); + + // Declare modifier to check that ETH balances would change in the following way. + const ethBalanceChange = await shouldChangeETHBalances([ + { wallet: alice, change: -value }, + { wallet: bob, change: value } + ]); + + // Send transfer, it should succeed. Apply a modifier we declared above. + await expect(alice.sendTransaction({ to: bob.address, value })).toBeAccepted([ethBalanceChange]); + }); + + afterAll(async () => { + // It will collect all the funds from temporary accounts back. + await testMaster.deinitialize(); + }); +}); +``` + +### Custom matchers + +One of the big features of this framework is the ability to create custom matchers to not duplicate the checking logic +that is used in multiple tests. + +To see documentation on the matchers available by default, see the [jest documentation](https://jestjs.io/docs/expect). +To see documentation on the custom matchers of this framework, see [the type definitions file](./typings/jest.d.ts). + +To declare your own matcher, look at [existing matchers](./src/matchers/) and use them as an example. 
Once matchers are +implemented, register them at [setup file](./src/jest-setup/add-matchers.ts) and add type declarations to the +[typings file](./typings/jest.d.ts). Don't forget to add documentation for the matcher! + +### Matcher modifiers + +`toBeAccepted` and `toBeRejected` matchers accept modifiers. You can see one (`shouldChangeETHBalances`) above. There +are others (like `shouldChangeTokenBalances` or `shouldOnlyTakeFee`), and if needed you can create your own ones. + +These modifiers would be applied to the transaction receipt, and you can implement any kind of custom logic there. To do +so, you just need to declare a class that inherits `MatcherModifier` class and implements the `check` method. + +For more details on the interface, see the +[transaction-modifiers.ts](./src/transaction-matcher-modifiers/transaction-modifiers.ts). + +Note: you don't have to always declare modifiers there. If your modifier is specific to one suite only, you can declare +it right there and use in your tests. + +## Helpers + +For common actions like deploying a contract or transferring funds, a set of helpers is provided. These helpers can be +found in the [corresponding file](./src/helpers.ts). + +Feel free to add new functionality there if it'll be used by multiple test suites. If the logic is exclusive for just +one test suite, you may declare your own helper functions right in the suite file. + +## Debug logging + +During context initialization and teardown you can enable showing the debug logs to troubleshoot issues. To do so, run +tests with the `ZKSYNC_DEBUG_LOGS` environment variable set. + +To add more logs to the context initialization, use `reporter` object in the `ContextOwner` class. Example: + +```typescript +this.reporter.debug(`Some useful info`); +``` + +## Fast and long modes + +Some integration tests may be very long to run in real environment (e.g. stage). 
Namely, tests that wait for the block +finalization: it may take several hours to generate a proof and send it onchain. + +Because of that, framework supports "fast" and "long" modes. `TestMaster` objects have `isFastMode` method to determine +which mode is currently being used. + +If you're going to write a test that can make test run duration longer, it is advised to guard the "long" part with the +corresponding check. + +By default, "long" mode is assumed, and to enable the "fast" mode one must set the `ZK_INTEGRATION_TESTS_FAST_MODE` +environment variable to `true`. + +## Helpful patterns + +### Checking promise and then using it + +Sometimes you need to check promise in a matcher, but then you need to actually use the resolved result. Solution: in +JS/TS it's OK to await the promise twice. + +You can do it as follows: + +```typescript +// Create promise +const withdrawalPromise = alice.withdraw({ token: zksync.utils.ETH_ADDRESS, amount }); +// Here you use promise in the matcher. +await expect(withdrawalPromise).toBeAccepted([l2ethBalanceChange]); +// And here you retrieve its value. It's OK to await same promise twice. +const withdrawalTx = await withdrawalPromise; +await withdrawalTx.waitFinalize(); +``` + +### Focus on what you're checking + +Don't try to put all the checks in the world to your test. + +For example: If you need to do a transfer, and we already have a test for transfers, you don't need to check if it +succeeds. Every failed transaction would throw an error and fail the tests, and the exact test for transfers would show +the cause in this case. + +Focus on what you need to check. Assume that all the prerequisites for that work as expected. Keep the tests short and +simple. + +## Big DONTs + +To avoid the test suite becoming a mess, please DON'T: + +- Test more than one thing in one `test` block. Tests should be short and check the thing that is written in its + description. +- Duplicate test logic in multiple tests. 
If you need to check the same thing twice, try to implement a matcher instead. +- Add tests that don't match suite logic. If your test (or tests) check something that is not strictly related to the + topic of suite, create a new suite instead. Suites are being run in parallel, and we should use this fact. +- Perform L1 operations if not strictly required. Depositing and waiting for finalization are operations that take a lot + of time, especially on stage. It should be avoided, and if it can't be avoided, try to optimize tests so that less + time is spent waiting without any actions. +- Create accounts yourself. Framework recollects used funds back after the execution, so the only right way to create a + new wallet is through `TestMaster`. +- Rely on things that you don't control. For example, balance of the fee account: multiple transactions may affect it in + parallel, so don't expect it to have the exact value. diff --git a/core/tests/ts-integration/contracts/README.md b/core/tests/ts-integration/contracts/README.md new file mode 100644 index 000000000000..d08f934e8456 --- /dev/null +++ b/core/tests/ts-integration/contracts/README.md @@ -0,0 +1,4 @@ +# Contracts test data + +This folder contains data for contracts that are being used for testing to check the correctness of the smart contract +flow in zkSync. 
diff --git a/core/tests/ts-integration/contracts/basic-constructor/basic-constructor.sol b/core/tests/ts-integration/contracts/basic-constructor/basic-constructor.sol new file mode 100644 index 000000000000..35fa736befe6 --- /dev/null +++ b/core/tests/ts-integration/contracts/basic-constructor/basic-constructor.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +contract SimpleConstructor { + uint256 c; + + constructor(uint256 a, uint256 b, bool shouldRevert) { + c = a * b; + require(!shouldRevert); + } + + function get() public view returns (uint256) { + return c; + } +} diff --git a/core/tests/ts-integration/contracts/context/context.sol b/core/tests/ts-integration/contracts/context/context.sol new file mode 100644 index 000000000000..94969ac66f9e --- /dev/null +++ b/core/tests/ts-integration/contracts/context/context.sol @@ -0,0 +1,47 @@ +// SPDX-License-Identifier: UNLICENSED + +pragma solidity ^0.8.0; + +contract Context { + function getBlockNumber() public view returns (uint256) { + return block.number; + } + + function getBlockTimestamp() public view returns (uint256) { + return block.timestamp; + } + + function getBlockGasLimit() public view returns (uint256) { + return block.gaslimit; + } + + function getTxGasPrice() public view returns (uint256) { + return tx.gasprice; + } + + function checkBlockNumber(uint256 fromBlockNumber, uint256 toBlockNumber) public { + require(fromBlockNumber <= block.number && block.number <= toBlockNumber, "block number is out of range"); + } + + function checkBlockTimestamp(uint256 fromTimestamp, uint256 toTimestamp) public { + require(fromTimestamp <= block.timestamp && block.timestamp <= toTimestamp, "block timestamp is out of range"); + } + + function checkTxOrigin(address expectedOrigin) public { + require(tx.origin == expectedOrigin, "tx.origin is invalid"); + } + + function getBaseFee() public view returns (uint256) { + return block.basefee; + } + + function 
requireMsgValue(uint256 _requiredValue) external payable { + require(msg.value == _requiredValue); + } + + uint256 public valueOnCreate; + + constructor() payable { + valueOnCreate = msg.value; + } +} diff --git a/core/tests/ts-integration/contracts/counter/counter.sol b/core/tests/ts-integration/contracts/counter/counter.sol new file mode 100644 index 000000000000..841e4caa7d7e --- /dev/null +++ b/core/tests/ts-integration/contracts/counter/counter.sol @@ -0,0 +1,26 @@ +// SPDX-License-Identifier: UNLICENSED + +pragma solidity ^0.8.0; + +contract Counter { + uint256 value; + + function increment(uint256 x) public { + value += x; + } + + function incrementWithRevert(uint256 x, bool shouldRevert) public { + value += x; + if(shouldRevert) { + revert("This method always reverts"); + } + } + + function set(uint256 x) public { + value = x; + } + + function get() public view returns (uint256) { + return value; + } +} diff --git a/core/tests/ts-integration/contracts/create/Foo.sol b/core/tests/ts-integration/contracts/create/Foo.sol new file mode 100644 index 000000000000..1ae4868e5bf6 --- /dev/null +++ b/core/tests/ts-integration/contracts/create/Foo.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.1; +pragma abicoder v2; + +contract Foo { + string public name = "Foo"; +} diff --git a/core/tests/ts-integration/contracts/create/create.sol b/core/tests/ts-integration/contracts/create/create.sol new file mode 100644 index 000000000000..ef03e7c457ce --- /dev/null +++ b/core/tests/ts-integration/contracts/create/create.sol @@ -0,0 +1,17 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.1; +pragma abicoder v2; + +// import Foo.sol from current directory +import "./Foo.sol"; + +contract Import { + // Initialize Foo.sol + Foo public foo = new Foo(); + + // Test Foo.sol by getting it's name. 
+ function getFooName() public view returns (string memory) { + return foo.name(); + } +} \ No newline at end of file diff --git a/core/tests/ts-integration/contracts/custom-account/Constants.sol b/core/tests/ts-integration/contracts/custom-account/Constants.sol new file mode 100644 index 000000000000..76d5b000d6f2 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/Constants.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "./interfaces/INonceHolder.sol"; +import "./interfaces/IContractDeployer.sol"; +import "./SystemContext.sol"; + +uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15 + +address constant ECRECOVER_SYSTEM_CONTRACT = address(0x01); +address constant SHA256_SYSTEM_CONTRACT = address(0x02); + +address payable constant BOOTLOADER_FORMAL_ADDRESS = payable(address(SYSTEM_CONTRACTS_OFFSET + 0x01)); +INonceHolder constant NONCE_HOLDER_SYSTEM_CONTRACT = INonceHolder(address(SYSTEM_CONTRACTS_OFFSET + 0x03)); + +// A contract that is allowed to deploy any codehash +// on any address. To be used only during an upgrade. +address constant FORCE_DEPLOYER = address(SYSTEM_CONTRACTS_OFFSET + 0x07); +address constant MSG_VALUE_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x09); +IContractDeployer constant DEPLOYER_SYSTEM_CONTRACT = IContractDeployer(address(SYSTEM_CONTRACTS_OFFSET + 0x06)); + + +address constant KECCAK256_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x10); + +address constant ETH_TOKEN_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x0a); +SystemContext constant SYSTEM_CONTEXT_CONTRACT = SystemContext(address(SYSTEM_CONTRACTS_OFFSET + 0x0b)); + +uint256 constant MAX_SYSTEM_CONTRACT_ADDRESS = 0xffff; + +bytes32 constant DEFAULT_ACCOUNT_CODE_HASH = 0x00; + +// The number of bytes that are published during the contract deployment +// in addition to the bytecode itself. 
+uint256 constant BYTECODE_PUBLISHING_OVERHEAD = 100; + +uint256 constant MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT = 2**128; diff --git a/core/tests/ts-integration/contracts/custom-account/RLPEncoder.sol b/core/tests/ts-integration/contracts/custom-account/RLPEncoder.sol new file mode 100644 index 000000000000..13884b71727b --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/RLPEncoder.sol @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +library RLPEncoder { + function encodeAddress(address _val) internal pure returns (bytes memory encoded) { + // The size is equal to 14 bytes of the address itself + 1 for encoding bytes length in RLP. + encoded = new bytes(0x15); + + bytes20 shiftedVal = bytes20(_val); + assembly { + // In the first byte we write the encoded length as 0x80 + 0x14 == 0x94. + mstore(add(encoded, 0x20), 0x9400000000000000000000000000000000000000000000000000000000000000) + // Write address data without stripping zeros. + mstore(add(encoded, 0x21), shiftedVal) + } + } + + function encodeUint256(uint256 _val) internal pure returns (bytes memory encoded) { + unchecked { + if (_val < 128) { + encoded = new bytes(1); + // Handle zero as a non-value, since stripping zeroes results in an empty byte array + encoded[0] = (_val == 0) ? bytes1(uint8(128)) : bytes1(uint8(_val)); + } else { + uint256 hbs = _highestByteSet(_val); + + encoded = new bytes(hbs + 2); + encoded[0] = bytes1(uint8(hbs + 0x81)); + + uint256 lbs = 31 - hbs; + uint256 shiftedVal = _val << (lbs * 8); + + assembly { + mstore(add(encoded, 0x21), shiftedVal) + } + } + } + } + + /// @notice Encodes the size of bytes in RLP format. + /// NOTE: panics if the length is 1, since the length encoding is ambiguous in this case. + function encodeNonSingleBytesLen(uint256 _len) internal pure returns (bytes memory) { + assert(_len != 1); + return _encodeLength(_len, 0x80); + } + + /// @notice Encodes the size of list items in RLP format. 
+ function encodeListLen(uint256 _len) internal pure returns (bytes memory) { + return _encodeLength(_len, 0xc0); + } + + function _encodeLength(uint256 _len, uint256 _offset) private pure returns (bytes memory encoded) { + unchecked { + if (_len < 56) { + encoded = new bytes(1); + encoded[0] = bytes1(uint8(_len + _offset)); + } else { + uint256 hbs = _highestByteSet(_len); + + encoded = new bytes(hbs + 2); + encoded[0] = bytes1(uint8(_offset + hbs + 56)); + + uint256 lbs = 31 - hbs; + uint256 shiftedVal = _len << (lbs * 8); + + assembly { + mstore(add(encoded, 0x21), shiftedVal) + } + } + } + } + + /// @notice Computes the index of the highest byte set in number. + /// @notice Uses little endian ordering (The least significant byte has index `0`). + /// NOTE: returns `0` for `0` + function _highestByteSet(uint256 _number) private pure returns (uint256 hbs) { + // should be resolver after evaluating the cost of opcodes. + if (_number >= 2**128) { + _number >>= 128; + hbs += 16; + } + if (_number >= 2**64) { + _number >>= 64; + hbs += 8; + } + if (_number >= 2**32) { + _number >>= 32; + hbs += 4; + } + if (_number >= 2**16) { + _number >>= 16; + hbs += 2; + } + if (_number >= 2**8) { + hbs += 1; + } + } +} diff --git a/core/tests/ts-integration/contracts/custom-account/SystemContext.sol b/core/tests/ts-integration/contracts/custom-account/SystemContext.sol new file mode 100644 index 000000000000..921e14383684 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/SystemContext.sol @@ -0,0 +1,103 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import {BOOTLOADER_FORMAL_ADDRESS} from "./Constants.sol"; + +/** + * @author Matter Labs + * @notice Contract that stores some of the context variables, that may be either + * block-scoped, tx-scoped or system-wide. + */ +contract SystemContext { + modifier onlyBootloader() { + require(msg.sender == BOOTLOADER_FORMAL_ADDRESS); + _; + } + + /// @notice The chainId of the network. 
It is set at the genesis. + uint256 public chainId; + + /// @notice The `tx.origin` in the current transaction. + /// @dev It is updated before each transaction by the bootloader + address public origin; + + /// @notice The `tx.gasPrice` in the current transaction. + /// @dev It is updated before each transaction by the bootloader + uint256 public gasPrice; + + /// @notice The current block's gasLimit (gasLimit in Ethereum terms). + /// @dev Currently set to some dummy value, it will be changed closer to mainnet. + uint256 public blockGasLimit = (1 << 30); + + /// @notice The `block.coinbase` in the current transaction. + /// @dev For the support of coinbase, we will the bootloader formal address for now + address public coinbase = BOOTLOADER_FORMAL_ADDRESS; + + /// @notice Formal `block.difficulty` parameter. + uint256 public difficulty = 2500000000000000; + + /// @notice the field for the temporary value of msize. It was added before the + /// msize was supported natively the VM and currently we keep it for the storage layout + /// consistency. + uint256 public msize = (1 << 24); + + /// @notice The `block.basefee`. + /// @dev It is currently a constant. + uint256 public baseFee; + + /// @notice The coefficient with which the current block's number + /// is stored in the current block info + uint256 constant BLOCK_INFO_BLOCK_NUMBER_PART = 2 ** 128; + + /// @notice block.number and block.timestamp stored packed. + /// @dev It is equal to 2^128 * block_number + block_timestamp. + uint256 public currentBlockInfo; + + /// @notice The hashes of blocks. + /// @dev It stores block hashes for all previous blocks. + mapping(uint256 => bytes32) public blockHash; + + /// @notice Set the current tx origin. + /// @param _newOrigin The new tx origin. + function setTxOrigin(address _newOrigin) external onlyBootloader { + origin = _newOrigin; + } + + /// @notice Set the current tx origin. + /// @param _gasPrice The new tx gasPrice. 
+ function setGasPrice(uint256 _gasPrice) external onlyBootloader { + gasPrice = _gasPrice; + } + + /// @notice The method that emulates `blockhash` opcode in EVM. + /// @dev Just like the blockhash in the EVM, it returns bytes32(0), when + /// when queried about hashes that are older than 256 blocks ago. + function getBlockHashEVM(uint256 _block) external view returns (bytes32 hash) { + if (block.number < _block || block.number - _block > 256) { + hash = bytes32(0); + } else { + hash = blockHash[_block]; + } + } + + /// @notice Returns the current blocks' number and timestamp. + /// @return blockNumber and blockTimestamp tuple of the current block's number and the current block's timestamp + function getBlockNumberAndTimestamp() public view returns (uint256 blockNumber, uint256 blockTimestamp) { + uint256 blockInfo = currentBlockInfo; + blockNumber = blockInfo / BLOCK_INFO_BLOCK_NUMBER_PART; + blockTimestamp = blockInfo % BLOCK_INFO_BLOCK_NUMBER_PART; + } + + /// @notice Returns the current block's number. + /// @return blockNumber The current block's number. + function getBlockNumber() public view returns (uint256 blockNumber) { + (blockNumber, ) = getBlockNumberAndTimestamp(); + } + + /// @notice Returns the current block's timestamp. + /// @return timestamp The current block's timestamp. 
+ function getBlockTimestamp() public view returns (uint256 timestamp) { + (, timestamp) = getBlockNumberAndTimestamp(); + } +} diff --git a/core/tests/ts-integration/contracts/custom-account/SystemContractsCaller.sol b/core/tests/ts-integration/contracts/custom-account/SystemContractsCaller.sol new file mode 100644 index 000000000000..01b7b5198add --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/SystemContractsCaller.sol @@ -0,0 +1,249 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8; + +import {MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, MSG_VALUE_SYSTEM_CONTRACT} from "./Constants.sol"; +import "./Utils.sol"; + +// Addresses used for the compiler to be replaced with the +// zkSync-specific opcodes during the compilation. +// IMPORTANT: these are just compile-time constants and are used +// only if used in-place by Yul optimizer. +address constant TO_L1_CALL_ADDRESS = address((1 << 16) - 1); +address constant CODE_ADDRESS_CALL_ADDRESS = address((1 << 16) - 2); +address constant PRECOMPILE_CALL_ADDRESS = address((1 << 16) - 3); +address constant META_CALL_ADDRESS = address((1 << 16) - 4); +address constant MIMIC_CALL_CALL_ADDRESS = address((1 << 16) - 5); +address constant SYSTEM_MIMIC_CALL_CALL_ADDRESS = address((1 << 16) - 6); +address constant MIMIC_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 7); +address constant SYSTEM_MIMIC_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 8); +address constant RAW_FAR_CALL_CALL_ADDRESS = address((1 << 16) - 9); +address constant RAW_FAR_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 10); +address constant SYSTEM_CALL_CALL_ADDRESS = address((1 << 16) - 11); +address constant SYSTEM_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 12); +address constant SET_CONTEXT_VALUE_CALL_ADDRESS = address((1 << 16) - 13); +address constant SET_PUBDATA_PRICE_CALL_ADDRESS = address((1 << 16) - 14); +address constant INCREMENT_TX_COUNTER_CALL_ADDRESS = address((1 << 16) - 15); +address constant 
PTR_CALLDATA_CALL_ADDRESS = address((1 << 16) - 16); +address constant CALLFLAGS_CALL_ADDRESS = address((1 << 16) - 17); +address constant PTR_RETURNDATA_CALL_ADDRESS = address((1 << 16) - 18); +address constant EVENT_INITIALIZE_ADDRESS = address((1 << 16) - 19); +address constant EVENT_WRITE_ADDRESS = address((1 << 16) - 20); +address constant LOAD_CALLDATA_INTO_ACTIVE_PTR_CALL_ADDRESS = address((1 << 16) - 21); +address constant LOAD_LATEST_RETURNDATA_INTO_ACTIVE_PTR_CALL_ADDRESS = address((1 << 16) - 22); +address constant PTR_ADD_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 23); +address constant PTR_SHRINK_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 24); +address constant PTR_PACK_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 25); +address constant MULTIPLICATION_HIGH_ADDRESS = address((1 << 16) - 26); +address constant GET_EXTRA_ABI_DATA_ADDRESS = address((1 << 16) - 27); + +// All the offsets are in bits +uint256 constant META_GAS_PER_PUBDATA_BYTE_OFFSET = 0 * 8; +uint256 constant META_HEAP_SIZE_OFFSET = 8 * 8; +uint256 constant META_AUX_HEAP_SIZE_OFFSET = 12 * 8; +uint256 constant META_SHARD_ID_OFFSET = 28 * 8; +uint256 constant META_CALLER_SHARD_ID_OFFSET = 29 * 8; +uint256 constant META_CODE_SHARD_ID_OFFSET = 30 * 8; + +/// @notice The way to forward the calldata: +/// - Use the current heap (i.e. the same as on EVM). +/// - Use the auxiliary heap. +/// - Forward via a pointer +/// @dev Note, that currently, users do not have access to the auxiliary +/// heap and so the only type of forwarding that will be used by the users +/// are UseHeap and ForwardFatPointer for forwarding a slice of the current calldata +/// to the next call. +enum CalldataForwardingMode { + UseHeap, + ForwardFatPointer, + UseAuxHeap +} + +/** + * @author Matter Labs + * @notice A library that allows calling contracts with the `isSystem` flag. + * @dev It is needed to call ContractDeployer and NonceHolder. 
+ */ +library SystemContractsCaller { + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. + /// @return success Whether the transaction has been successful. + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. + function systemCall( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bool success) { + address callAddr = SYSTEM_CALL_CALL_ADDRESS; + + uint32 dataStart; + assembly { + dataStart := add(data, 0x20) + } + uint32 dataLength = uint32(Utils.safeCastToU32(data.length)); + + uint256 farCallAbi = SystemContractsCaller.getFarCallABI( + 0, + 0, + dataStart, + dataLength, + gasLimit, + // Only rollup is supported for now + 0, + CalldataForwardingMode.UseHeap, + false, + true + ); + + if (value == 0) { + // Doing the system call directly + assembly { + success := call(to, callAddr, 0, 0, farCallAbi, 0, 0) + } + } else { + require(value <= MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, "Value can not be greater than 2**128"); + // We must direct the call through the MSG_VALUE_SIMULATOR + // The first abi param for the MSG_VALUE_SIMULATOR carries + // the value of the call and whether the call should be a system one + // (in our case, it should be) + uint256 abiParam1 = (MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT | value); + + // The second abi param carries the address to call. + uint256 abiParam2 = uint256(uint160(to)); + + address msgValueSimulator = MSG_VALUE_SYSTEM_CONTRACT; + assembly { + success := call(msgValueSimulator, callAddr, abiParam1, abiParam2, farCallAbi, 0, 0) + } + } + } + + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. 
+ /// @return success Whether the transaction has been successful. + /// @return returnData The returndata of the transaction (revert reason in case the transaction has failed). + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. + function systemCallWithReturndata( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bool success, bytes memory returnData) { + success = systemCall(gasLimit, to, value, data); + + uint256 size; + assembly { + size := returndatasize() + } + + returnData = new bytes(size); + assembly { + returndatacopy(add(returnData, 0x20), 0, size) + } + } + + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. + /// @return returnData The returndata of the transaction. In case the transaction reverts, the error + /// bubbles up to the parent frame. + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. + function systemCallWithPropagatedRevert( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bytes memory returnData) { + bool success; + (success, returnData) = systemCallWithReturndata(gasLimit, to, value, data); + + if(!success) { + assembly { + let size := mload(returnData) + revert(add(returnData, 0x20), size) + } + } + } + + /// @notice Calculates the packed representation of the FarCallABI. + /// @param dataOffset Calldata offset in memory. Provide 0 unless using custom pointer. + /// @param memoryPage Memory page to use. Provide 0 unless using custom pointer. + /// @param dataStart The start of the calldata slice. Provide the offset in memory + /// if not using custom pointer. + /// @param dataLength The calldata length. Provide the length of the calldata in bytes + /// unless using custom pointer. 
+ /// @param gasPassed The gas to pass with the call. + /// @param shardId Of the account to call. Currently only 0 is supported. + /// @param forwardingMode The forwarding mode to use: + /// - provide CalldataForwardingMode.UseHeap when using your current memory + /// - provide CalldataForwardingMode.ForwardFatPointer when using custom pointer. + /// @param isConstructorCall Whether the call will be a call to the constructor + /// (ignored when the caller is not a system contract). + /// @param isSystemCall Whether the call will have the `isSystem` flag. + /// @return farCallAbi The far call ABI. + /// @dev The `FarCallABI` has the following structure: + /// pub struct FarCallABI { + /// pub memory_quasi_fat_pointer: FatPointer, + /// pub gas_passed: u32, + /// pub shard_id: u8, + /// pub forwarding_mode: FarCallForwardPageType, + /// pub constructor_call: bool, + /// pub to_system: bool, + /// } + /// + /// The FatPointer struct: + /// + /// pub struct FatPointer { + /// pub offset: u32, // offset relative to `start` + /// pub memory_page: u32, // memory page where slice is located + /// pub start: u32, // absolute start of the slice + /// pub length: u32, // length of the slice + /// } + /// + /// @dev Note, that the actual layout is the following: + /// + /// [0..32) bits -- the calldata offset + /// [32..64) bits -- the memory page to use. Can be left blank in most of the cases. + /// [64..96) bits -- the absolute start of the slice + /// [96..128) bits -- the length of the slice. + /// [128..192) bits -- empty bits. + /// [192..224) bits -- gasPassed. + /// [224..232) bits -- shard id. 
+ /// [232..240) bits -- forwarding_mode + /// [240..248) bits -- constructor call flag + /// [248..256] bits -- system call flag + function getFarCallABI( + uint32 dataOffset, + uint32 memoryPage, + uint32 dataStart, + uint32 dataLength, + uint32 gasPassed, + uint8 shardId, + CalldataForwardingMode forwardingMode, + bool isConstructorCall, + bool isSystemCall + ) internal pure returns (uint256 farCallAbi) { + farCallAbi |= dataOffset; + farCallAbi |= (uint256(memoryPage) << 32); + farCallAbi |= (uint256(dataStart) << 64); + farCallAbi |= (uint256(dataLength) << 96); + farCallAbi |= (uint256(gasPassed) << 192); + farCallAbi |= (uint256(shardId) << 224); + farCallAbi |= (uint256(forwardingMode) << 232); + if (isConstructorCall) { + farCallAbi |= (1 << 240); + } + if (isSystemCall) { + farCallAbi |= (1 << 248); + } + } +} diff --git a/core/tests/ts-integration/contracts/custom-account/TransactionHelper.sol b/core/tests/ts-integration/contracts/custom-account/TransactionHelper.sol new file mode 100644 index 000000000000..7097097437c5 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/TransactionHelper.sol @@ -0,0 +1,467 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; + +import "./interfaces/IPaymasterFlow.sol"; +import "./interfaces/IContractDeployer.sol"; +import {ETH_TOKEN_SYSTEM_CONTRACT, BOOTLOADER_FORMAL_ADDRESS} from "./Constants.sol"; +import "./RLPEncoder.sol"; + +/// @dev The type id of zkSync's EIP-712-signed transaction. +uint8 constant EIP_712_TX_TYPE = 0x71; + +/// @dev The type id of legacy transactions. +uint8 constant LEGACY_TX_TYPE = 0x0; +/// @dev The type id of legacy transactions. +uint8 constant EIP_2930_TX_TYPE = 0x01; +/// @dev The type id of EIP1559 transactions. +uint8 constant EIP_1559_TX_TYPE = 0x02; + +/// @notice Structure used to represent zkSync transaction. 
+struct Transaction { + // The type of the transaction. + uint256 txType; + // The caller. + uint256 from; + // The callee. + uint256 to; + // The gasLimit to pass with the transaction. + // It has the same meaning as Ethereum's gasLimit. + uint256 gasLimit; + // The maximum amount of gas the user is willing to pay for a byte of pubdata. + uint256 gasPerPubdataByteLimit; + // The maximum fee per gas that the user is willing to pay. + // It is akin to EIP1559's maxFeePerGas. + uint256 maxFeePerGas; + // The maximum priority fee per gas that the user is willing to pay. + // It is akin to EIP1559's maxPriorityFeePerGas. + uint256 maxPriorityFeePerGas; + // The transaction's paymaster. If there is no paymaster, it is equal to 0. + uint256 paymaster; + // The nonce of the transaction. + uint256 nonce; + // The value to pass with the transaction. + uint256 value; + // In the future, we might want to add some + // new fields to the struct. The `txData` struct + // is to be passed to account and any changes to its structure + // would mean a breaking change to these accounts. In order to prevent this, + // we should keep some fields as "reserved". + // It is also recommended that their length is fixed, since + // it would allow easier proof integration (in case we will need + // some special circuit for preprocessing transactions). + uint256[4] reserved; + // The transaction's calldata. + bytes data; + // The signature of the transaction. + bytes signature; + // The properly formatted hashes of bytecodes that must be published on L1 + // with the inclusion of this transaction. Note, that a bytecode has been published + // before, the user won't pay fees for its republishing. + bytes32[] factoryDeps; + // The input to the paymaster. + bytes paymasterInput; + // Reserved dynamic type for the future use-case. Using it should be avoided, + // But it is still here, just in case we want to enable some additional functionality. 
+ bytes reservedDynamic; +} + +/** + * @author Matter Labs + * @notice Library is used to help custom accounts to work with common methods for the Transaction type. + */ +library TransactionHelper { + using SafeERC20 for IERC20; + + /// @notice The EIP-712 typehash for the contract's domain + bytes32 constant EIP712_DOMAIN_TYPEHASH = + keccak256("EIP712Domain(string name,string version,uint256 chainId)"); + + bytes32 constant EIP712_TRANSACTION_TYPE_HASH = + keccak256( + "Transaction(uint256 txType,uint256 from,uint256 to,uint256 gasLimit,uint256 gasPerPubdataByteLimit,uint256 maxFeePerGas,uint256 maxPriorityFeePerGas,uint256 paymaster,uint256 nonce,uint256 value,bytes data,bytes32[] factoryDeps,bytes paymasterInput)" + ); + + /// @notice Whether the token is Ethereum. + /// @param _addr The address of the token + /// @return `true` or `false` based on whether the token is Ether. + /// @dev This method assumes that address is Ether either if the address is 0 (for convenience) + /// or if the address is the address of the L2EthToken system contract. + function isEthToken(uint256 _addr) internal pure returns (bool) { + return + _addr == uint256(uint160(address(ETH_TOKEN_SYSTEM_CONTRACT))) || + _addr == 0; + } + + /// @notice Calculate the suggested signed hash of the transaction, + /// i.e. the hash that is signed by EOAs and is recommended to be signed by other accounts. 
+ function encodeHash(Transaction calldata _transaction) + internal + view + returns (bytes32 resultHash) + { + if (_transaction.txType == LEGACY_TX_TYPE) { + resultHash = _encodeHashLegacyTransaction(_transaction); + } else if (_transaction.txType == EIP_712_TX_TYPE) { + resultHash = _encodeHashEIP712Transaction(_transaction); + } else if (_transaction.txType == EIP_1559_TX_TYPE) { + resultHash = _encodeHashEIP1559Transaction(_transaction); + } else if (_transaction.txType == EIP_2930_TX_TYPE) { + resultHash = _encodeHashEIP2930Transaction(_transaction); + } else { + // Currently no other transaction types are supported. + // Any new transaction types will be processed in a similar manner. + revert("Encoding unsupported tx"); + } + } + + /// @notice Encode hash of the zkSync native transaction type. + /// @return keccak256 hash of the EIP-712 encoded representation of transaction + function _encodeHashEIP712Transaction(Transaction calldata _transaction) + private + view + returns (bytes32) + { + bytes32 structHash = keccak256( + abi.encode( + EIP712_TRANSACTION_TYPE_HASH, + _transaction.txType, + _transaction.from, + _transaction.to, + _transaction.gasLimit, + _transaction.gasPerPubdataByteLimit, + _transaction.maxFeePerGas, + _transaction.maxPriorityFeePerGas, + _transaction.paymaster, + _transaction.nonce, + _transaction.value, + keccak256(_transaction.data), + keccak256(abi.encodePacked(_transaction.factoryDeps)), + keccak256(_transaction.paymasterInput) + ) + ); + + bytes32 domainSeparator = keccak256( + abi.encode( + EIP712_DOMAIN_TYPEHASH, + keccak256("zkSync"), + keccak256("2"), + block.chainid + ) + ); + + return + keccak256( + abi.encodePacked("\x19\x01", domainSeparator, structHash) + ); + } + + /// @notice Encode hash of the legacy transaction type. 
+ /// @return keccak256 of the serialized RLP encoded representation of transaction + function _encodeHashLegacyTransaction(Transaction calldata _transaction) + private + view + returns (bytes32) + { + // Hash of legacy transactions are encoded as one of the: + // - RLP(nonce, gasPrice, gasLimit, to, value, data, chainId, 0, 0) + // - RLP(nonce, gasPrice, gasLimit, to, value, data) + // + // In this RLP encoding, only the first one above list appears, so we encode each element + // inside list and then concatenate the length of all elements with them. + + bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce); + // Encode `gasPrice` and `gasLimit` together to prevent "stack too deep error". + bytes memory encodedGasParam; + { + bytes memory encodedGasPrice = RLPEncoder.encodeUint256( + _transaction.maxFeePerGas + ); + bytes memory encodedGasLimit = RLPEncoder.encodeUint256( + _transaction.gasLimit + ); + encodedGasParam = bytes.concat(encodedGasPrice, encodedGasLimit); + } + + bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to))); + bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value); + // Encode only the length of the transaction data, and not the data itself, + // so as not to copy to memory a potentially huge transaction data twice. + bytes memory encodedDataLength; + { + // Safe cast, because the length of the transaction data can't be so large. + uint64 txDataLen = uint64(_transaction.data.length); + if (txDataLen != 1) { + // If the length is not equal to one, then only using the length can it be encoded definitely. + encodedDataLength = RLPEncoder.encodeNonSingleBytesLen( + txDataLen + ); + } else if (_transaction.data[0] >= 0x80) { + // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenates 0x81 with the byte. + encodedDataLength = hex"81"; + } + // Otherwise the length is not encoded at all. 
+ } + + // Encode `chainId` according to EIP-155, but only if the `chainId` is specified in the transaction. + bytes memory encodedChainId; + if (_transaction.reserved[0] != 0) { + encodedChainId = bytes.concat(RLPEncoder.encodeUint256(block.chainid), hex"80_80"); + } + + bytes memory encodedListLength; + unchecked { + uint256 listLength = encodedNonce.length + + encodedGasParam.length + + encodedTo.length + + encodedValue.length + + encodedDataLength.length + + _transaction.data.length + + encodedChainId.length; + + // Safe cast, because the length of the list can't be so large. + encodedListLength = RLPEncoder.encodeListLen(uint64(listLength)); + } + + return + keccak256( + bytes.concat( + encodedListLength, + encodedNonce, + encodedGasParam, + encodedTo, + encodedValue, + encodedDataLength, + _transaction.data, + encodedChainId + ) + ); + } + + /// @notice Encode hash of the EIP2930 transaction type. + /// @return keccak256 of the serialized RLP encoded representation of transaction + function _encodeHashEIP2930Transaction(Transaction calldata _transaction) + private + view + returns (bytes32) + { + // Hash of EIP2930 transactions is encoded the following way: + // H(0x01 || RLP(chain_id, nonce, gas_price, gas_limit, destination, amount, data, access_list)) + // + // Note, that on zkSync access lists are not supported and should always be empty. 
+ + // Encode all fixed-length params to avoid "stack too deep error" + bytes memory encodedFixedLengthParams; + { + bytes memory encodedChainId = RLPEncoder.encodeUint256(block.chainid); + bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce); + bytes memory encodedGasPrice = RLPEncoder.encodeUint256(_transaction.maxFeePerGas); + bytes memory encodedGasLimit = RLPEncoder.encodeUint256(_transaction.gasLimit); + bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to))); + bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value); + encodedFixedLengthParams = bytes.concat( + encodedChainId, + encodedNonce, + encodedGasPrice, + encodedGasLimit, + encodedTo, + encodedValue + ); + } + + // Encode only the length of the transaction data, and not the data itself, + // so as not to copy to memory a potentially huge transaction data twice. + bytes memory encodedDataLength; + { + // Safe cast, because the length of the transaction data can't be so large. + uint64 txDataLen = uint64(_transaction.data.length); + if (txDataLen != 1) { + // If the length is not equal to one, then only using the length can it be encoded definitely. + encodedDataLength = RLPEncoder.encodeNonSingleBytesLen( + txDataLen + ); + } else if (_transaction.data[0] >= 0x80) { + // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenates 0x81 with the byte. + encodedDataLength = hex"81"; + } + // Otherwise the length is not encoded at all. + } + + // On zkSync, access lists are always zero length (at least for now). + bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); + + bytes memory encodedListLength; + unchecked { + uint256 listLength = encodedFixedLengthParams.length + + encodedDataLength.length + + _transaction.data.length + + encodedAccessListLength.length; + + // Safe cast, because the length of the list can't be so large. 
+ encodedListLength = RLPEncoder.encodeListLen(uint64(listLength)); + } + + return + keccak256( + bytes.concat( + "\x01", + encodedListLength, + encodedFixedLengthParams, + encodedDataLength, + _transaction.data, + encodedAccessListLength + ) + ); + } + + /// @notice Encode hash of the EIP1559 transaction type. + /// @return keccak256 of the serialized RLP encoded representation of transaction + function _encodeHashEIP1559Transaction(Transaction calldata _transaction) + private + view + returns (bytes32) + { + // Hash of EIP1559 transactions is encoded the following way: + // H(0x02 || RLP(chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, destination, amount, data, access_list)) + // + // Note, that on zkSync access lists are not supported and should always be empty. + + // Encode all fixed-length params to avoid "stack too deep error" + bytes memory encodedFixedLengthParams; + { + bytes memory encodedChainId = RLPEncoder.encodeUint256(block.chainid); + bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce); + bytes memory encodedMaxPriorityFeePerGas = RLPEncoder.encodeUint256(_transaction.maxPriorityFeePerGas); + bytes memory encodedMaxFeePerGas = RLPEncoder.encodeUint256(_transaction.maxFeePerGas); + bytes memory encodedGasLimit = RLPEncoder.encodeUint256(_transaction.gasLimit); + bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to))); + bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value); + encodedFixedLengthParams = bytes.concat( + encodedChainId, + encodedNonce, + encodedMaxPriorityFeePerGas, + encodedMaxFeePerGas, + encodedGasLimit, + encodedTo, + encodedValue + ); + } + + // Encode only the length of the transaction data, and not the data itself, + // so as not to copy to memory a potentially huge transaction data twice. + bytes memory encodedDataLength; + { + // Safe cast, because the length of the transaction data can't be so large. 
+ uint64 txDataLen = uint64(_transaction.data.length); + if (txDataLen != 1) { + // If the length is not equal to one, then only using the length can it be encoded definitely. + encodedDataLength = RLPEncoder.encodeNonSingleBytesLen( + txDataLen + ); + } else if (_transaction.data[0] >= 0x80) { + // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenates 0x81 with the byte. + encodedDataLength = hex"81"; + } + // Otherwise the length is not encoded at all. + } + + // On zkSync, access lists are always zero length (at least for now). + bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0); + + bytes memory encodedListLength; + unchecked { + uint256 listLength = encodedFixedLengthParams.length + + encodedDataLength.length + + _transaction.data.length + + encodedAccessListLength.length; + + // Safe cast, because the length of the list can't be so large. + encodedListLength = RLPEncoder.encodeListLen(uint64(listLength)); + } + + return + keccak256( + bytes.concat( + "\x02", + encodedListLength, + encodedFixedLengthParams, + encodedDataLength, + _transaction.data, + encodedAccessListLength + ) + ); + } + + /// @notice Processes the common paymaster flows, e.g. setting proper allowance + /// for tokens, etc. For more information on the expected behavior, check out + /// the "Paymaster flows" section in the documentation. 
+ function processPaymasterInput(Transaction calldata _transaction) internal { + require( + _transaction.paymasterInput.length >= 4, + "The standard paymaster input must be at least 4 bytes long" + ); + + bytes4 paymasterInputSelector = bytes4( + _transaction.paymasterInput[0:4] + ); + if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { + require( + _transaction.paymasterInput.length >= 68, + "The approvalBased paymaster input must be at least 68 bytes long" + ); + + // While the actual data consists of address, uint256 and bytes data, + // the data is needed only for the paymaster, so we ignore it here for the sake of optimization + (address token, uint256 minAllowance) = abi.decode( + _transaction.paymasterInput[4:68], + (address, uint256) + ); + address paymaster = address(uint160(_transaction.paymaster)); + + uint256 currentAllowance = IERC20(token).allowance( + address(this), + paymaster + ); + if (currentAllowance < minAllowance) { + // Some tokens, e.g. USDT require that the allowance is firsty set to zero + // and only then updated to the new value. + + IERC20(token).safeApprove(paymaster, 0); + IERC20(token).safeApprove(paymaster, minAllowance); + } + } else if (paymasterInputSelector == IPaymasterFlow.general.selector) { + // Do nothing. general(bytes) paymaster flow means that the paymaster must interpret these bytes on his own. + } else { + revert("Unsupported paymaster flow"); + } + } + + /// @notice Pays the required fee for the transaction to the bootloader. + /// @dev Currently it pays the maximum amount "_transaction.maxFeePerGas * _transaction.gasLimit", + /// it will change in the future. 
+ function payToTheBootloader(Transaction calldata _transaction) + internal + returns (bool success) + { + address bootloaderAddr = BOOTLOADER_FORMAL_ADDRESS; + uint256 amount = _transaction.maxFeePerGas * _transaction.gasLimit; + + assembly { + success := call(gas(), bootloaderAddr, amount, 0, 0, 0, 0) + } + } + + // Returns the balance required to process the transaction. + function totalRequiredBalance(Transaction calldata _transaction) internal pure returns (uint256 requiredBalance) { + if(address(uint160(_transaction.paymaster)) != address(0)) { + // Paymaster pays for the fee + requiredBalance = _transaction.value; + } else { + // The user should have enough balance for both the fee and the value of the transaction + requiredBalance = _transaction.maxFeePerGas * _transaction.gasLimit + _transaction.value; + } + } +} diff --git a/core/tests/ts-integration/contracts/custom-account/Utils.sol b/core/tests/ts-integration/contracts/custom-account/Utils.sol new file mode 100644 index 000000000000..da3d4eb60878 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/Utils.sol @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 +pragma solidity >=0.8.0; + +/** + * @author Matter Labs + * @dev Common utilities used in zkSync system contracts + */ +library Utils { + function safeCastToU128(uint256 _x) internal pure returns (uint128) { + require(_x <= type(uint128).max, "Overflow"); + + return uint128(_x); + } + + function safeCastToU32(uint256 _x) internal pure returns (uint32) { + require(_x <= type(uint32).max, "Overflow"); + + return uint32(_x); + } + + function safeCastToU24(uint256 _x) internal pure returns (uint24) { + require(_x <= type(uint24).max, "Overflow"); + + return uint24(_x); + } + + /// @return codeLength The bytecode length in bytes + function bytecodeLenInBytes(bytes32 _bytecodeHash) internal pure returns (uint256 codeLength) { + codeLength = bytecodeLenInWords(_bytecodeHash) << 5; // _bytecodeHash * 32 + } + + /// @return 
codeLengthInWords The bytecode length in machine words + function bytecodeLenInWords(bytes32 _bytecodeHash) internal pure returns (uint256 codeLengthInWords) { + unchecked { + codeLengthInWords = uint256(uint8(_bytecodeHash[2])) * 256 + uint256(uint8(_bytecodeHash[3])); + } + } +} diff --git a/core/tests/ts-integration/contracts/custom-account/custom-account.sol b/core/tests/ts-integration/contracts/custom-account/custom-account.sol new file mode 100644 index 000000000000..864618ceca12 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/custom-account.sol @@ -0,0 +1,111 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import './Constants.sol'; +import './TransactionHelper.sol'; + +import './SystemContractsCaller.sol'; + +import './interfaces/IAccount.sol'; + +contract CustomAccount is IAccount { + event BootloaderBalance(uint256); + + using TransactionHelper for Transaction; + + bool public violateValidationRules; + + bytes32 public lastTxHash; + + constructor(bool _violateValidationRules) { + violateValidationRules = _violateValidationRules; + } + + // bytes4(keccak256("isValidSignature(bytes32,bytes)") + bytes4 constant EIP1271_SUCCESS_RETURN_VALUE = 0x1626ba7e; + + function validateTransaction(bytes32 _txHash, bytes32 _suggestedSignedTxHash, Transaction calldata _transaction) external payable override returns (bytes4 magic) { + magic = _validateTransaction(_suggestedSignedTxHash, _transaction); + lastTxHash = _txHash; + + if (violateValidationRules) { + // Emitting an event to definitely prevent this clause from being optimized + // out by the compiler + emit BootloaderBalance(BOOTLOADER_FORMAL_ADDRESS.balance); + } + } + + function _validateTransaction(bytes32 _suggestedSignedTxHash, Transaction calldata _transaction) internal returns (bytes4 magic) { + if (_suggestedSignedTxHash == bytes32(0)) { + _suggestedSignedTxHash = _transaction.encodeHash(); + } + + SystemContractsCaller.systemCallWithPropagatedRevert( + 
uint32(gasleft()), + address(NONCE_HOLDER_SYSTEM_CONTRACT), + 0, + abi.encodeCall(INonceHolder.incrementMinNonceIfEquals, (_transaction.nonce)) + ); + + bytes memory correctSignature = abi.encodePacked(_suggestedSignedTxHash, address(this)); + + if (keccak256(_transaction.signature) == keccak256(correctSignature)) { + magic = ACCOUNT_VALIDATION_SUCCESS_MAGIC; + } else { + magic = bytes4(0); + } + } + + function executeTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override { + _execute(_transaction); + } + + function executeTransactionFromOutside(Transaction calldata _transaction) external payable override { + _validateTransaction(bytes32(0), _transaction); + _execute(_transaction); + } + + function _execute(Transaction calldata _transaction) internal { + address to = address(uint160(_transaction.to)); + uint256 value = _transaction.reserved[1]; + bytes memory data = _transaction.data; + + if(to == address(DEPLOYER_SYSTEM_CONTRACT)) { + // We allow calling ContractDeployer with any calldata + SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + to, + uint128(_transaction.reserved[1]), // By convention, reserved[1] is `value` + _transaction.data + ); + } else { + bool success; + assembly { + success := call(gas(), to, value, add(data, 0x20), mload(data), 0, 0) + } + require(success); + } + } + + // Here, the user pays the bootloader for the transaction + function payForTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable { + bool success = _transaction.payToTheBootloader(); + require(success, "Failed to pay the fee to the operator"); + } + + // Here, the user should prepare for the transaction to be paid for by a paymaster + // Here, the account should set the allowance for the smart contracts + function prepareForPaymaster(bytes32, bytes32, Transaction calldata _transaction) external payable { + _transaction.processPaymasterInput(); + } + + fallback() external payable { + // 
fallback of default AA shouldn't be called by bootloader under no circumstances + assert(msg.sender != BOOTLOADER_FORMAL_ADDRESS); + + // If the contract is called directly, behave like an EOA + } + + receive() external payable {} +} diff --git a/core/tests/ts-integration/contracts/custom-account/custom-paymaster.sol b/core/tests/ts-integration/contracts/custom-account/custom-paymaster.sol new file mode 100644 index 000000000000..164aee98518c --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/custom-paymaster.sol @@ -0,0 +1,88 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +import "./interfaces/IPaymaster.sol"; +import "./interfaces/IPaymasterFlow.sol"; +import "./TransactionHelper.sol"; +import "./Constants.sol"; + +// This is a dummy paymaster. It expects the paymasterInput to contain its "signature" as well as the needed exchange rate. +// It supports only approval-based paymaster flow. +contract CustomPaymaster is IPaymaster { + using TransactionHelper for Transaction; + + uint256 public txCounter = 0; + mapping(uint256 => bool) public calledContext; + uint256 public wasAnytime = 0; + + bytes32 lastTxHash = 0; + + function validateSignature(bytes memory _signature) internal pure { + // For the purpose of this test, any signature of length 46 is fine. 
+ require(_signature.length == 46); + } + + function validateAndPayForPaymasterTransaction(bytes32 _txHash, bytes32, Transaction calldata _transaction) override external payable returns (bytes4 magic, bytes memory context) { + // By default we consider the transaction as passed + magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; + + lastTxHash = _txHash; + require(_transaction.paymasterInput.length >= 4, "The standard paymaster input must be at least 4 bytes long"); + + bytes4 paymasterInputSelector = bytes4(_transaction.paymasterInput[0:4]); + if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { + // While the actual data consists of address, uint256 and bytes data, + // the data is needed only for the paymaster, so we ignore it here for the sake of optimization + (address token,, bytes memory input) = abi.decode(_transaction.paymasterInput[4:], (address, uint256, bytes)); + + (bytes memory pseudoSignature, uint256 rateNumerator, uint256 rateDenominator, uint256 amount) = abi.decode(input, (bytes, uint256, uint256, uint256)); + validateSignature(pseudoSignature); + + // Firstly, we verify that the user has provided enough allowance + address userAddress = address(uint160(_transaction.from)); + address thisAddress = address(this); + + uint256 providedAllowance = IERC20(token).allowance(userAddress, thisAddress); + require(providedAllowance >= amount, "The user did not provide enough allowance"); + + uint256 requiredETH = _transaction.gasLimit * _transaction.maxFeePerGas; + uint256 ethExchnaged = amount * rateNumerator / rateDenominator; + + if (ethExchnaged < requiredETH) { + // Important note: while this clause definitely means that the user + // has underpaid the paymaster and the transaction should not accepted, + // we do not want the transaction to revert, because for fee estimation + // we allow users to provide smaller amount of funds then necessary to preserve + // the property that if using X gas the transaction success, then it will succeed 
with X+1 gas. + magic = bytes4(0); + } + + // Pulling all the tokens from the user + IERC20(token).transferFrom(userAddress, thisAddress, amount); + bool success = _transaction.payToTheBootloader(); + require(success, "Failed to transfer funds to the bootloader"); + + // For now, refunds are not supported, so we just test the fact that the transfered context is correct + txCounter += 1; + context = abi.encode(txCounter); + } else { + revert("Unsupported paymaster flow"); + } + } + + function postTransaction( + bytes calldata _context, + Transaction calldata, + bytes32 _txHash, + bytes32, + ExecutionResult, + uint256 + ) override external payable { + require(_txHash == lastTxHash, "Incorrect last tx hash"); + uint256 contextCounter = abi.decode(_context, (uint256)); + calledContext[contextCounter] = true; + } + + receive() external payable {} +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/IAccount.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/IAccount.sol new file mode 100644 index 000000000000..9e8bfb443f15 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/IAccount.sol @@ -0,0 +1,47 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "../TransactionHelper.sol"; + +bytes4 constant ACCOUNT_VALIDATION_SUCCESS_MAGIC = IAccount.validateTransaction.selector; + +interface IAccount { + /// @notice Called by the bootloader to validate that an account agrees to process the transaction + /// (and potentially pay for it). + /// @param _txHash The hash of the transaction to be used in the explorer + /// @param _suggestedSignedHash The hash of the transaction is signed by EOAs + /// @param _transaction The transaction itself + /// @return magic The magic value that should be equal to the signature of this function + /// if the user agrees to proceed with the transaction. 
+ /// @dev The developer should strive to preserve as many steps as possible both for valid + /// and invalid transactions as this very method is also used during the gas fee estimation + /// (without some of the necessary data, e.g. signature). + function validateTransaction( + bytes32 _txHash, + bytes32 _suggestedSignedHash, + Transaction calldata _transaction + ) external payable returns (bytes4 magic); + + function executeTransaction( + bytes32 _txHash, + bytes32 _suggestedSignedHash, + Transaction calldata _transaction + ) external payable; + + // There is no point in providing possible signed hash in the `executeTransactionFromOutside` method, + // since it typically should not be trusted. + function executeTransactionFromOutside(Transaction calldata _transaction) external payable; + + function payForTransaction( + bytes32 _txHash, + bytes32 _suggestedSignedHash, + Transaction calldata _transaction + ) external payable; + + function prepareForPaymaster( + bytes32 _txHash, + bytes32 _possibleSignedHash, + Transaction calldata _transaction + ) external payable; +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/IContractDeployer.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/IContractDeployer.sol new file mode 100644 index 000000000000..aa8a7718c8da --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/IContractDeployer.sol @@ -0,0 +1,108 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +interface IContractDeployer { + /// @notice Defines the version of the account abstraction protocol + /// that a contract claims to follow. 
+ /// - `None` means that the account is just a contract and it should never be interacted + /// with as a custom account + /// - `Version1` means that the account follows the first version of the account abstraction protocol + enum AccountAbstractionVersion { + None, + Version1 + } + + /// @notice Defines the nonce ordering used by the account + /// - `Sequential` means that it is expected that the nonces are monotonic and increment by 1 + /// at a time (the same as EOAs). + /// - `Arbitrary` means that the nonces for the accounts can be arbitrary. The operator + /// should serve the transactions from such an account on a first-come-first-serve basis. + /// @dev This ordering is more of a suggestion to the operator on how the AA expects its transactions + /// to be processed and is not considered as a system invariant. + enum AccountNonceOrdering { + Sequential, + Arbitrary + } + + struct AccountInfo { + AccountAbstractionVersion supportedAAVersion; + AccountNonceOrdering nonceOrdering; + } + + event ContractDeployed( + address indexed deployerAddress, + bytes32 indexed bytecodeHash, + address indexed contractAddress + ); + + function getNewAddressCreate2( + address _sender, + bytes32 _bytecodeHash, + bytes32 _salt, + bytes calldata _input + ) external pure returns (address newAddress); + + function getNewAddressCreate(address _sender, uint256 _senderNonce) external pure returns (address newAddress); + + function create2( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input + ) external payable returns (address newAddress); + + function create2Account( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input, + AccountAbstractionVersion _aaVersion + ) external payable returns (address newAddress); + + /// @dev While the `_salt` parameter is not used anywhere here, + /// it is still needed for consistency between `create` and + /// `create2` functions (required by the compiler). 
+ function create( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input + ) external payable returns (address newAddress); + + /// @dev While `_salt` is never used here, we leave it here as a parameter + /// for the consistency with the `create` function. + function createAccount( + bytes32 _salt, + bytes32 _bytecodeHash, + bytes calldata _input, + AccountAbstractionVersion _aaVersion + ) external payable returns (address newAddress); + + /// @notice Returns the information about a certain AA. + function getAccountInfo( + address _address + ) external view returns (AccountInfo memory info); + + /// @notice Can be called by an account to update its account version + function updateAccountVersion(AccountAbstractionVersion _version) external; + + /// @notice Can be called by an account to update its nonce ordering + function updateNonceOrdering(AccountNonceOrdering _nonceOrdering) external; + + /// @notice A struct that describes a forced deployment on an address + struct ForceDeployment { + // The bytecode hash to put on an address + bytes32 bytecodeHash; + // The address on which to deploy the bytecodehash to + address newAddress; + // The value with which to initialize a contract + uint256 value; + // The constructor calldata + bytes input; + } + + /// @notice This method is to be used only during an upgrade to set a bytecode on any address. + /// @dev We do not require `onlySystemCall` here, since the method is accessible only + /// by `FORCE_DEPLOYER`. 
+ function forceDeployOnAddresses( + ForceDeployment[] calldata _deployments + ) external payable; +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/IERC20.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/IERC20.sol new file mode 100644 index 000000000000..b816bfed0863 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/IERC20.sol @@ -0,0 +1,82 @@ +// SPDX-License-Identifier: MIT +// OpenZeppelin Contracts (last updated v4.6.0) (token/ERC20/IERC20.sol) + +pragma solidity ^0.8.0; + +/** + * @dev Interface of the ERC20 standard as defined in the EIP. + */ +interface IERC20 { + /** + * @dev Emitted when `value` tokens are moved from one account (`from`) to + * another (`to`). + * + * Note that `value` may be zero. + */ + event Transfer(address indexed from, address indexed to, uint256 value); + + /** + * @dev Emitted when the allowance of a `spender` for an `owner` is set by + * a call to {approve}. `value` is the new allowance. + */ + event Approval(address indexed owner, address indexed spender, uint256 value); + + /** + * @dev Returns the amount of tokens in existence. + */ + function totalSupply() external view returns (uint256); + + /** + * @dev Returns the amount of tokens owned by `account`. + */ + function balanceOf(address account) external view returns (uint256); + + /** + * @dev Moves `amount` tokens from the caller's account to `to`. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transfer(address to, uint256 amount) external returns (bool); + + /** + * @dev Returns the remaining number of tokens that `spender` will be + * allowed to spend on behalf of `owner` through {transferFrom}. This is + * zero by default. + * + * This value changes when {approve} or {transferFrom} are called. 
+ */ + function allowance(address owner, address spender) external view returns (uint256); + + /** + * @dev Sets `amount` as the allowance of `spender` over the caller's tokens. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * IMPORTANT: Beware that changing an allowance with this method brings the risk + * that someone may use both the old and the new allowance by unfortunate + * transaction ordering. One possible solution to mitigate this race + * condition is to first reduce the spender's allowance to 0 and set the + * desired value afterwards: + * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + * + * Emits an {Approval} event. + */ + function approve(address spender, uint256 amount) external returns (bool); + + /** + * @dev Moves `amount` tokens from `from` to `to` using the + * allowance mechanism. `amount` is then deducted from the caller's + * allowance. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transferFrom( + address from, + address to, + uint256 amount + ) external returns (bool); +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/INonceHolder.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/INonceHolder.sol new file mode 100644 index 000000000000..18ac47023266 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/INonceHolder.sol @@ -0,0 +1,42 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +/** + * @author Matter Labs + * @dev Interface of the nonce holder contract -- a contract used by the system to ensure + * that there is always a unique identifier for a transaction with a particular account (we call it nonce). + * In other words, the pair of (address, nonce) should always be unique. 
+ * @dev Custom accounts should use methods of this contract to store nonces or other possible unique identifiers + * for the transaction. + */ +interface INonceHolder { + /// @dev Returns the current minimal nonce for account. + function getMinNonce(address _address) external view returns (uint256); + + /// @dev Returns the raw version of the current minimal nonce + /// (equal to minNonce + 2^128 * deployment nonce). + function getRawNonce(address _address) external view returns (uint256); + + /// @dev Increases the minimal nonce for the msg.sender. + function increaseMinNonce(uint256 _value) external returns (uint256); + + /// @dev Sets the nonce value `key` as used. + function setValueUnderNonce(uint256 _key, uint256 _value) external; + + /// @dev Gets the value stored inside a custom nonce. + function getValueUnderNonce(uint256 _key) external view returns (uint256); + + /// @dev A convenience method to increment the minimal nonce if it is equal + /// to the `_expectedNonce`. + function incrementMinNonceIfEquals(uint256 _expectedNonce) external; + + /// @dev Returns the deployment nonce for the accounts used for CREATE opcode. + function getDeploymentNonce(address _address) external view returns (uint256); + + /// @dev Increments the deployment nonce for the account and returns the previous one. + function incrementDeploymentNonce(address _address) external returns (uint256); + + /// @dev Determines whether a certain nonce has been already used for an account. 
+ function validateNonceUsage(address _address, uint256 _key, bool _shouldBeUsed) external view; +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymaster.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymaster.sol new file mode 100644 index 000000000000..cf5ced948782 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymaster.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "../TransactionHelper.sol"; + +enum ExecutionResult { + Revert, + Success +} + +bytes4 constant PAYMASTER_VALIDATION_SUCCESS_MAGIC = IPaymaster.validateAndPayForPaymasterTransaction.selector; + +interface IPaymaster { + /// @dev Called by the bootloader to verify that the paymaster agrees to pay for the + /// fee for the transaction. This transaction should also send the necessary amount of funds onto the bootloader + /// address. + /// @param _txHash The hash of the transaction + /// @param _suggestedSignedHash The hash of the transaction that is signed by an EOA + /// @param _transaction The transaction itself. + /// @return magic The value that should be equal to the signature of the validateAndPayForPaymasterTransaction + /// if the paymaster agrees to pay for the transaction. + /// @return context The "context" of the transaction: an array of bytes of length at most 1024 bytes, which will be + /// passed to the `postTransaction` method of the account. + /// @dev The developer should strive to preserve as many steps as possible both for valid + /// and invalid transactions as this very method is also used during the gas fee estimation + /// (without some of the necessary data, e.g. signature). 
+ function validateAndPayForPaymasterTransaction( + bytes32 _txHash, + bytes32 _suggestedSignedHash, + Transaction calldata _transaction + ) external payable returns (bytes4 magic, bytes memory context); + + /// @dev Called by the bootloader after the execution of the transaction. Please note that + /// there is no guarantee that this method will be called at all. Unlike the original EIP4337, + /// this method won't be called if the transaction execution results in out-of-gas. + /// @param _context, the context of the execution, returned by the "validateAndPayForPaymasterTransaction" method. + /// @param _transaction, the users' transaction. + /// @param _txResult, the result of the transaction execution (success or failure). + /// @param _maxRefundedGas, the upper bound on the amout of gas that could be refunded to the paymaster. + /// @dev The exact amount refunded depends on the gas spent by the "postOp" itself and so the developers should + /// take that into account. + function postTransaction( + bytes calldata _context, + Transaction calldata _transaction, + bytes32 _txHash, + bytes32 _suggestedSignedHash, + ExecutionResult _txResult, + uint256 _maxRefundedGas + ) external payable; +} diff --git a/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymasterFlow.sol b/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymasterFlow.sol new file mode 100644 index 000000000000..97bd95079292 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/interfaces/IPaymasterFlow.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +/** + * @author Matter Labs + * @dev The interface that is used for encoding/decoding of + * different types of paymaster flows. + * @notice This is NOT an interface to be implementated + * by contracts. It is just used for encoding. 
+ */ +interface IPaymasterFlow { + function general(bytes calldata input) external; + + function approvalBased(address _token, uint256 _minAllowance, bytes calldata _innerInput) external; +} diff --git a/core/tests/ts-integration/contracts/custom-account/nonce-holder-test.sol b/core/tests/ts-integration/contracts/custom-account/nonce-holder-test.sol new file mode 100644 index 000000000000..d53151cea6c6 --- /dev/null +++ b/core/tests/ts-integration/contracts/custom-account/nonce-holder-test.sol @@ -0,0 +1,100 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import './Constants.sol'; +import './TransactionHelper.sol'; + +import './interfaces/IAccount.sol'; +import './interfaces/IContractDeployer.sol'; + +import './SystemContractsCaller.sol'; + +/** +* @author Matter Labs +* @dev Dummy account used for tests that accepts any transaction. +*/ +contract NonceHolderTest is IAccount { + using TransactionHelper for Transaction; + + // bytes4(keccak256("isValidSignature(bytes32,bytes)") + bytes4 constant EIP1271_SUCCESS_RETURN_VALUE = 0x1626ba7e; + + function validateTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override returns (bytes4 magic) { + // By default we consider the transaction as successful + magic = ACCOUNT_VALIDATION_SUCCESS_MAGIC; + + _validateTransaction(_transaction); + } + + function _validateTransaction(Transaction calldata _transaction) internal { + bytes memory data; + + if (uint8(_transaction.signature[0]) == 0) { + // It only erases nonce as non-allowed + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.setValueUnderNonce, (_transaction.nonce, 1)); + } else if(uint8(_transaction.signature[0]) == 1) { + // It should increase minimal nonce by 5 + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (5)); + } else if(uint8(_transaction.signature[0]) == 2) { + // It should try increasing nnonce by 2**90 + data = 
abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (2**90)); + } else if (uint8(_transaction.signature[0]) == 3) { + // Do nothing + return; + } else if(uint8(_transaction.signature[0]) == 4) { + // It should increase minimal nonce by 1 + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (1)); + } else if (uint8(_transaction.signature[0]) == 5) { + // Increase minimal nonce by 5 and set the nonce ordering of the account as arbitrary + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (5)); + SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + address(DEPLOYER_SYSTEM_CONTRACT), + 0, + abi.encodeCall(DEPLOYER_SYSTEM_CONTRACT.updateNonceOrdering, (IContractDeployer.AccountNonceOrdering.Arbitrary)) + ); + } else { + revert("Unsupported test"); + } + + SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + address(NONCE_HOLDER_SYSTEM_CONTRACT), + 0, + data + ); + } + + function executeTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override { + _execute(_transaction); + } + + function executeTransactionFromOutside(Transaction calldata _transaction) external payable override { + _validateTransaction(_transaction); + _execute(_transaction); + } + + function _execute(Transaction calldata _transaction) internal {} + + // Here, the user pays the bootloader for the transaction + function payForTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override { + bool success = _transaction.payToTheBootloader(); + require(success, "Failed to pay the fee to the operator"); + } + + // Here, the user should prepare for the transaction to be paid for by a paymaster + // Here, the account should set the allowance for the smart contracts + function prepareForPaymaster(bytes32, bytes32, Transaction calldata _transaction) external payable override { + _transaction.processPaymasterInput(); + } + + fallback() external payable { + // 
fallback of default AA shouldn't be called by bootloader under no circumstances + assert(msg.sender != BOOTLOADER_FORMAL_ADDRESS); + + // If the contract is called directly, behave like an EOA + } + + receive() external payable {} +} diff --git a/core/tests/ts-integration/contracts/error/error.sol b/core/tests/ts-integration/contracts/error/error.sol new file mode 100644 index 000000000000..ba8085c26654 --- /dev/null +++ b/core/tests/ts-integration/contracts/error/error.sol @@ -0,0 +1,22 @@ +pragma solidity ^0.8.0; + +// SPDX-License-Identifier: MIT OR Apache-2.0 + +contract SimpleRequire { + error TestError(uint256 one, uint256 two, uint256 three, string data); + + function new_error() public pure { + revert TestError({one: 1, two: 2, three: 1, data: "data"}); + } + + function require_short() public pure { + require(false, "short"); + } + + function require_long() public pure { + require( + false, + 'longlonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglong' + ); + } +} diff --git a/core/tests/ts-integration/contracts/events/events.sol b/core/tests/ts-integration/contracts/events/events.sol new file mode 100644 index 000000000000..d65ccda95142 --- /dev/null +++ b/core/tests/ts-integration/contracts/events/events.sol @@ -0,0 +1,21 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract Emitter { + event Trivial(); + event Simple(uint256 Number, address Account); + event Indexed(uint256 indexed Number, address Account); + + function test(uint256 number) public { + emit Trivial(); + emit Simple(number, address(0xdeadbeef)); + emit Indexed(number, address(0xc0ffee)); + } + + function emitManyEvents(uint256 iterations) public { + for (uint i = 0; i < iterations; i++) { + emit Trivial(); + } + } +} diff --git a/core/tests/ts-integration/contracts/events/sample-calldata 
b/core/tests/ts-integration/contracts/events/sample-calldata new file mode 100644 index 0000000000000000000000000000000000000000..c137101ba026010f41d872325c4d53eab9d99a27 GIT binary patch literal 96 UcmY#kARn;Lf2oO2HzQCI07%#Y-T(jq literal 0 HcmV?d00001 diff --git a/core/tests/ts-integration/contracts/expensive/expensive.sol b/core/tests/ts-integration/contracts/expensive/expensive.sol new file mode 100644 index 000000000000..c3b99df48923 --- /dev/null +++ b/core/tests/ts-integration/contracts/expensive/expensive.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract Expensive { + uint[] array; + + function expensive(uint iterations) public returns (bytes32) { + for (uint i = 0; i < iterations; i++) { + array.push(i); + } + return keccak256(abi.encodePacked(array)); + } +} diff --git a/core/tests/ts-integration/contracts/infinite/infinite.sol b/core/tests/ts-integration/contracts/infinite/infinite.sol new file mode 100644 index 000000000000..3ed4e035f601 --- /dev/null +++ b/core/tests/ts-integration/contracts/infinite/infinite.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract InfiniteLoop { + event Iteration(uint256 number); + + function infiniteLoop() public { + uint256 x = 0; + + while (true) { + x += 1; + // This event is needed so that LLVM + // won't optimize the loop away. 
+ emit Iteration(x); + } + } +} diff --git a/core/tests/ts-integration/contracts/writes-and-messages/writes-and-messages.sol b/core/tests/ts-integration/contracts/writes-and-messages/writes-and-messages.sol new file mode 100644 index 000000000000..bbf68a146ff4 --- /dev/null +++ b/core/tests/ts-integration/contracts/writes-and-messages/writes-and-messages.sol @@ -0,0 +1,41 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +interface IL2Messenger { + function sendToL1(bytes memory _message) external returns (bytes32); +} + +contract WritesAndMessages { + IL2Messenger constant L2_MESSENGER = IL2Messenger(address(0x8008)); + mapping(uint256 => uint256) public s; + + function writes(uint from, uint iterations, uint value) public { + unchecked { + for (uint i = 0; i < iterations; i++) { + assembly { + sstore(add(from, i), value) + } + } + } + } + + function l2_l1_messages(uint iterations) public { + unchecked { + for (uint i = 0; i < iterations; i++) { + L2_MESSENGER.sendToL1(abi.encode(i)); + } + } + } + + function big_l2_l1_message(uint size) public { + unchecked { + bytes memory message = new bytes(size); + for (uint i = 0; i < size; i++) { + message[i] = bytes1(uint8(i % 256)); + } + L2_MESSENGER.sendToL1(message); + } + } +} diff --git a/core/tests/ts-integration/hardhat.config.ts b/core/tests/ts-integration/hardhat.config.ts new file mode 100644 index 000000000000..59080306c84e --- /dev/null +++ b/core/tests/ts-integration/hardhat.config.ts @@ -0,0 +1,19 @@ +import '@matterlabs/hardhat-zksync-solc'; + +export default { + zksolc: { + version: '1.3.1', + compilerSource: 'binary', + settings: { + isSystem: true + } + }, + networks: { + hardhat: { + zksync: true + } + }, + solidity: { + version: '0.8.16' + } +}; diff --git a/core/tests/ts-integration/jest.config.json b/core/tests/ts-integration/jest.config.json new file mode 100644 index 000000000000..109e7a1e008a --- /dev/null +++ b/core/tests/ts-integration/jest.config.json @@ -0,0 
+1,20 @@ +{ + "reporters": [ + "default", + "github-actions" + ], + "transform": { + "^.+\\.ts?$": "ts-jest" + }, + "//": "!!! Do not increase the test timeout blindly!!!", + "//": "Timeout is set to match ~4 L1 operations with 10 blocks confirmation", + "//": "If you need bigger timeout, consider either disabling the test outside of fast mode or increasing timeout on a single test", + "//": "If this value would be too big, it may cause tests on stage to get stuck for too long", + "testTimeout": 605000, + "globalSetup": "/src/jest-setup/global-setup.ts", + "globalTeardown": "/src/jest-setup/global-teardown.ts", + "setupFilesAfterEnv": [ + "/src/jest-setup/add-matchers.ts" + ], + "slowTestThreshold": 120 +} diff --git a/core/tests/ts-integration/package.json b/core/tests/ts-integration/package.json new file mode 100644 index 000000000000..7d36a9bf270c --- /dev/null +++ b/core/tests/ts-integration/package.json @@ -0,0 +1,26 @@ +{ + "name": "ts-integration", + "version": "0.1.0", + "license": "MIT", + "private": true, + "scripts": { + "test": "zk f jest" + }, + "devDependencies": { + "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3", + "@types/jest": "^29.0.3", + "@types/node": "^14.14.5", + "@types/node-fetch": "^2.5.7", + "chalk": "^4.0.0", + "ethereumjs-abi": "^0.6.8", + "ethers": "~5.7.0", + "hardhat": "^2.12.4", + "jest": "^29.0.3", + "jest-matcher-utils": "^29.0.3", + "node-fetch": "^2.6.1", + "ts-jest": "^29.0.1", + "ts-node": "^10.1.0", + "typescript": "^4.3.5", + "zksync-web3": "link:../../../sdk/zksync-web3.js" + } +} diff --git a/core/tests/ts-integration/src/context-owner.ts b/core/tests/ts-integration/src/context-owner.ts new file mode 100644 index 000000000000..d52bf916964f --- /dev/null +++ b/core/tests/ts-integration/src/context-owner.ts @@ -0,0 +1,533 @@ +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; + +import { TestContext, TestEnvironment, TestWallets } from './types'; +import { lookupPrerequisites } from 
'./prerequisites'; +import { Reporter } from './reporter'; +import { scaledGasPrice } from './helpers'; +import { RetryProvider } from './retry-provider'; + +// These amounts of ETH would be provided to each test suite through its "main" account. +// It is assumed to be enough to run a set of "normal" transactions. +// If any test or test suite requires you to have more funds, please first think whether it can be avoided +// (e.g. use minted ERC20 token). If it's indeed necessary, deposit more funds from the "main" account separately. +// +// Please DO NOT change these constants if you don't know why you have to do that. Try to debug the particular issue +// you face first. +export const L1_ETH_PER_ACCOUNT = ethers.utils.parseEther('0.05'); +export const L2_ETH_PER_ACCOUNT = ethers.utils.parseEther('0.50'); +export const ERC20_PER_ACCOUNT = ethers.utils.parseEther('10000.0'); + +/** + * This class is responsible for preparing the test environment for all the other test suites. + * + * ## Context initialization + * + * To enable test suites to do their checks, this class: + * - Waits for server to launch (required for running tests on stage right after deployment). + * - Ensures that "master" wallet has enough balances to perform the tests. + * - Prepares wallets for each test suite and adds funds to them. + * - Deploys the test contracts. + * + * ## Performed checks + * + * Given the fact that all the test suites would be run in parallel, while context initialization + * is performed sequentially, during initialization this class also performs several basic "sanity" + * checks (e.g. "ether transfers works" and "fees are being collected"). + * Checks performed by this class should belong to one of the following categories: + * 1) It's a "building block" for other tests (e.g., if contract deployments don't work, other test suites won't work as well). + * 2) It must be run sequentially (e.g. 
it's hard to ensure that fee account received exact amount of fee if multiple processes + * send transactions). + * + * Important! + * Only add the essential checks to this class, as it should be kept minimal. Whenever possible, prefer creating a new test suite + * or extending an existing one. + */ +export class TestContextOwner { + private env: TestEnvironment; + private wallets?: TestWallets; + + private mainEthersWallet: ethers.Wallet; + private mainSyncWallet: zksync.Wallet; + + private l1Provider: ethers.providers.JsonRpcProvider; + private l2Provider: zksync.Provider; + + private reporter: Reporter = new Reporter(); + + constructor(env: TestEnvironment) { + this.env = env; + + this.l1Provider = new ethers.providers.JsonRpcProvider(env.l1NodeUrl); + this.l2Provider = new RetryProvider({ + url: env.l2NodeUrl, + timeout: 1200 * 1000 + }); + + if (env.network == 'localhost') { + // Setup small polling interval on localhost to speed up tests. + this.l1Provider.pollingInterval = 100; + this.l2Provider.pollingInterval = 100; + } + + this.mainEthersWallet = new ethers.Wallet(env.mainWalletPK, this.l1Provider); + this.mainSyncWallet = new zksync.Wallet(env.mainWalletPK, this.l2Provider, this.l1Provider); + } + + /** + * Performs the test context initialization. + * + * @returns Context object required for test suites. + */ + async setupContext(): Promise { + try { + this.reporter.startAction('Setting up the context'); + await this.cancelPendingTxs(); + this.wallets = await this.prepareWallets(); + this.reporter.finishAction(); + } catch (error: any) { + // Report the issue to the console and mark the last action as failed. + this.reporter.error(`An error occurred: ${error.message || error}`); + this.reporter.failAction(); + + // Then propagate the exception. + throw error; + } + return { + wallets: this.wallets, + environment: this.env + }; + } + + /** + * Checks if there are any pending transactions initiated from the main wallet. 
+ * If such transactions are found, cancels them by sending blank ones with exaggregated fee allowance. + */ + private async cancelPendingTxs() { + this.reporter.startAction(`Cancelling pending transactions`); + // Since some tx may be pending on stage, we don't want to get stuck because of it. + // In order to not get stuck transactions, we manually cancel all the pending txs. + const ethWallet = this.mainEthersWallet; + const latestNonce = await ethWallet.getTransactionCount('latest'); + const pendingNonce = await ethWallet.getTransactionCount('pending'); + this.reporter.debug(`Latest nonce is ${latestNonce}, pending nonce is ${pendingNonce}`); + const cancellationTxs = []; + for (let nonce = latestNonce; nonce < pendingNonce; nonce++) { + // For each transaction to override it, we need to provide greater fee. + // We would manually provide a value high enough (for a testnet) to be both valid + // and higher than the previous one. It's OK as we'll only be charged for the bass fee + // anyways. We will also set the miner's tip to 5 gwei, which is also much higher than the normal one. + const maxFeePerGas = ethers.utils.parseEther('0.00000025'); // 250 gwei + const maxPriorityFeePerGas = ethers.utils.parseEther('0.000000005'); // 5 gwei + cancellationTxs.push( + ethWallet + .sendTransaction({ to: ethWallet.address, nonce, maxFeePerGas, maxPriorityFeePerGas }) + .then((tx) => tx.wait()) + ); + } + if (cancellationTxs.length > 0) { + await Promise.all(cancellationTxs); + this.reporter.message(`Canceled ${cancellationTxs.length} pending transactions`); + } + this.reporter.finishAction(); + } + + /** + * Looks for the declared test suites, prepares wallets for each test suite + * and adds funds to them. + * + * @returns Object containing private keys of wallets for each test suite. 
+ */ + private async prepareWallets(): Promise { + this.reporter.startAction(`Preparing wallets`); + const suites = lookupPrerequisites(); + this.reporter.message(`Found following suites: ${suites.join(', ')}`); + + // `+ 1 for the main account (it has to send all these transactions). + const accountsAmount = suites.length + 1; + + const l2ETHAmountToDeposit = await this.ensureBalances(accountsAmount); + const l2ERC20AmountToDeposit = ERC20_PER_ACCOUNT.mul(accountsAmount); + const wallets = this.createTestWallets(suites); + await this.distributeL1Tokens(wallets, l2ETHAmountToDeposit, l2ERC20AmountToDeposit); + await this.distributeL2Tokens(wallets); + + this.reporter.finishAction(); + return wallets; + } + + /** + * Checks the operator account balances on L1 and L2 and deposits funds if required. + */ + private async ensureBalances(accountsAmount: number): Promise { + this.reporter.startAction(`Checking main account balance`); + + this.reporter.message(`Operator address is ${this.mainEthersWallet.address}`); + + const requiredL2ETHAmount = L2_ETH_PER_ACCOUNT.mul(accountsAmount); + const actualL2ETHAmount = await this.mainSyncWallet.getBalance(); + this.reporter.message(`Operator balance on L2 is ${ethers.utils.formatEther(actualL2ETHAmount)} ETH`); + + // We may have enough funds in L2. If that's the case, no need to deposit more than required. + const l2ETHAmountToDeposit = requiredL2ETHAmount.gt(actualL2ETHAmount) + ? 
requiredL2ETHAmount.sub(actualL2ETHAmount) + : ethers.BigNumber.from(0); + + const requiredL1ETHAmount = L1_ETH_PER_ACCOUNT.mul(accountsAmount).add(l2ETHAmountToDeposit); + const actualL1ETHAmount = await this.mainSyncWallet.getBalanceL1(); + this.reporter.message(`Operator balance on L1 is ${ethers.utils.formatEther(actualL1ETHAmount)} ETH`); + + if (requiredL1ETHAmount.gt(actualL1ETHAmount)) { + const required = ethers.utils.formatEther(requiredL1ETHAmount); + const actual = ethers.utils.formatEther(actualL1ETHAmount); + const errorMessage = `There must be at least ${required} ETH on main account, but only ${actual} is available`; + throw new Error(errorMessage); + } + this.reporter.finishAction(); + + return l2ETHAmountToDeposit; + } + + /** + * Generates wallet objects for the test suites. + */ + private createTestWallets(suites: string[]): TestWallets { + this.reporter.startAction(`Creating test wallets`); + const wallets: TestWallets = {}; + for (const suiteFile of suites) { + const randomWallet = ethers.Wallet.createRandom().privateKey; + wallets[suiteFile] = randomWallet; + } + this.reporter.debug(`Test wallets: ${JSON.stringify(wallets, undefined, 2)}`); + this.reporter.finishAction(); + return wallets; + } + + /** + * Sends L1 tokens to the test wallet accounts. + * Additionally, deposits L1 tokens to the main account for further distribution on L2 (if required). + */ + private async distributeL1Tokens( + wallets: TestWallets, + l2ETHAmountToDeposit: ethers.BigNumber, + l2erc20DepositAmount: ethers.BigNumber + ) { + this.reporter.startAction(`Distributing tokens on L1`); + const l1startNonce = await this.mainEthersWallet.getTransactionCount(); + this.reporter.debug(`Start nonce is ${l1startNonce}`); + + // All the promises we send in this function. + const l1TxPromises: Promise[] = []; + // Mutable nonce to send the transactions before actually `await`ing them. 
+ let nonce = l1startNonce; + // Scaled gas price to be used to prevent transactions from being stuck. + const gasPrice = await scaledGasPrice(this.mainEthersWallet); + + // Deposit L2 tokens (if needed). + if (!l2ETHAmountToDeposit.isZero()) { + // Given that we've already sent a number of transactions, + // we have to correctly send nonce. + const depositHandle = this.mainSyncWallet + .deposit({ + token: zksync.utils.ETH_ADDRESS, + amount: l2ETHAmountToDeposit, + overrides: { + nonce: nonce++, + gasPrice + } + }) + .then((tx) => { + const amount = ethers.utils.formatEther(l2ETHAmountToDeposit); + this.reporter.debug(`Sent ETH deposit. Nonce ${tx.nonce}, amount: ${amount}, hash: ${tx.hash}`); + tx.wait(); + }); + + // Add this promise to the list of L1 tx promises. + l1TxPromises.push(depositHandle); + } + + // Define values for handling ERC20 transfers/deposits. + const erc20Token = this.env.erc20Token.l1Address; + const erc20MintAmount = l2erc20DepositAmount.mul(2); + + // Mint ERC20. + const l1Erc20ABI = ['function mint(address to, uint256 amount)']; + const l1Erc20Contract = new ethers.Contract(erc20Token, l1Erc20ABI, this.mainEthersWallet); + const erc20MintPromise = l1Erc20Contract + .mint(this.mainSyncWallet.address, erc20MintAmount, { + nonce: nonce++, + gasPrice + }) + .then((tx: any) => { + this.reporter.debug(`Sent ERC20 mint transaction. Hash: ${tx.hash}, nonce ${tx.nonce}`); + return tx.wait(); + }); + + // Deposit ERC20. + const erc20DepositPromise = this.mainSyncWallet + .deposit({ + token: erc20Token, + amount: l2erc20DepositAmount, + approveERC20: true, + approveOverrides: { + nonce: nonce++, + gasPrice + }, + overrides: { + nonce: nonce++, + gasPrice + } + }) + .then((tx) => { + // Note: there is an `approve` tx, not listed here. + this.reporter.debug(`Sent ERC20 deposit transaction. Hash: ${tx.hash}, nonce: ${tx.nonce}`); + return tx.wait(); + }); + + // Send ETH on L1. 
+ const ethTransfers = await sendTransfers( + zksync.utils.ETH_ADDRESS, + this.mainEthersWallet, + wallets, + L1_ETH_PER_ACCOUNT, + nonce, + gasPrice, + this.reporter + ); + nonce += ethTransfers.length; + + // Send ERC20 on L1. + const erc20Transfers = await sendTransfers( + erc20Token, + this.mainEthersWallet, + wallets, + ERC20_PER_ACCOUNT, + nonce, + gasPrice, + this.reporter + ); + + l1TxPromises.push(...ethTransfers); + l1TxPromises.push(erc20MintPromise); + l1TxPromises.push(erc20DepositPromise); + l1TxPromises.push(...erc20Transfers); + + this.reporter.debug(`Sent ${l1TxPromises.length} initial transactions on L1`); + + await Promise.all(l1TxPromises); + this.reporter.finishAction(); + } + + /** + * Sends L2 tokens to the test wallet accounts. + */ + private async distributeL2Tokens(wallets: TestWallets) { + this.reporter.startAction(`Distributing tokens on L2`); + let l2startNonce = await this.mainSyncWallet.getTransactionCount(); + + // ETH transfers. + const l2TxPromises = await sendTransfers( + zksync.utils.ETH_ADDRESS, + this.mainSyncWallet, + wallets, + L2_ETH_PER_ACCOUNT, + l2startNonce, + undefined, + this.reporter + ); + l2startNonce += l2TxPromises.length; + + // ERC20 transfers. + const l2TokenAddress = await this.mainSyncWallet.l2TokenAddress(this.env.erc20Token.l1Address); + const erc20Promises = await sendTransfers( + l2TokenAddress, + this.mainSyncWallet, + wallets, + ERC20_PER_ACCOUNT, + l2startNonce, + undefined, + this.reporter + ); + l2TxPromises.push(...erc20Promises); + + await Promise.all(l2TxPromises); + this.reporter.finishAction(); + } + + /** + * Performs context deinitialization. + */ + async teardownContext() { + // Reset the reporter context. + this.reporter = new Reporter(); + try { + this.reporter.startAction(`Tearing down the context`); + + await this.collectFunds(); + + this.reporter.finishAction(); + } catch (error: any) { + // Report the issue to the console and mark the last action as failed. 
+ this.reporter.error(`An error occurred: ${error.message || error}`); + this.reporter.failAction(); + + // Then propagate the exception. + throw error; + } + } + + /** + * Returns funds from suite-specific wallets back to the main account. + */ + private async collectFunds() { + this.reporter.startAction(`Collecting funds back to the main account`); + + const l1Wallets = Object.values(this.wallets!).map((pk) => new ethers.Wallet(pk, this.l1Provider)); + const l2Wallets = Object.values(this.wallets!).map( + (pk) => new zksync.Wallet(pk, this.l2Provider, this.l1Provider) + ); + const wallets = l1Wallets.concat(l2Wallets); + + const txPromises: ReceiptFuture[] = await claimEtherBack(wallets, this.mainEthersWallet.address, this.reporter); + + await Promise.all(txPromises); + + this.reporter.finishAction(); + + // We don't really need to withdraw funds back, since test takes existing L2 balance + // into account. If the same wallet would be reused (e.g. on stage), it'll just have to + // deposit less next time. + } +} + +/** + * Sends transfer from the "main" wallet to the list of "receiver" wallets. + * Can work both with L1 and L2 wallets. + * + * @param wallet Main wallet to send Ether from + * @param wallets Receiver wallets. + * @param value Amount of Ether to distribute. + * @param overrideStartNonce (optional): Nonce to use for the first transaction. + * @param gasPrice (optional): Gas price to use in transactions. + * @param reporter (optional): Reporter object to write logs to. + * @returns List of promises for each sent transaction. + */ +export async function sendTransfers( + token: string, + wallet: ethers.Wallet | zksync.Wallet, + wallets: TestWallets, + value: ethers.BigNumber, + overrideStartNonce?: number, + gasPrice?: ethers.BigNumber, + reporter?: Reporter +): Promise[]> { + const erc20Contract = + wallet instanceof zksync.Wallet + ? 
new zksync.Contract(token, zksync.utils.IERC20, wallet) + : new ethers.Contract(token, zksync.utils.IERC20, wallet); + const startNonce = overrideStartNonce ?? (await wallet.getTransactionCount()); + reporter?.debug(`Sending transfers. Token address is ${token}`); + const txPromises = Array.from(Object.values(wallets)).map((testWalletPK, index) => { + if (token == zksync.utils.ETH_ADDRESS) { + const tx = { + to: ethers.utils.computeAddress(testWalletPK), + value, + nonce: startNonce + index, + gasPrice + }; + + reporter?.debug(`Inititated ETH transfer with nonce: ${tx.nonce}`); + return wallet.sendTransaction(tx).then((tx) => { + reporter?.debug(`Sent ETH transfer tx: ${tx.hash}, nonce: ${tx.nonce}`); + return tx.wait(); + }); + } else { + const txNonce = startNonce + index; + const tx = erc20Contract.transfer(ethers.utils.computeAddress(testWalletPK), value, { + nonce: txNonce, + gasPrice + }); + reporter?.debug(`Inititated ERC20 transfer with nonce: ${tx.nonce}`); + // @ts-ignore + return tx.then((tx) => { + reporter?.debug(`Sent ERC20 transfer tx: ${tx.hash}, nonce: ${tx.nonce}`); + return tx.wait(); + }); + } + }); + reporter?.debug( + `Initiated ${txPromises.length} transfers. Nonce range is ${startNonce} - ${startNonce + txPromises.length - 1}` + ); + + return txPromises; +} + +/** + * Sends all the Ether from one account to another. + * Can work both with L1 and L2 wallets. + * + * @param from Initiator wallet + * @param toAddress Address of the receiver wallet. + * @returns Promise for the transaction. + */ +export async function claimEtherBack( + wallets: ethers.Wallet[] | zksync.Wallet[], + toAddress: string, + reporter?: Reporter +): Promise[]> { + const promises = []; + + for (const from of wallets) { + // We do this for each wallets separately, since we may have L1/L2 objects together in the list. 
+ let gasLimit; + try { + gasLimit = await from.estimateGas({ value: 1, to: toAddress }); + } catch (_error) { + // If gas estimation fails, we just skip this wallet. + continue; + } + // We use scaled gas price to increase chances of tx not being stuck. + const gasPrice = await scaledGasPrice(from); + const transferPrice = gasLimit.mul(gasPrice); + + const balance = await from.getBalance(); + + // If we can't afford sending funds back (or the wallet is empty), do nothing. + if (transferPrice.gt(balance)) { + continue; + } + + const value = balance.sub(transferPrice); + + reporter?.debug( + `Wallet balance: ${ethers.utils.formatEther(balance)} ETH,\ + estimated cost is ${ethers.utils.formatEther(transferPrice)} ETH,\ + value for tx is ${ethers.utils.formatEther(value)} ETH` + ); + + const txPromise = from + .sendTransaction({ + to: toAddress, + value, + gasLimit, + gasPrice + }) + .then((tx) => { + reporter?.debug(`Sent tx: ${tx.hash}`); + return tx.wait(); + }) + .catch((reason) => { + // We don't care about failed transactions. + reporter?.debug(`One of the transactions failed. Info: ${reason}`); + }); + + promises.push(txPromise); + } + + return promises; +} + +/** + * Type represents a transaction that may have been sent. + */ +type ReceiptFuture = Promise; diff --git a/core/tests/ts-integration/src/env.ts b/core/tests/ts-integration/src/env.ts new file mode 100644 index 000000000000..d720846552bd --- /dev/null +++ b/core/tests/ts-integration/src/env.ts @@ -0,0 +1,110 @@ +import * as path from 'path'; +import * as fs from 'fs'; +import * as ethers from 'ethers'; +import * as zksync from 'zksync-web3'; +import { getTokens } from 'reading-tool'; +import { TestEnvironment } from './types'; +import { Reporter } from './reporter'; + +/** + * Attempts to connect to server. + * This function returns once connection can be established, or throws an exception in case of timeout. 
+ * + * This function is expected to be called *before* loading an environment via `loadTestEnvironment`, + * because the latter expects server to be running and may throw otherwise. + */ +export async function waitForServer() { + const reporter = new Reporter(); + // Server startup may take a lot of time on the staging. + const attemptIntervalMs = 1000; + const maxAttempts = 20 * 60; // 20 minutes + + const l2NodeUrl = ensureVariable( + process.env.ZKSYNC_WEB3_API_URL || process.env.API_WEB3_JSON_RPC_HTTP_URL, + 'L2 node URL' + ); + const l2Provider = new zksync.Provider(l2NodeUrl); + + reporter.startAction('Connecting to server'); + let ready = false; + for (let i = 0; i < maxAttempts; ++i) { + try { + await l2Provider.getNetwork(); // Will throw if the server is not ready yet. + ready = true; + reporter.finishAction(); + return; + } catch (e) { + reporter.message(`Attempt #${i + 1} to check the server readiness failed`); + await zksync.utils.sleep(attemptIntervalMs); + } + } + + if (!ready) { + throw new Error('Failed to wait for the server to start'); + } +} + +/** + * Loads the test environment from the env variables. 
+ */ +export async function loadTestEnvironment(): Promise { + const network = process.env.CHAIN_ETH_NETWORK || 'localhost'; + + let mainWalletPK; + if (network == 'localhost') { + const testConfigPath = path.join(process.env.ZKSYNC_HOME!, `etc/test_config/constant`); + const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' })); + mainWalletPK = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, "m/44'/60'/0'/0/0").privateKey; + } else { + mainWalletPK = ensureVariable(process.env.MASTER_WALLET_PK, 'Main wallet private key'); + } + + const l2NodeUrl = ensureVariable( + process.env.ZKSYNC_WEB3_API_URL || process.env.API_WEB3_JSON_RPC_HTTP_URL, + 'L2 node URL' + ); + const l1NodeUrl = ensureVariable(process.env.L1_RPC_ADDRESS || process.env.ETH_CLIENT_WEB3_URL, 'L1 node URL'); + const wsL2NodeUrl = ensureVariable( + process.env.ZKSYNC_WEB3_WS_API_URL || process.env.API_WEB3_JSON_RPC_WS_URL, + 'WS L2 node URL' + ); + const explorerUrl = ensureVariable(process.env.API_EXPLORER_URL, 'Explorer API'); + + const tokens = getTokens(process.env.CHAIN_ETH_NETWORK || 'localhost'); + // wBTC is chosen because it has decimals different from ETH (8 instead of 18). + // Using this token will help us to detect decimals-related errors. + const wBTC = tokens.find((token: { symbol: string }) => token.symbol == 'wBTC')!; + + // `waitForServer` is expected to be executed. Otherwise this call may throw. + const wBTCl2Address = await new zksync.Wallet( + mainWalletPK, + new zksync.Provider(l2NodeUrl), + ethers.getDefaultProvider(l1NodeUrl) + ).l2TokenAddress(wBTC.address); + + return { + network, + mainWalletPK, + l2NodeUrl, + l1NodeUrl, + wsL2NodeUrl, + explorerUrl, + erc20Token: { + name: wBTC.name, + symbol: wBTC.symbol, + decimals: wBTC.decimals, + l1Address: wBTC.address, + l2Address: wBTCl2Address + } + }; +} + +/** + * Checks that variable is not `undefined`, throws an error otherwise. 
+ */ +function ensureVariable(value: string | undefined, variableName: string): string { + if (!value) { + throw new Error(`${variableName} is not defined in the env`); + } + return value; +} diff --git a/core/tests/ts-integration/src/helpers.ts b/core/tests/ts-integration/src/helpers.ts new file mode 100644 index 000000000000..5bb2b3f6351e --- /dev/null +++ b/core/tests/ts-integration/src/helpers.ts @@ -0,0 +1,107 @@ +import * as fs from 'fs'; +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import * as hre from 'hardhat'; +import { ZkSyncArtifact } from '@matterlabs/hardhat-zksync-solc/dist/src/types'; + +/** + * Loads the test contract + * + * @param name Name of the contract, e.g. `Counter` + * @returns Artifact containing the bytecode and ABI of the contract. + */ +export function getTestContract(name: string): ZkSyncArtifact { + const artifact = hre.artifacts.readArtifactSync(name); + return artifact as ZkSyncArtifact; +} + +/** + * Loads the `*.sol` file for a test contract. + * + * @param relativePath Path relative to the `ts-integration/contracts` folder (e.g. `contra). + * @returns Conta + */ +export function getContractSource(relativePath: string): string { + const contractPath = `${process.env.ZKSYNC_HOME}/core/tests/ts-integration/contracts/${relativePath}`; + const source = fs.readFileSync(contractPath, 'utf8'); + return source; +} + +/** + * Performs a contract deployment + * + * @param initiator Wallet from which contract should be deployed + * @param artifact ABI and bytecode of the contract + * @param args Constructor arguments for the contract + * @param deploymentType Optional: should be set to `createAccount` if deployed contract would represent an account. + * @param overrides Optional: overrides for the deployment transaction. + * @returns Deployed contract object (with `initiator` wallet attached). 
+ */ +export async function deployContract( + initiator: zksync.Wallet, + artifact: ZkSyncArtifact, + args: any[], + deploymentType?: zksync.types.DeploymentType, + overrides: any = {} +): Promise { + const contractFactory = new zksync.ContractFactory(artifact.abi, artifact.bytecode, initiator, deploymentType); + const contract = await contractFactory.deploy(...args, overrides); + await contract.deployed(); + return contract; +} + +/** + * Just performs a transaction. Can be used when you don't care about a particular action, + * but just need a transaction to be executed. + * + * @param wallet Wallet to send a transaction from. Should have enough balance to cover the fee. + * @returns Transaction receipt. + */ +export async function anyTransaction(wallet: zksync.Wallet): Promise { + return await wallet.transfer({ to: wallet.address, amount: 0 }).then((tx) => tx.wait()); +} + +/** + * Waits until a new L1 batch is created on zkSync node. + * This function attempts to trigger this action by sending an additional transaction, + * however it may not be enough in some env (e.g. if some testnet is configured to utilize the block capacity). + * + * @param wallet Wallet to send transaction from. Should have enough balance to cover the fee. + */ +export async function waitForNewL1Batch(wallet: zksync.Wallet) { + // Send a dummy transaction and wait until the new L1 batch is created. + const currentL1Batch = await wallet.provider.getL1BatchNumber(); + await anyTransaction(wallet); + // Invariant: even with 1 transaction, l1 batch must be eventually sealed, so this loop must exit. + while ((await wallet.provider.getL1BatchNumber()) <= currentL1Batch) { + await zksync.utils.sleep(wallet.provider.pollingInterval); + } +} +/** + * Waits until the requested block is finalized. + * + * @param wallet Wallet to use to poll the server. + * @param blockNumber Number of block. 
+ */ +export async function waitUntilBlockFinalized(wallet: zksync.Wallet, blockNumber: number) { + while (true) { + const block = await wallet.provider.getBlock('finalized'); + if (blockNumber <= block.number) { + break; + } else { + await zksync.utils.sleep(wallet.provider.pollingInterval); + } + } +} + +/** + * Returns an increased gas price to decrease chances of L1 transactions being stuck + * + * @param wallet Wallet to use to fetch the gas price. + * @returns Scaled gas price. + */ +export async function scaledGasPrice(wallet: ethers.Wallet | zksync.Wallet): Promise { + const gasPrice = await wallet.getGasPrice(); + // Increase by 40%. + return gasPrice.mul(140).div(100); +} diff --git a/core/tests/ts-integration/src/index.ts b/core/tests/ts-integration/src/index.ts new file mode 100644 index 000000000000..a2f70531adbd --- /dev/null +++ b/core/tests/ts-integration/src/index.ts @@ -0,0 +1,5 @@ +export * as helpers from './helpers'; +export { TestContext, TestEnvironment, TestWallets } from './types'; +export { TestContextOwner } from './context-owner'; +export { loadTestEnvironment, waitForServer } from './env'; +export { TestMaster } from './test-master'; diff --git a/core/tests/ts-integration/src/jest-setup/add-matchers.ts b/core/tests/ts-integration/src/jest-setup/add-matchers.ts new file mode 100644 index 000000000000..e673e7a909d1 --- /dev/null +++ b/core/tests/ts-integration/src/jest-setup/add-matchers.ts @@ -0,0 +1,9 @@ +import * as bigNumberMatchers from '../matchers/big-number'; +import * as ethPrimitives from '../matchers/eth-primitives'; +import * as transaction from '../matchers/transaction'; +import * as fail from '../matchers/fail'; + +expect.extend(bigNumberMatchers); +expect.extend(ethPrimitives); +expect.extend(transaction); +expect.extend(fail); diff --git a/core/tests/ts-integration/src/jest-setup/global-setup.ts b/core/tests/ts-integration/src/jest-setup/global-setup.ts new file mode 100644 index 000000000000..b0e2c8bf56dc --- /dev/null 
+++ b/core/tests/ts-integration/src/jest-setup/global-setup.ts @@ -0,0 +1,36 @@ +import { TestContextOwner, loadTestEnvironment, waitForServer } from '../index'; + +declare global { + var __ZKSYNC_TEST_CONTEXT_OWNER__: TestContextOwner; +} + +/** + * This script performs the initial setup for the integration tests. + * See `TestContextOwner` class for more details. + */ +async function performSetup(_globalConfig: any, _projectConfig: any) { + // Perform the test initialization. + // This is an expensive operation that preceeds running any tests, as we need + // to deposit & distribute funds, deploy some contracts, and perform basic server checks. + + // Jest writes an initial message without a newline, so we have to do it manually. + console.log(''); + + // Before starting any actual logic, we need to ensure that the server is running (it may not + // be the case, for example, right after deployment on stage). + await waitForServer(); + + const testEnvironment = await loadTestEnvironment(); + const testContextOwner = new TestContextOwner(testEnvironment); + const testContext = await testContextOwner.setupContext(); + + // Set the test context for test suites to pick up. + // Currently, jest doesn't provide a way to pass data from `globalSetup` to suites, + // so we store the data as serialized JSON. + process.env.ZKSYNC_JEST_TEST_CONTEXT = JSON.stringify(testContext); + + // Store the context object for teardown script, so it can perform, well, the teardown. 
+ globalThis.__ZKSYNC_TEST_CONTEXT_OWNER__ = testContextOwner; +} + +export default performSetup; diff --git a/core/tests/ts-integration/src/jest-setup/global-teardown.ts b/core/tests/ts-integration/src/jest-setup/global-teardown.ts new file mode 100644 index 000000000000..7d2216cc167b --- /dev/null +++ b/core/tests/ts-integration/src/jest-setup/global-teardown.ts @@ -0,0 +1,17 @@ +import { TestContextOwner } from '../index'; + +declare global { + var __ZKSYNC_TEST_CONTEXT_OWNER__: TestContextOwner; +} + +/** + * This script performs the teardown after the whole test suite is completed (either successfully or with some + * tests failed). + * It will recollect funds from all the allocated accounts back to the main account they were deposited from. + */ +async function performTeardown(_globalConfig: any, _projectConfig: any) { + const testContextOwner = globalThis.__ZKSYNC_TEST_CONTEXT_OWNER__; + await testContextOwner.teardownContext(); +} + +export default performTeardown; diff --git a/core/tests/ts-integration/src/matchers/big-number.ts b/core/tests/ts-integration/src/matchers/big-number.ts new file mode 100644 index 000000000000..df93ad1c71af --- /dev/null +++ b/core/tests/ts-integration/src/matchers/big-number.ts @@ -0,0 +1,100 @@ +import { BigNumber, BigNumberish } from 'ethers'; +import { TestMessage } from './matcher-helpers'; + +// Note: I attempted to "overload" the existing matchers from Jest (like `toBeGreaterThan`), +// but failed. There is a proposed hack in one GitHub issue from 2018: if you'll be trying to +// do the same, know: this hack doesn't work anymore. Default matchers rely on `this` to have +// certain properties, so attempt to load default matchers from `build` directory and call them +// as a fallback won't work (or I failed to make it work). + +// This file contains implementation of matchers for BigNumber objects. +// For actual doc-comments, see `typings/jest.d.ts` file. 
+ +// Matcher for `l.gt(r)` +export function bnToBeGt(l: BigNumberish, r: BigNumberish, additionalInfo?: string) { + const comparator = (l: BigNumber, r: BigNumber) => l.gt(r); + const matcherName = `bnToBeGt`; + const matcherMessage = `greater than`; + return matcherBody(l, r, comparator, matcherName, matcherMessage, additionalInfo); +} + +// Matcher for `l.gte(r)` +export function bnToBeGte(l: BigNumberish, r: BigNumberish, additionalInfo?: string) { + const comparator = (l: BigNumber, r: BigNumber) => l.gte(r); + const matcherName = `bnToBeGte`; + const matcherMessage = `greater or equal than`; + return matcherBody(l, r, comparator, matcherName, matcherMessage, additionalInfo); +} + +// Matcher for `l.eq(r)` +export function bnToBeEq(l: BigNumberish, r: BigNumberish, additionalInfo?: string) { + const comparator = (l: BigNumber, r: BigNumber) => l.eq(r); + const matcherName = `bnToBeEq`; + const matcherMessage = `equal to`; + return matcherBody(l, r, comparator, matcherName, matcherMessage, additionalInfo); +} + +// Matcher for `l.lt(r)` +export function bnToBeLt(l: BigNumberish, r: BigNumberish, additionalInfo?: string) { + const comparator = (l: BigNumber, r: BigNumber) => l.lt(r); + const matcherName = `bnToBeLt`; + const matcherMessage = `less than`; + return matcherBody(l, r, comparator, matcherName, matcherMessage, additionalInfo); +} + +// Matcher for `l.lte(r)` +export function bnToBeLte(l: BigNumberish, r: BigNumberish, additionalInfo?: string) { + const comparator = (l: BigNumber, r: BigNumber) => l.lte(r); + const matcherName = `bnToBeLte`; + const matcherMessage = `less than or equal`; + return matcherBody(l, r, comparator, matcherName, matcherMessage, additionalInfo); +} + +/** + * Generic body of the BigNumber matchers. Use to reduce the amount of boilerplate code. + * + * @param l Initial number (from `expect(l)`). + * @param r Number to compare to (from `.bnToBeXXX(r)`). 
+ * @param comparator Comparator function to invoke to see if test passes (e.g. `(l, r) => l.gt(r)`). + * @param matcherName Name of the matcher function (e.g. `bnToBeGt`). + * @param matcherMessage Generic part of the failure message (e.g. `greater than`). + * @param additionalInfo Message provided by user to be included in case of failure. + * @returns Object expected by jest matcher. + */ +function matcherBody( + l: BigNumberish, + r: BigNumberish, + comparator: (l: BigNumber, r: BigNumber) => boolean, + matcherName: string, + matcherMessage: string, + additionalInfo?: string +) { + // Numbers are provided as `BigNumberish`, so they can be strings or numbers. + const left = BigNumber.from(l); + const right = BigNumber.from(r); + const pass = comparator(left, right); + + // Declare messages for normal case and case where matcher was preceded by `.not`. + let passMessage = new TestMessage() + .matcherHint(`.not.${matcherName}`) + .line('Expected the following number:') + .received(left) + .line(`to not be ${matcherMessage}:`) + .expected(right) + .additional(additionalInfo) + .build(); + + let failMessage = new TestMessage() + .matcherHint(`.${matcherName}`) + .line('Expected the following number:') + .received(left) + .line(`to be ${matcherMessage}:`) + .expected(right) + .additional(additionalInfo) + .build(); + + return { + pass, + message: () => (pass ? passMessage : failMessage) + }; +} diff --git a/core/tests/ts-integration/src/matchers/eth-primitives.ts b/core/tests/ts-integration/src/matchers/eth-primitives.ts new file mode 100644 index 000000000000..509b4aa51d26 --- /dev/null +++ b/core/tests/ts-integration/src/matchers/eth-primitives.ts @@ -0,0 +1,53 @@ +import * as ethers from 'ethers'; +import { TestMessage } from './matcher-helpers'; + +// This file contains implementation of matchers for common Ethereum primitives objects. +// For actual doc-comments, see `typings/jest.d.ts` file. 
+ +export function toBeAddress(value: string, additionalInfo?: string) { + const pass = ethers.utils.isAddress(value); + + // Declare messages for normal case and case where matcher was preceded by `.not`. + let passMessage = new TestMessage() + .matcherHint('.not.toBeAddress') + .line('Expected the following string to not be an address:') + .received(value) + .additional(additionalInfo) + .build(); + + let failMessage = new TestMessage() + .matcherHint('.toBeAddress') + .line('Expected the following string to be an address:') + .received(value) + .additional(additionalInfo) + .build(); + + return { + pass, + message: () => (pass ? passMessage : failMessage) + }; +} + +export function toBeHexString(value: string, additionalInfo?: string) { + const pass = ethers.utils.isHexString(value); + + // Declare messages for normal case and case where matcher was preceded by `.not`. + let passMessage = new TestMessage() + .matcherHint('.not.toBeHexString') + .line('Expected the following string to not be a hex string:') + .received(value) + .additional(additionalInfo) + .build(); + + let failMessage = new TestMessage() + .matcherHint('.toBeHexString') + .line('Expected the following string to be a hex string:') + .received(value) + .additional(additionalInfo) + .build(); + + return { + pass, + message: () => (pass ? passMessage : failMessage) + }; +} diff --git a/core/tests/ts-integration/src/matchers/fail.ts b/core/tests/ts-integration/src/matchers/fail.ts new file mode 100644 index 000000000000..e5610d0bb20b --- /dev/null +++ b/core/tests/ts-integration/src/matchers/fail.ts @@ -0,0 +1,6 @@ +export function fail(_: any, message: string) { + return { + pass: false, + message: () => (message ? 
message : 'fails by .fail() assertion') + }; +} diff --git a/core/tests/ts-integration/src/matchers/matcher-helpers.ts b/core/tests/ts-integration/src/matchers/matcher-helpers.ts new file mode 100644 index 000000000000..e2e5c7c7a687 --- /dev/null +++ b/core/tests/ts-integration/src/matchers/matcher-helpers.ts @@ -0,0 +1,61 @@ +// Common utilities for writing custom matchers. +import { printReceived, printExpected, matcherHint } from 'jest-matcher-utils'; + +// Creates a Jest matcher response for failed test. +export function fail(message: string) { + return { + pass: false, + message: () => message + }; +} + +// Creates a Jest matcher response for succeeded test. +// It still needs a message, as it could've been preceeded by `.not`. +export function pass(message: string) { + return { + pass: true, + message: () => message + }; +} + +export class TestMessage { + message: string = ''; + + // Adds a matcher name hint to the message. + // Normally should be first method to call. + matcherHint(name: string): TestMessage { + this.message += `${matcherHint(name)}\n\n`; + return this; + } + + // Adds a line of text to the message + line(text: string): TestMessage { + this.message += `${text}\n`; + return this; + } + + // Adds a "received" value to the message. + received(value: any): TestMessage { + this.message += ` ${printReceived(value)}\n`; + return this; + } + + // Adds an "expected" value to the message. + expected(value: any): TestMessage { + this.message += ` ${printExpected(value)}\n`; + return this; + } + + // Adds an additional information if provided. + additional(text?: string): TestMessage { + if (text) { + this.message += `${text}\n`; + } + return this; + } + + // Returns a built message string. 
+ build(): string { + return this.message; + } +} diff --git a/core/tests/ts-integration/src/matchers/transaction.ts b/core/tests/ts-integration/src/matchers/transaction.ts new file mode 100644 index 000000000000..fa363ef58c09 --- /dev/null +++ b/core/tests/ts-integration/src/matchers/transaction.ts @@ -0,0 +1,167 @@ +import { TestMessage } from './matcher-helpers'; +import { MatcherModifier } from '../modifiers'; +import * as zksync from 'zksync-web3'; + +// This file contains implementation of matchers for zkSync/ethereum transaction. +// For actual doc-comments, see `typings/jest.d.ts` file. + +export async function toBeAccepted( + txPromise: Promise, + modifiers: MatcherModifier[] = [], + additionalInfo?: string +) { + try { + const tx = await txPromise; + const receipt = await tx.wait(); + + // Check receipt validity. + const failReason = checkReceiptFields(tx, receipt); + if (failReason) { + return failReason; + } + + // Apply modifiers. + for (const modifier of modifiers) { + const failReason = await modifier.check(receipt); + if (failReason) { + return failReason; + } + } + + return pass(); + } catch (error: any) { + // Check if an error was raised by `jest` (e.g. by using `expect` inside of the modifier). + if (error.matcherResult) { + // It was, just re-broadcast it. + throw error; + } + + const message = new TestMessage() + .matcherHint('.toBeAccepted') + .line('Transaction was expected to pass, but it failed. Details:') + .received(error) + .additional(additionalInfo) + .build(); + return fail(message); + } +} + +export async function toBeReverted( + txPromise: Promise, + modifiers: MatcherModifier[] = [], + additionalInfo?: string +) { + try { + const tx = await txPromise; + const receipt = await tx.wait(); + + const message = new TestMessage() + .matcherHint('.toBeReverted') + .line('Transaction was expected to be reverted, but it succeeded. 
Receipt:') + .received(receipt) + .additional(additionalInfo) + .build(); + + return fail(message); + } catch (error: any) { + const receipt = error.receipt; + if (!receipt) { + const message = new TestMessage() + .matcherHint('.toBeReverted') + .line('Received malformed error message (without transaction receipt)') + .received(error) + .build(); + return fail(message); + } + + for (const modifier of modifiers) { + const failReason = await modifier.check(receipt); + if (failReason) { + return failReason; + } + } + + return pass(); + } +} + +export async function toBeRejected(txPromise: Promise, errorSubstring?: string, additionalInfo?: string) { + try { + const tx = await txPromise; + // Unlike with `toBeReverted` test, we don't even need to wait for the transaction to be executed. + // We expect it to be rejected by the API server. + + const message = new TestMessage() + .matcherHint('.toBeRejected') + .line('Transaction was expected to be rejected by the API server, but it was not. Receipt:') + .received(tx) + .additional(additionalInfo) + .build(); + + return fail(message); + } catch (error: any) { + if (errorSubstring) { + // We expect thrown exception to always have the `message` field. + if (!error.message || !error.message.includes(errorSubstring)) { + const message = new TestMessage() + .matcherHint('.toBeRejected') + .line('Transaction was expected to be rejected by the API server with the following message:') + .expected(errorSubstring) + .line("but it wasn't detected. Received error:") + .received(error) + .additional(additionalInfo) + .build(); + + return fail(message); + } + } + + return pass(); + } +} + +// Local helper to mark transaction test as passed. +function pass() { + const message = + "No details available. \ + Make sure that you don't use transaction matchers with a `.not` modifier, as it's not supported. 
\ + If you don't, probably there exists a bug in the framework"; + return { + pass: true, + message: () => message + }; +} + +// Local helper to mark transaction test as failed. +function fail(message: string) { + return { + pass: false, + message: () => message + }; +} + +/** + * Checks that the values in the receipt correspond to the values in the transaction request. + * + * @returns If check has failed, returns a Jest error object. Otherwise, returns `undefined`. + */ +function checkReceiptFields(request: zksync.types.TransactionRequest, receipt: zksync.types.TransactionReceipt) { + const errorMessageBuilder = new TestMessage() + .matcherHint('.checkReceiptFields') + .line('Transaction receipt is not properly formatted. Transaction request:') + .expected(request) + .line('Transaction receipt:') + .received(receipt); + const failWith = (line: string) => fail(errorMessageBuilder.line(line).build()); + + if (receipt.status !== 0 && receipt.status !== 1) { + return failWith(`Status field in the receipt has an unexpected value (expected 0 or 1): ${receipt.status}`); + } + if (!receipt.effectiveGasPrice) { + return failWith(`Effective gas price expected to be greater than 0`); + } + if (!receipt.gasUsed) { + return failWith(`Gas used expected to be greater than 0`); + } + return undefined; +} diff --git a/core/tests/ts-integration/src/modifiers/balance-checker.ts b/core/tests/ts-integration/src/modifiers/balance-checker.ts new file mode 100644 index 000000000000..59dd57ff9724 --- /dev/null +++ b/core/tests/ts-integration/src/modifiers/balance-checker.ts @@ -0,0 +1,242 @@ +/** + * Collection of modifiers to check token balance changes caused by a transaction. 
+ */ + +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import { TestMessage } from '../matchers/matcher-helpers'; +import { MatcherModifier, MatcherMessage } from '.'; +import { Fee } from '../types'; +import { IERC20MetadataFactory } from 'zksync-web3/build/typechain'; + +/** + * Modifier that ensures that fee was taken from the wallet for a transaction. + * Note: if you need to check multiple wallets, it's better to use `shouldChangeETHBalances` + * modifier, since it also includes the fee check. + * + * @param wallet Wallet that is expected to pay for a transaction. + * @returns Matcher object + */ +export async function shouldOnlyTakeFee(wallet: zksync.Wallet): Promise { + return await ShouldChangeBalance.create(zksync.utils.ETH_ADDRESS, [{ wallet, change: 0 }]); +} + +/** + * Checks that the transaction caused ETH balance changes. + * Balance changes may be both positive and negative. + * + * @param balanceChanges List of expected balance changes. + * @param params Optional parameters (e.g. to disable the fee check or check balances on L1). + * @returns Matcher object. + */ +export async function shouldChangeETHBalances( + balanceChanges: BalanceChange[], + params?: Params +): Promise { + return await ShouldChangeBalance.create(zksync.utils.ETH_ADDRESS, balanceChanges, params); +} + +/** + * Checks that the transaction caused ETH balance changes. + * Balance changes may be both positive and negative. + * + * @param token ERC20 token to check. + * @param balanceChanges List of expected balance changes. + * @param params Optional parameters (e.g. to disable the fee check or check balances on L1). + * @returns Matcher object. + */ +export async function shouldChangeTokenBalances( + token: string, + balanceChanges: BalanceChange[], + params?: Params +): Promise { + return await ShouldChangeBalance.create(token, balanceChanges, { + noAutoFeeCheck: true, + l1: params?.l1 ?? 
false + }); +} + +/** + * Represents an expected balance change in wei. + * Change can be both positive and negative. + * Hint: use `ethers.parseEther` for big amounts. + * + * If `addressToCheck` parameter is set, the balance would be checked + * *for this provided address*. It may be very useful if you need to ensure the change + * of balance for an account for which you can't create a `Wallet` object (e.g. custom + * account or a certain smart contract). + */ +export interface BalanceChange { + wallet: zksync.Wallet; + change: ethers.BigNumberish; + addressToCheck?: string; +} + +/** + * Additional (optional) parameters to setup the balance change modifiers. + */ +export interface Params { + noAutoFeeCheck?: boolean; + l1?: boolean; +} + +/** + * Internal extension of `BalanceChange` that contains the balance value + * *before* the transaction was sent. + */ +interface PopulatedBalanceChange extends BalanceChange { + initialBalance: ethers.BigNumber; +} + +/** + * Generic modifier capable of checking for the balance change. + * Can work with both ETH and ERC20 tokens, on L2 and L1. + */ +class ShouldChangeBalance extends MatcherModifier { + token: string; + balanceChanges: PopulatedBalanceChange[]; + noAutoFeeCheck: boolean; + l1: boolean; + + static async create(token: string, balanceChanges: BalanceChange[], params?: Params) { + const l1 = params?.l1 ?? false; + const noAutoFeeCheck = params?.noAutoFeeCheck ?? false; + + if (token == zksync.utils.ETH_ADDRESS && l1 && !noAutoFeeCheck) { + throw new Error('ETH balance checks on L1 are not supported'); + } + + const populatedBalanceChanges: PopulatedBalanceChange[] = []; + for (const entry of balanceChanges) { + const wallet = entry.wallet; + const address = entry.addressToCheck ?? 
entry.wallet.address; + const initialBalance = await getBalance(l1, wallet, address, token); + populatedBalanceChanges.push({ + wallet: entry.wallet, + change: entry.change, + addressToCheck: entry.addressToCheck, + initialBalance + }); + } + + return new ShouldChangeBalance(token, populatedBalanceChanges, noAutoFeeCheck, l1); + } + + private constructor(token: string, balanceChanges: PopulatedBalanceChange[], noAutoFeeCheck: boolean, l1: boolean) { + super(); + this.token = token; + this.balanceChanges = balanceChanges; + this.noAutoFeeCheck = noAutoFeeCheck; + this.l1 = l1; + } + + async check(receipt: zksync.types.TransactionReceipt): Promise { + let id = 0; + for (const balanceChange of this.balanceChanges) { + const prevBalance = balanceChange.initialBalance; + const wallet = balanceChange.wallet; + const address = balanceChange.addressToCheck ?? balanceChange.wallet.address; + let newBalance = await getBalance(this.l1, wallet, address, this.token); + + // If fee should be checked, we're checking ETH token and this wallet is an initiator, + // we should consider fees as well. + if (!this.noAutoFeeCheck && this.token == zksync.utils.ETH_ADDRESS && address == receipt.from) { + // To "ignore" subtracted fee, we just add it back to the account balance. 
+ newBalance = newBalance.add(extractFee(receipt).feeAfterRefund); + } + + const diff = newBalance.sub(prevBalance); + const change = ethers.BigNumber.from(balanceChange.change); + if (!diff.eq(change)) { + const message = new TestMessage() + .matcherHint(`ShouldChangeBalance modifier`) + .line(`Incorrect balance change for wallet ${balanceChange.wallet.address} (index ${id} in array)`) + .line(`Expected balance change to be:`) + .expected(change) + .line(`But actual change is:`) + .received(diff) + .line(`Balance before: ${prevBalance}, balance after: ${newBalance}`) + .build(); + + return { + pass: false, + message: () => message + }; + } + + id += 1; + } + + return null; + } +} + +/** + * Helper method to extract the fee in ETH wei from the transaction receipt. + * Only works with L2 transactions. + * + * @param receipt Receipt of the transaction to extract fee from. + * @param from Optional substitute to `receipt.from`. + * @returns Extracted fee + */ +export function extractFee(receipt: zksync.types.TransactionReceipt, from?: string): Fee { + from = from ?? receipt.from; + + const systemAccountAddress = '0x0000000000000000000000000000000000000000000000000000000000008001'; + // We need to pad address to represent 256-bit value. + const fromAccountAddress = ethers.utils.hexZeroPad(ethers.utils.arrayify(from), 32); + // Fee log is one that sends money to the system contract account. 
+ const feeLog = receipt.logs.find((log) => { + return log.topics.length == 3 && log.topics[1] == fromAccountAddress && log.topics[2] == systemAccountAddress; + }); + if (!feeLog) { + throw { + message: `No fee log was found in the following transaction receipt`, + receipt + }; + } + + const feeAmount = ethers.BigNumber.from(feeLog.data); + + // There may be more than one refund log for the user + const feeRefund = receipt.logs + .filter((log) => { + return ( + log.topics.length == 3 && log.topics[1] == systemAccountAddress && log.topics[2] == fromAccountAddress + ); + }) + .map((log) => ethers.BigNumber.from(log.data)) + .reduce((prev, cur) => { + return prev.add(cur); + }, ethers.BigNumber.from(0)); + + return { + feeBeforeRefund: feeAmount, + feeAfterRefund: feeAmount.sub(feeRefund), + refund: feeRefund + }; +} + +/** + * Returns the balance of requested token for a certain address. + * + * @param l1 Whether to check l1 balance or l2 + * @param wallet Wallet to make requests from (may not represent the address to check) + * @param address Address to check the balance + * @param token Address of the token + * @returns Token balance + */ +async function getBalance( + l1: boolean, + wallet: zksync.Wallet, + address: string, + token: string +): Promise { + const provider = l1 ? wallet.providerL1! : wallet.provider; + if (zksync.utils.isETH(token)) { + return await provider.getBalance(address); + } else { + const erc20contract = IERC20MetadataFactory.connect(token, provider); + return await erc20contract.balanceOf(address); + } +} diff --git a/core/tests/ts-integration/src/modifiers/index.ts b/core/tests/ts-integration/src/modifiers/index.ts new file mode 100644 index 000000000000..84aef2d9b8d3 --- /dev/null +++ b/core/tests/ts-integration/src/modifiers/index.ts @@ -0,0 +1,28 @@ +/** + * Base interface for custom transaction matcher modifiers. + */ + +import * as zksync from 'zksync-web3'; + +/** + * Base class for custom transaction matcher modifiers. 
+ * Matcher can be applied to both a succeeded and reverted transaction + * (but not a rejected one, since it's not even executed by the server). + */ +export abstract class MatcherModifier { + /** + * Asynchronous checker function. + * + * @param receipt Corresponding L2 transaction receipt. + * @returns Should return `null` if check is passed and `MatcherMessage` otherwise. + */ + abstract check(receipt: zksync.types.TransactionReceipt): Promise; +} + +/** + * Object to be returned from a matcher modifier in case the check is failed. + */ +export interface MatcherMessage { + pass: boolean; + message: () => string; +} diff --git a/core/tests/ts-integration/src/modifiers/receipt-check.ts b/core/tests/ts-integration/src/modifiers/receipt-check.ts new file mode 100644 index 000000000000..e87c4246570c --- /dev/null +++ b/core/tests/ts-integration/src/modifiers/receipt-check.ts @@ -0,0 +1,42 @@ +import { MatcherModifier, MatcherMessage } from '.'; +import * as zksync from 'zksync-web3'; + +/** + * Creates a custom checker for the transaction receipt. + * + * @param checkFn Function to check the receipt. Must return `true` if check passed, and `false` otherwise. + * @param failMessage Message to be displayed if check wasn't passed. + * @returns Matcher modifier object. + */ +export function checkReceipt( + checkFn: (receipt: zksync.types.TransactionReceipt) => boolean, + failMessage: string +): ShouldCheckReceipt { + return new ShouldCheckReceipt(checkFn, failMessage); +} + +/** + * Generic modifier capable of checking any data available in receipt. + * Applied provided closure to the receipt. 
+ */ +class ShouldCheckReceipt extends MatcherModifier { + checkFn: (receipt: zksync.types.TransactionReceipt) => boolean; + failMessage: string; + + constructor(checkFn: (receipt: zksync.types.TransactionReceipt) => boolean, failMessage: string) { + super(); + this.checkFn = checkFn; + this.failMessage = failMessage; + } + + async check(receipt: zksync.types.TransactionReceipt): Promise { + if (!this.checkFn(receipt)) { + return { + pass: false, + message: () => this.failMessage + }; + } + + return null; + } +} diff --git a/core/tests/ts-integration/src/prerequisites.ts b/core/tests/ts-integration/src/prerequisites.ts new file mode 100644 index 000000000000..c20e33bbab4d --- /dev/null +++ b/core/tests/ts-integration/src/prerequisites.ts @@ -0,0 +1,43 @@ +import * as fs from 'fs'; +import * as path from 'path'; + +// All zkSync test suites are expected to be named `*.test.ts`. +const TEST_SUITE_MARKER = 'test.ts'; + +// Files that are excluded from the integration test suited (e.g. unit tests for the framework itself). +const EXCLUDED_FILES = ['self-unit.test.ts']; + +/** + * Gets all the files that contain zkSync integration test suites. + * Used to provide each test suite a funded wallet. + * + * @returns list of filenames that correspond to zkSync integration test suites. + */ +export function lookupPrerequisites(): string[] { + const files = loadFilesRecursively(`${__dirname}/../tests/`); + + return files.filter((file) => !EXCLUDED_FILES.includes(file) && file.endsWith(TEST_SUITE_MARKER)); +} + +/** + * Recursively collects file paths from the `base` directory (e.g. `some/directory/file.ts`) + * + * @param base Base folder to recursively traverse. + * @param dirPath Collected path relative to the base folder. + * @param arrayOfFiles Array of files collected so far. + * @returns Array of file paths. 
+ */ +function loadFilesRecursively(base: string, dirPath: string = '', arrayOfFiles: string[] = []): string[] { + const files = fs.readdirSync(base + dirPath); + + files.forEach((file) => { + if (fs.statSync(base + dirPath + '/' + file).isDirectory()) { + arrayOfFiles = loadFilesRecursively(base, dirPath + '/' + file, arrayOfFiles); + } else { + const relativePath = path.join(dirPath, '/', file).substring(1); // strip the `/` at the beginning. + arrayOfFiles.push(relativePath); + } + }); + + return arrayOfFiles; +} diff --git a/core/tests/ts-integration/src/reporter.ts b/core/tests/ts-integration/src/reporter.ts new file mode 100644 index 000000000000..6ea93f38b0dd --- /dev/null +++ b/core/tests/ts-integration/src/reporter.ts @@ -0,0 +1,121 @@ +import chalk from 'chalk'; + +const entry = chalk.bold.yellow; +const announce = chalk.yellow; +const success = chalk.green; +const fail = chalk.red; +const timestamp = chalk.grey; +const info = chalk.grey; +const warn = chalk.bgYellow; +const errorPrefix = chalk.bgRed; + +/** + * Represents an action that is currently being performed by the context owner. + */ +interface Action { + name: string; + startedAt: Date; +} + +/** + * Status reporter for the framework. + * Contains utilities for pretty-printing information to the console. + * + * Main responsibility is to announce started & finished actions, as well as time required + * to do so. + */ +export class Reporter { + /** + * Stack of actions that are in progress. + */ + private stack: Action[] = []; + + /** + * Reports a started action to the console and stores it on the action stack. + * + * @param name Name of the action. + */ + startAction(name: string) { + const announceLine = this.indent(`${entry('>')} ${announce(name)}`); + console.log(announceLine); + + this.stack.push({ + name, + startedAt: new Date() + }); + } + + /** + * Finishes the last action on the stack, reporting it to the console. 
+ */ + finishAction() { + const finish = new Date(); + const action = this.pop(); + + const time = finish.getTime() - action.startedAt.getTime(); + const successLine = `${success('✔')} ${action.name} done`; + const timestampLine = timestamp(`(${time}ms)`); + console.log(this.indent(`${successLine} ${timestampLine}`)); + } + + /** + * Finishes the last action on the stack, reporting that it failed. + */ + failAction() { + const finish = new Date(); + const action = this.pop(); + + const time = finish.getTime() - action.startedAt.getTime(); + const failLine = `${fail('❌')} ${action.name} failed`; + const timestampLine = timestamp(`(${time}ms)`); + console.log(this.indent(`${failLine} ${timestampLine}`)); + } + + /** + * Prints a message to the console. + */ + message(message: string) { + console.log(this.indent(info(message))); + } + + /** + * Prints an easily visible warning to the console. + */ + warn(message: string) { + console.log(this.indent(warn(message))); + } + + /** + * Prints an error message to the console. + */ + error(message: string) { + console.log(this.indent(`${errorPrefix('Error:')}: ${fail(message)}`)); + } + + /** + * Prints a debug message. + * Debug messages are only shown if `ZKSYNC_DEBUG_LOGS` env variable is set. + */ + debug(message: string) { + if (process.env.ZKSYNC_DEBUG_LOGS) { + console.log(this.indent(`DEBUG: ${message}`)); + } + } + + /** + * Adds required indention to the messages based on the amount of items on the stack. + */ + private indent(line: string): string { + return ' '.repeat(this.stack.length) + line; + } + + /** + * Pops a message from the stack, throwing an error if the stack is empty. 
+ */ + private pop(): Action { + if (this.stack.length === 0) { + throw new Error('There are no actions on the stack'); + } + return this.stack.pop()!; + } +} diff --git a/core/tests/ts-integration/src/retry-provider.ts b/core/tests/ts-integration/src/retry-provider.ts new file mode 100644 index 000000000000..cf5ddb9949cc --- /dev/null +++ b/core/tests/ts-integration/src/retry-provider.ts @@ -0,0 +1,47 @@ +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; + +/** + * RetryProvider retries every RPC request if it detects a timeout-related issue on the server side. + */ +export class RetryProvider extends zksync.Provider { + constructor( + url?: string | ethers.ethers.utils.ConnectionInfo | undefined, + network?: ethers.ethers.providers.Networkish | undefined + ) { + super(url, network); + } + + override async send(method: string, params: any): Promise { + for (let retry = 0; retry < 50; retry++) { + try { + const result = await super.send(method, params); + // If we obtained result not from the first attempt, print a warning. + if (retry != 0) { + console.log(`Request for method ${method} took ${retry} retries to succeed`); + } + return result; + } catch (err: any) { + // Error markers observed on stage so far. + const ignoredErrors = [ + 'timeout', + 'etimedout', + 'econnrefused', + 'econnreset', + 'bad gateway', + 'service temporarily unavailable', + 'nonetwork' + ]; + const errString: string = err.toString().toLowerCase(); + const found = ignoredErrors.some((sampleErr) => errString.indexOf(sampleErr) !== -1); + if (found) { + // Error is related to timeouts. Sleep a bit and try again. + await zksync.utils.sleep(this.pollingInterval); + continue; + } + // Re-throw any non-timeout-related error. 
+ throw err; + } + } + } +} diff --git a/core/tests/ts-integration/src/system.ts b/core/tests/ts-integration/src/system.ts new file mode 100644 index 000000000000..18126f205956 --- /dev/null +++ b/core/tests/ts-integration/src/system.ts @@ -0,0 +1,116 @@ +import { BigNumber, BytesLike } from 'ethers'; +import { ethers } from 'ethers'; +import { Provider, utils } from 'zksync-web3'; + +const L1_CONTRACTS_FOLDER = `${process.env.ZKSYNC_HOME}/contracts/ethereum/artifacts/cache/solpp-generated-contracts`; +const DIAMOND_UPGRADE_INIT_ABI = new ethers.utils.Interface( + require(`${L1_CONTRACTS_FOLDER}/zksync/upgrade-initializers/DiamondUpgradeInit1.sol/DiamondUpgradeInit1.json`).abi +); +const DIAMOND_CUT_FACET_ABI = new ethers.utils.Interface( + require(`${L1_CONTRACTS_FOLDER}/zksync/facets/DiamondCut.sol/DiamondCutFacet.json`).abi +); +export interface ForceDeployment { + // The bytecode hash to put on an address + bytecodeHash: BytesLike; + // The address on which to deploy the bytecodehash to + newAddress: string; + // The value with which to initialize a contract + value: BigNumber; + // The constructor calldata + input: BytesLike; + // Whether to call the constructor + callConstructor: boolean; +} + +// A minimized copy of the `diamondCut` function used in L1 contracts +function diamondCut(facetCuts: any[], initAddress: string, initCalldata: string): any { + return { + facetCuts, + initAddress, + initCalldata + }; +} + +/** + * Uses a small upgrade to deploy a contract via a forcedDeploy + * + * @param ethProvider The L1 provider. + * @param l2Provider The zkSync provider. + * @param deployments Array of forced deployments to perform. + * @param factoryDeps Factory deps that should be included with this transaction. 
+ * @returns The receipt of the L2 transaction corresponding to the forced deployment + */ +export async function deployOnAnyLocalAddress( + ethProvider: ethers.providers.Provider, + l2Provider: Provider, + deployments: ForceDeployment[], + factoryDeps: BytesLike[] +): Promise { + const diamondUpgradeInitAddress = process.env.CONTRACTS_DIAMOND_UPGRADE_INIT_ADDR; + + // The same mnemonic as in the etc/test_config/eth.json + const govMnemonic = require('../../../../etc/test_config/constant/eth.json').mnemonic; + + if (!diamondUpgradeInitAddress) { + throw new Error('DIAMOND_UPGRADE_INIT_ADDRESS not set'); + } + + const govWallet = ethers.Wallet.fromMnemonic(govMnemonic, "m/44'/60'/0'/0/1").connect(ethProvider); + + const zkSyncContract = await l2Provider.getMainContractAddress(); + + const zkSync = new ethers.Contract(zkSyncContract, utils.ZKSYNC_MAIN_ABI, govWallet); + + // In case there is some pending upgrade there, we cancel it + const upgradeProposalState = await zkSync.getUpgradeProposalState(); + if (upgradeProposalState != 0) { + const currentProposalHash = await zkSync.getProposedUpgradeHash(); + await zkSync.connect(govWallet).cancelUpgradeProposal(currentProposalHash); + } + + // Encode data for the upgrade call + const encodedParams = utils.CONTRACT_DEPLOYER.encodeFunctionData('forceDeployOnAddresses', [deployments]); + + // Prepare the diamond cut data + const upgradeInitData = DIAMOND_UPGRADE_INIT_ABI.encodeFunctionData('forceDeployL2Contract', [ + encodedParams, + factoryDeps, + parseInt(process.env.CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT as string) + ]); + + const upgradeParam = diamondCut([], diamondUpgradeInitAddress, upgradeInitData); + const currentProposalId = (await zkSync.getCurrentProposalId()).add(1); + // Get transaction data of the `proposeTransparentUpgrade` + const proposeTransparentUpgrade = DIAMOND_CUT_FACET_ABI.encodeFunctionData('proposeTransparentUpgrade', [ + upgradeParam, + currentProposalId + ]); + + // Get transaction data of the 
`executeUpgrade` + const executeUpgrade = DIAMOND_CUT_FACET_ABI.encodeFunctionData('executeUpgrade', [ + upgradeParam, + ethers.constants.HashZero + ]); + + // Proposing the upgrade + await ( + await govWallet.sendTransaction({ + to: zkSyncContract, + data: proposeTransparentUpgrade, + gasLimit: BigNumber.from(10000000) + }) + ).wait(); + + // Finalize the upgrade + const receipt = await ( + await govWallet.sendTransaction({ + to: zkSyncContract, + data: executeUpgrade, + gasLimit: BigNumber.from(10000000) + }) + ).wait(); + + const txHash = utils.getL2HashFromPriorityOp(receipt, zkSyncContract); + + return await l2Provider.waitForTransaction(txHash); +} diff --git a/core/tests/ts-integration/src/test-master.ts b/core/tests/ts-integration/src/test-master.ts new file mode 100644 index 000000000000..0b9fba72e856 --- /dev/null +++ b/core/tests/ts-integration/src/test-master.ts @@ -0,0 +1,134 @@ +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import { TestEnvironment, TestContext } from './types'; +import { claimEtherBack } from './context-owner'; +import { RetryProvider } from './retry-provider'; + +/** + * Test master is a singleton class (per suite) that is capable of providing wallets to the suite. + * + * It loads one funded wallet from the initialized context, and can create new empty wallets. + * This class keeps track of all the wallets that were created and after tests it collects funds back. 
+ * + * Additionally, it also provides access to the test environment + */ +export class TestMaster { + private static _instance?: TestMaster; + + private env: TestEnvironment; + + private l1Provider: ethers.providers.JsonRpcProvider; + private l2Provider: zksync.Provider; + + private mainWallet: zksync.Wallet; + private subAccounts: zksync.Wallet[] = []; + + private constructor(file: string) { + if (TestMaster._instance) { + throw new Error('Use TestMaster.getInstance instead of constructor'); + } + + const contextStr = process.env.ZKSYNC_JEST_TEST_CONTEXT; + if (!contextStr) { + throw new Error('Test context was not initalized; unable to load context environment variable'); + } + + const context = JSON.parse(contextStr) as TestContext; + this.env = context.environment; + + // Note: suite files may be nested, and the "name" here should contain the corresponding portion of the + // directory path. Example: `ts-integration/tests/contracts/some.test.ts` -> `contracts/some.test.ts`. + const marker = 'ts-integration/tests/'; + const markerPos = file.lastIndexOf(marker); + if (markerPos === -1) { + throw new Error(`Received invalid test suite path: ${file}`); + } + const suiteName = file.substring(markerPos + marker.length); + + const suiteWalletPK = context.wallets[suiteName]; + if (!suiteWalletPK) { + throw new Error(`Wallet for ${suiteName} suite was not provided`); + } + + this.l1Provider = new ethers.providers.JsonRpcProvider(this.env.l1NodeUrl); + this.l2Provider = new RetryProvider({ + url: this.env.l2NodeUrl, + timeout: 1200 * 1000 + }); + + if (context.environment.network == 'localhost') { + // Setup small polling interval on localhost to speed up tests. + this.l1Provider.pollingInterval = 100; + this.l2Provider.pollingInterval = 100; + } else { + // Poll less frequently to not make the server sad. 
+ this.l2Provider.pollingInterval = 5000; + } + + this.mainWallet = new zksync.Wallet(suiteWalletPK, this.l2Provider, this.l1Provider); + } + + /** + * Returns an instance of the `TestMaster` initialized for the specified suite file. + * + * @param localSuitePath Local path to the suite file, e.g. `erc20.test.ts` or `sample/file.test.ts` + * @returns Constructed `TestMaster` object. + */ + static getInstance(localSuitePath: string): TestMaster { + if (TestMaster._instance) { + return TestMaster._instance; + } + + TestMaster._instance = new TestMaster(localSuitePath); + return TestMaster._instance; + } + + /** + * Getter for the main (funded) account exclusive to the suite. + */ + mainAccount(): zksync.Wallet { + return this.mainWallet; + } + + /** + * Generates a new random empty account. + * After the test suite is completed, funds from accounts created via this method + * are recollected back to the main account. + */ + newEmptyAccount(): zksync.Wallet { + const randomPK = ethers.Wallet.createRandom().privateKey; + const newWallet = new zksync.Wallet(randomPK, this.l2Provider, this.l1Provider); + this.subAccounts.push(newWallet); + return newWallet; + } + + /** + * Getter for the test environment. + */ + environment(): TestEnvironment { + return this.env; + } + + /** + * Checks if tests are being run in the "fast" mode. + * "Long" mode is default and includes tests that wait for block finalization. + * "Fast" mode may be used, for example, on stage when we need to quickly run a set + * of tests. + */ + isFastMode(): boolean { + return process.env['ZK_INTEGRATION_TESTS_FAST_MODE'] === 'true'; + } + + /** + * Deinitialized the context, collecting funds from created account back to the main one. + */ + async deinitialize() { + try { + const promises = await claimEtherBack(this.subAccounts, this.mainWallet.address); + await Promise.all(promises); + } catch (err) { + // We don't want deinitialization to fail the test suite, so just report it. 
+ console.log(`Test deinitialization failed. Error: {err}`); + } + } +} diff --git a/core/tests/ts-integration/src/types.ts b/core/tests/ts-integration/src/types.ts new file mode 100644 index 000000000000..632d03febd0e --- /dev/null +++ b/core/tests/ts-integration/src/types.ts @@ -0,0 +1,76 @@ +import { ethers } from 'ethers'; + +/** + * Description of an ERC20 token. + */ +export interface Token { + name: string; + symbol: string; + decimals: number; + l1Address: string; + l2Address: string; +} + +/** + * Description of the environment the integration tests are being run in. + */ +export interface TestEnvironment { + /** + * Plaintext name of the L1 network name (i.e. `localhost` or `goerli`). + */ + network: string; + /** + * Private key of the "master" wallet (used to distribute funds to other wallets). + * Do not use it directly unless you know what you're doing! + * Use wallets provided through the test context instead. + */ + mainWalletPK: string; + /** + * URL of zkSync node's HTTP Web3 API. + */ + l2NodeUrl: string; + /** + * URL of Ethereum node's HTTP Web3 API. + */ + l1NodeUrl: string; + /** + * URL of zkSync node's WS Web3 API. + */ + wsL2NodeUrl: string; + /** + * URL of zkSync node's Explorer API. + */ + explorerUrl: string; + /** + * Description of the "main" ERC20 token used in the tests. + */ + erc20Token: Token; +} + +/** + * Set of wallets associated with each test suite file. + * Represents mapping (file name => private key of funded wallet). + */ +export type TestWallets = { + [testSuiteFile: string]: string; +}; + +/** + * Representation of the data required to run a test suite. + * Includes addresses of APIs, private keys of funded wallets, and other required data. + * + * Note: to share this object to each test suite, Jest requires object to be JSON-serializable. + * This means that it should be a raw object, and not an instance of a class with some associated functions. 
+ * If you need to add logic to this interface, consider creating a free function that accepts `TestContext` + * as an argument. + */ +export interface TestContext { + wallets: TestWallets; + environment: TestEnvironment; +} + +export interface Fee { + feeBeforeRefund: ethers.BigNumber; + feeAfterRefund: ethers.BigNumber; + refund: ethers.BigNumber; +} diff --git a/core/tests/ts-integration/tests/api/explorer.test.ts b/core/tests/ts-integration/tests/api/explorer.test.ts new file mode 100644 index 000000000000..695bd75e2f5a --- /dev/null +++ b/core/tests/ts-integration/tests/api/explorer.test.ts @@ -0,0 +1,589 @@ +import { TestMaster } from '../../src/index'; +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import fetch from 'node-fetch'; +import { anyTransaction, deployContract, getContractSource, getTestContract } from '../../src/helpers'; +import { sleep } from 'zksync-web3/build/src/utils'; +import { IERC20MetadataFactory } from 'zksync-web3/build/typechain'; +import { extractFee } from '../../src/modifiers/balance-checker'; +import { Token } from '../../src/types'; + +const contracts = { + counter: getTestContract('Counter'), + customAccount: getTestContract('CustomAccount'), + create: { + ...getTestContract('Import'), + factoryDep: getTestContract('Foo').bytecode + } +}; + +// Regular expression to match 32-byte hashes. +const HASH_REGEX = /^0x[\da-fA-F]{64}$/; +// Regular expression to match 20-byte addresses in lowercase. +const ADDRESS_REGEX = /^0x[\da-f]{40}$/; +// Regular expression to match variable-length hex number. +const HEX_VALUE_REGEX = /^0x[\da-fA-F]*$/; +// Regular expression to match ISO dates. 
+const DATE_REGEX = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?/; + +describe('Tests for the Explorer API', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let erc20: Token; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + erc20 = testMaster.environment().erc20Token; + }); + + test('Should test /network_stats endpoint', async () => { + const initialStats = await query('/network_stats'); + expect(initialStats).toEqual({ + last_sealed: expect.any(Number), + last_verified: expect.any(Number), + total_transactions: expect.any(Number) + }); + }); + + test('Should test /blocks endpoint', async () => { + // To ensure that the newest block is not verified yet, we're sending a transaction. + await anyTransaction(alice); + + const blocksResponse = await query('/blocks', { direction: 'older', limit: '1' }); + expect(blocksResponse).toHaveLength(1); + const apiBlock = blocksResponse[0]; + expect(apiBlock).toEqual({ + number: expect.any(Number), + l1TxCount: expect.any(Number), + l2TxCount: expect.any(Number), + hash: expect.stringMatching(/^0x[\da-fA-F]{64}$/), + status: 'sealed', + timestamp: expect.any(Number) + }); + + // Sanity checks for the values we can't control. + expect(apiBlock.l1TxCount).toBeGreaterThanOrEqual(0); + expect(apiBlock.l2TxCount).toBeGreaterThanOrEqual(0); + expectTimestampToBeSane(apiBlock.timestamp); + + // Retrieve block details through web3 API and cross-check the root hash. + const blockHash = await alice.provider.getBlock(apiBlock.number).then((block) => block.hash); + expect(apiBlock.hash).toEqual(blockHash); + + // Now try to find the same block using the "newer" query. + const newBlocksResponse = await query('/blocks', { + from: (apiBlock.number - 1).toString(), + direction: 'newer', + limit: '1' + }); + expect(newBlocksResponse).toHaveLength(1); + const apiBlockCopy = newBlocksResponse[0]; + // Response should be the same. 
+ expect(apiBlockCopy).toEqual(apiBlock); + + // Finally, in the long mode also check, that once block becomes finalized, status also changes + // in the explorer API. + if (!testMaster.isFastMode()) { + await waitFor(async () => { + const verifiedApiBlock = ( + await query('/blocks', { from: (apiBlock.number - 1).toString(), direction: 'newer', limit: '1' }) + )[0]; + return verifiedApiBlock.status == 'verified'; + }, 'Block was not verified'); + } + }); + + test('Should test /block endpoint', async () => { + // Send the transaction to query block data about. + const tx = await anyTransaction(alice); + + const apiBlock = await query(`/block/${tx.blockNumber}`); + expect(apiBlock).toMatchObject({ + number: expect.any(Number), + l1TxCount: expect.any(Number), + l2TxCount: expect.any(Number), + rootHash: expect.stringMatching(HASH_REGEX), + status: expect.stringMatching(/sealed|verified/), + timestamp: expect.any(Number) + }); + expect(apiBlock.number).toEqual(tx.blockNumber); + expect(apiBlock.rootHash).toEqual(tx.blockHash); + expect(apiBlock.l1TxCount).toBeGreaterThanOrEqual(0); + expect(apiBlock.l2TxCount).toBeGreaterThanOrEqual(1); // We know that at least 1 tx is included there. + expectTimestampToBeSane(apiBlock.timestamp); + + // Perform L1-related checks in the long mode only. + if (!testMaster.isFastMode()) { + // Check that L1 transaction count can also be non-zero. + const l1Tx = await alice.deposit({ token: zksync.utils.ETH_ADDRESS, amount: 1 }).then((tx) => tx.wait()); + const apiBlockWithL1Tx = await query(`/block/${l1Tx.blockNumber}`); + expect(apiBlockWithL1Tx.l1TxCount).toBeGreaterThanOrEqual(1); + + // Wait until the block is verified and check that the required fields are set. 
+ let verifiedBlock = null; + await waitFor(async () => { + verifiedBlock = await query(`/block/${tx.blockNumber}`); + return verifiedBlock.status == 'verified'; + }, 'Block was not verified'); + expect(verifiedBlock).toEqual({ + number: expect.any(Number), + l1TxCount: expect.any(Number), + l2TxCount: expect.any(Number), + rootHash: expect.stringMatching(/^0x[\da-fA-F]{64}$/), + status: 'verified', + timestamp: expect.any(Number), + commitTxHash: expect.stringMatching(HASH_REGEX), + committedAt: expect.stringMatching(DATE_REGEX), + proveTxHash: expect.stringMatching(HASH_REGEX), + provenAt: expect.stringMatching(DATE_REGEX), + executeTxHash: expect.stringMatching(HASH_REGEX), + executedAt: expect.stringMatching(DATE_REGEX) + }); + } + }); + + test('Should test /account endpoint for an EOA', async () => { + // Check response for the empty account. + const newEoa = testMaster.newEmptyAccount(); + const newEoaResponse = await query(`/account/${newEoa.address}`); + expect(newEoaResponse).toEqual({ + address: newEoa.address.toLowerCase(), + balances: {}, + sealedNonce: 0, + verifiedNonce: 0, + accountType: 'eOA' + }); + + // Check response for the non-empty account. + const aliceResponse = await query(`/account/${alice.address}`); + const aliceExpectedBalances: any = {}; + aliceExpectedBalances[zksync.utils.ETH_ADDRESS] = await apiBalanceObject( + zksync.utils.ETH_ADDRESS, + await alice.getBalance() + ); + aliceExpectedBalances[erc20.l2Address.toLowerCase()] = await apiBalanceObject( + erc20.l2Address, + await alice.getBalance(erc20.l2Address), + erc20.l1Address + ); + expect(aliceResponse.balances).toEqual(aliceExpectedBalances); + }); + + test('Should test /account endpoint for a contract', async () => { + // Check response for the empty account. 
+ const contract = await deployContract(alice, contracts.counter, []); + const contractResponse = await query(`/account/${contract.address}`); + expect(contractResponse).toEqual({ + address: contract.address.toLowerCase(), + balances: {}, + sealedNonce: 0, + verifiedNonce: 0, + accountType: 'contract' + }); + }); + + test('Should test /transaction endpoint', async () => { + const amount = 1; + const bob = testMaster.newEmptyAccount(); + const txNonce = await alice.getTransactionCount(); + const txHandle = await alice.transfer({ to: bob.address, amount, token: erc20.l2Address }); + const tx = await txHandle.wait(); + + const apiTx = await query(`/transaction/${tx.transactionHash}`); + expect(apiTx).toMatchObject({ + transactionHash: tx.transactionHash, + nonce: txNonce, + blockNumber: tx.blockNumber, + blockHash: tx.blockHash, + indexInBlock: expect.any(Number), + status: expect.stringMatching(/included|verified/), + fee: ethers.utils.hexValue(extractFee(tx as any).feeAfterRefund), + isL1Originated: false, + initiatorAddress: alice.address.toLowerCase(), + receivedAt: expect.stringMatching(DATE_REGEX), + balanceChanges: expect.any(Array), + erc20Transfers: expect.any(Array), + data: { + calldata: txHandle.data, + contractAddress: erc20.l2Address.toLowerCase(), + factoryDeps: null, + value: ethers.utils.hexValue(txHandle.value) + }, + logs: expect.any(Array), + transfer: { + from: alice.address.toLowerCase(), + to: bob.address.toLowerCase(), + amount: ethers.utils.hexValue(amount), + tokenInfo: await erc20TokenInfo(erc20.l2Address, erc20.l1Address) + } + }); + + if (!testMaster.isFastMode()) { + // Wait for the block to become verified and check that the corresponding fields are set. 
+ await waitFor(async () => { + const verifiedBlock = await query(`/block/${tx.blockNumber}`); + return verifiedBlock.status == 'verified'; + }, 'Block was not verified'); + + const finalizedApiTx = await query(`/transaction/${tx.transactionHash}`); + expect(finalizedApiTx).toMatchObject({ + ethCommitTxHash: expect.stringMatching(HASH_REGEX), + ethProveTxHash: expect.stringMatching(HASH_REGEX), + ethExecuteTxHash: expect.stringMatching(HASH_REGEX), + l1BatchNumber: expect.any(Number) + }); + } + }); + + test('Should test /transactions endpoint', async () => { + const amount = 1; + const bob = testMaster.newEmptyAccount(); + const txNonce = await alice.getNonce(); + const tx = await alice.transfer({ to: bob.address, amount }).then((tx) => tx.wait()); + + const response: any = await query('/transactions', { + blockNumber: tx.blockNumber.toString(), + limit: '100', + direction: 'older' + }); + expect(response).toEqual({ + total: expect.any(Number), + list: expect.anything() + }); + expect(response.total).toBeGreaterThanOrEqual(1); + + const apiTx = response.list.find((apiTx: any) => apiTx.transactionHash == tx.transactionHash); + expect(apiTx).toBeDefined(); + + // Ensure the response format based on the performed ETH transfer. + // After this check we assume that the response format is the same in other responses + // to avoid being too verbose. 
+ expect(apiTx).toMatchObject({ + transactionHash: tx.transactionHash, + nonce: txNonce, + blockNumber: tx.blockNumber, + blockHash: tx.blockHash, + indexInBlock: expect.any(Number), + status: expect.stringMatching(/included|verified/), + fee: ethers.utils.hexValue(extractFee(tx as any).feeAfterRefund), + isL1Originated: false, + initiatorAddress: alice.address.toLowerCase(), + receivedAt: expect.stringMatching(DATE_REGEX), + balanceChanges: expect.any(Array), + erc20Transfers: expect.any(Array), + data: { + calldata: '0x', + contractAddress: bob.address.toLowerCase(), + factoryDeps: null, + value: ethers.utils.hexValue(amount) + }, + transfer: { + from: alice.address.toLowerCase(), + to: bob.address.toLowerCase(), + amount: ethers.utils.hexValue(amount), + tokenInfo: { + address: zksync.utils.ETH_ADDRESS, + l1Address: zksync.utils.ETH_ADDRESS, + l2Address: zksync.utils.ETH_ADDRESS, + symbol: 'ETH', + name: 'Ether', + decimals: 18, + usdPrice: expect.any(String) + } + }, + type: tx.type + }); + + // Check other query parameters combinations + const backwards = await query('/transactions', { + limit: '1', + direction: 'older' + }); + expect(backwards.list.length).toEqual(1); + + const forward = await query('/transactions', { + limit: '1', + offset: '1', + direction: 'newer' + }); + expect(forward.list.length).toEqual(1); + + const account = await query('/transactions', { + limit: '1', + direction: 'older', + account: alice.address + }); + expect(account.list.length).toEqual(1); + + // Invariant: ERC20 tokens are distributed during init, so it must have transactions. 
+ const contract = await query('/transactions', { + limit: '1', + direction: 'older', + contract: erc20.l2Address + }); + expect(contract.list.length).toEqual(1); + }); + + test('Should test /contract endpoint', async () => { + const counterContract = await deployContract(alice, contracts.counter, []); + const createdInBlockNumber = ( + await alice.provider.getTransactionReceipt(counterContract.deployTransaction.hash) + ).blockNumber; + const apiContractInfo = await query(`/contract/${counterContract.address}`); + expect(apiContractInfo).toEqual({ + address: counterContract.address.toLowerCase(), + creatorAddress: alice.address.toLowerCase(), + creatorTxHash: counterContract.deployTransaction.hash, + createdInBlockNumber, + totalTransactions: 0, + bytecode: ethers.utils.hexlify(contracts.counter.bytecode), + verificationInfo: null, + balances: {} + }); + + // ERC20 contract is guaranteed to have more than 0 transactions. + const apiErc20Info = await query(`/contract/${erc20.l2Address}`); + expect(apiErc20Info.totalTransactions).toBeGreaterThan(0); + }); + + test('Should test /events endpoint', async () => { + const apiEvents = await query('/events', { + direction: 'older', + limit: '100', + fromBlockNumber: (await alice.provider.getBlockNumber()).toString() + }); + // Check generic API response structure. 
+ expect(apiEvents).toEqual({ + list: expect.anything(), + total: expect.any(Number) + }); + expect(apiEvents.total).toBeGreaterThan(0); + expect(apiEvents.list.length).toBeGreaterThan(0); + expect(apiEvents.list[0]).toMatchObject({ + address: expect.stringMatching(ADDRESS_REGEX), + blockHash: expect.stringMatching(HASH_REGEX), + blockNumber: expect.stringMatching(HEX_VALUE_REGEX), + data: expect.stringMatching(HEX_VALUE_REGEX), + logIndex: expect.stringMatching(HEX_VALUE_REGEX), + removed: expect.any(Boolean), + topics: expect.any(Array), + transactionHash: expect.stringMatching(HASH_REGEX), + transactionIndex: expect.stringMatching(HEX_VALUE_REGEX), + transactionLogIndex: expect.stringMatching(HEX_VALUE_REGEX) + }); + + // Test per-contract filtering. + const apiErc20Events = await query('/events', { + direction: 'older', + limit: '100', + contractAddress: erc20.l2Address + }); + for (const apiEvent of apiErc20Events.list) { + expect(apiEvent.address).toEqual(erc20.l2Address.toLowerCase()); + } + }); + + test('Should test /token endpoint', async () => { + const apiToken = await query(`/token/${erc20.l2Address}`); + expect(apiToken).toEqual(await erc20TokenInfo(erc20.l2Address, erc20.l1Address)); + }); + + test('should test contract verification', async () => { + if (process.env.RUN_CONTRACT_VERIFICATION_TEST != 'true') { + // Contract verification test is not requested to run. 
+ return; + } + + const counterContract = await deployContract(alice, contracts.counter, []); + const constructorArguments = counterContract.interface.encodeDeploy([]); + + const requestBody = { + contractAddress: counterContract.address, + contractName: 'Counter', + sourceCode: getContractSource('counter/counter.sol'), + compilerZksolcVersion: 'v1.3.1', + compilerSolcVersion: '0.8.16', + optimizationUsed: true, + constructorArguments + }; + let requestId = await query('/contract_verification', undefined, requestBody); + + await expectVerifyRequestToSucceed(requestId, counterContract.address); + }); + + test('should test multi-files contract verification', async () => { + if (process.env.RUN_CONTRACT_VERIFICATION_TEST != 'true') { + // Contract verification test is not requested to run. + return; + } + + const contractFactory = new zksync.ContractFactory(contracts.create.abi, contracts.create.bytecode, alice); + const contractHandle = await contractFactory.deploy({ + customData: { + factoryDeps: [contracts.create.factoryDep] + } + }); + const importContract = await contractHandle.deployed(); + const standardJsonInput = { + language: 'Solidity', + sources: { + 'create.sol': { content: getContractSource('create/create.sol') }, + 'Foo.sol': { content: getContractSource('create/Foo.sol') } + }, + settings: { + optimizer: { enabled: true } + } + }; + + const constructorArguments = importContract.interface.encodeDeploy([]); + + const requestBody = { + contractAddress: importContract.address, + contractName: 'create.sol:Import', + sourceCode: standardJsonInput, + codeFormat: 'solidity-standard-json-input', + compilerZksolcVersion: 'v1.3.1', + compilerSolcVersion: '0.8.16', + optimizationUsed: true, + constructorArguments + }; + let requestId = await query('/contract_verification', undefined, requestBody); + + await expectVerifyRequestToSucceed(requestId, importContract.address); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); + + /** + * Performs 
an API call to the Explorer API. + * + * @param endpoint API endpoint to call. + * @param queryParams Parameters for a query string. + * @param requestBody Request body. If provided, a POST request would be met and body would be encoded to JSON. + * @returns API response parsed as a JSON. + */ + async function query(endpoint: string, queryParams?: { [key: string]: string }, requestBody?: any): Promise { + const url = new URL(endpoint, testMaster.environment().explorerUrl); + // Iterate through query params and add them to URL. + if (queryParams) { + Object.entries(queryParams).forEach(([key, value]) => url.searchParams.set(key, value)); + } + + let init = undefined; + if (requestBody) { + init = { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(requestBody) + }; + } + + let response = await fetch(url, init); + try { + return await response.json(); + } catch (e) { + throw { + error: 'Could not decode JSON in response', + status: `${response.status} ${response.statusText}` + }; + } + } + + /** + * Constructs an Explorer API balance object representation + */ + async function apiBalanceObject(address: string, balance: ethers.BigNumber, l1Address?: string) { + address = address.toLowerCase(); + // `hexValue` can contain an uneven number of nibbles (unlike `.toHexString()`), which is required for API. + const hexBalance = ethers.utils.hexValue(balance); + if (address == zksync.utils.ETH_ADDRESS) { + return { + balance: hexBalance, + tokenInfo: { + address, + decimals: 18, + l1Address: address, + l2Address: address, + name: 'Ether', + symbol: 'ETH', + usdPrice: expect.any(String) + } + }; + } + + return { + balance: hexBalance, + tokenInfo: await erc20TokenInfo(address, l1Address) + }; + } + + /** + * Constructs an object that represent the token information sent by the Explorer API. 
+ */ + async function erc20TokenInfo(address: string, l1Address?: string) { + const erc20 = IERC20MetadataFactory.connect(address, alice); + return { + address: address.toLowerCase(), + decimals: await erc20.decimals(), + l1Address: l1Address ? l1Address.toLowerCase() : expect.stringMatching(ADDRESS_REGEX), + l2Address: address.toLowerCase(), + name: await erc20.name(), + symbol: await erc20.symbol(), + usdPrice: expect.any(String) + }; + } + + /** + * Runs a provided asynchronous predicate until it returns `true`. + * If it doesn't happen for a while, fails the test from which it has been called. + */ + async function waitFor(cond: () => Promise, errorMessage: string) { + const MAX_RETRIES = 15_000; + let iter = 0; + while (iter++ < MAX_RETRIES) { + if (await cond()) { + return; + } + await sleep(alice.provider.pollingInterval); + } + + expect(null).fail(errorMessage); + } + + async function expectVerifyRequestToSucceed(requestId: number, contractAddress: string) { + let retries = 0; + while (true) { + if (retries > 100) { + throw new Error('Too many retries'); + } + + let statusObject = await query(`/contract_verification/${requestId}`); + if (statusObject.status == 'successful') { + break; + } else if (statusObject.status == 'failed') { + throw new Error(statusObject.error); + } else { + retries += 1; + await sleep(alice.provider.pollingInterval); + } + } + + let contractObject = await query(`/contract/${contractAddress}`); + expect(contractObject.verificationInfo).toBeTruthy(); + } +}); + +/** + * Checks that timestamp has some relatively sane value (not too much in the past, and not in the future) + */ +function expectTimestampToBeSane(timestamp: number) { + const minDate = new Date('01 Jan 2022 00:00:00 UTC').getSeconds(); + const maxDate = Date.now(); + expect(timestamp).toBeGreaterThan(minDate); + expect(timestamp).toBeLessThanOrEqual(maxDate); +} diff --git a/core/tests/ts-integration/tests/api/web3.test.ts 
b/core/tests/ts-integration/tests/api/web3.test.ts new file mode 100644 index 000000000000..dbeb3883c9e2 --- /dev/null +++ b/core/tests/ts-integration/tests/api/web3.test.ts @@ -0,0 +1,664 @@ +/** + * This suite contains tests for the Web3 API compatibility and zkSync-specific extensions. + */ +import { TestMaster } from '../../src'; +import * as zksync from 'zksync-web3'; +import { types } from 'zksync-web3'; +import { ethers, Event } from 'ethers'; +import { serialize } from '@ethersproject/transactions'; +import { deployContract, getTestContract, waitForNewL1Batch } from '../../src/helpers'; +import { shouldOnlyTakeFee } from '../../src/modifiers/balance-checker'; +import fetch, { RequestInit } from 'node-fetch'; +import { EIP712_TX_TYPE, PRIORITY_OPERATION_L2_TX_TYPE } from 'zksync-web3/build/src/utils'; +// Regular expression to match variable-length hex number. +const HEX_VALUE_REGEX = /^0x[\da-fA-F]*$/; +const DATE_REGEX = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?/; + +const contracts = { + counter: getTestContract('Counter'), + events: getTestContract('Emitter') +}; + +describe('web3 API compatibility tests', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let l2Token: string; + + beforeAll(async () => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + l2Token = testMaster.environment().erc20Token.l2Address; + }); + + test('Should test block/transaction web3 methods', async () => { + const blockNumber = 1; + const blockNumberHex = '0x1'; + + // eth_getBlockByNumber + const blockHash = (await alice.provider.getBlock(blockNumber)).hash; + const blockWithTxsByNumber = await alice.provider.getBlockWithTransactions(blockNumber); + expect(blockWithTxsByNumber.gasLimit).bnToBeGt(0); + let sumTxGasUsed = ethers.BigNumber.from(0); + for (const tx of blockWithTxsByNumber.transactions) { + const receipt = await alice.provider.getTransactionReceipt(tx.hash); + sumTxGasUsed = 
sumTxGasUsed.add(receipt.gasUsed); + } + expect(blockWithTxsByNumber.gasUsed).bnToBeGte(sumTxGasUsed); + + // eth_getBlockByHash + await alice.provider.getBlock(blockHash); + const blockWithTxsByHash = await alice.provider.getBlockWithTransactions(blockHash); + expect(blockWithTxsByNumber.number).toEqual(blockWithTxsByHash.number); + + // eth_getBlockTransactionCountByNumber + const txCountByNumber = await alice.provider.send('eth_getBlockTransactionCountByNumber', [blockNumberHex]); + expect(parseInt(txCountByNumber, 16)).toEqual(blockWithTxsByNumber.transactions.length); + + // eth_getBlockTransactionCountByHash + const txCountByHash = await alice.provider.send('eth_getBlockTransactionCountByHash', [blockHash]); + expect(parseInt(txCountByHash, 16)).toEqual(blockWithTxsByNumber.transactions.length); + + // eth_getTransactionByBlockNumberAndIndex + const txByBlockNumberAndIndex = await alice.provider.send('eth_getTransactionByBlockNumberAndIndex', [ + blockNumberHex, + '0x0' + ]); + + // eth_getTransactionByBlockHashAndIndex + const txByBlockHashAndIndex = await alice.provider.send('eth_getTransactionByBlockHashAndIndex', [ + blockHash, + '0x0' + ]); + expect(txByBlockHashAndIndex.hash).toEqual(txByBlockNumberAndIndex.hash); + + // eth_getTransactionByHash + const txByHash = await alice.provider.send('eth_getTransactionByHash', [txByBlockNumberAndIndex.hash]); + expect(txByHash.hash).toEqual(txByBlockNumberAndIndex.hash); + }); + + test('Should test storage web3 methods', async () => { + const counterContract = await deployContract(alice, contracts.counter, []); + + // eth_getCode + const code = await alice.provider.getCode(counterContract.address); + expect(code).toEqual(ethers.utils.hexlify(contracts.counter.bytecode)); + + // eth_getStorageAt + const accCodeStorageAddress = '0x0000000000000000000000000000000000008002'; + const codeKey = '0x000000000000000000000000' + counterContract.address.substring(2); + const codeHash = await 
alice.provider.getStorageAt(accCodeStorageAddress, codeKey); + + const expectedHash = ethers.utils.sha256(contracts.counter.bytecode); + expect(codeHash.substring(10)).toEqual(expectedHash.substring(10)); + }); + + test('Should test some zks web3 methods', async () => { + // zks_getAllAccountBalances + const balances = await alice.getAllBalances(); + const tokenBalance = await alice.getBalance(l2Token); + expect(balances[l2Token.toLowerCase()].eq(tokenBalance)); + // zks_L1ChainId + const l1ChainId = (await alice.providerL1!.getNetwork()).chainId; + const l1ChainIdFromL2Provider = await alice.provider.l1ChainId(); + expect(l1ChainId).toEqual(l1ChainIdFromL2Provider); + // zks_getBlockDetails + const blockDetails = await alice.provider.getBlockDetails(1); + const block = await alice.provider.getBlock(1); + expect(blockDetails.rootHash).toEqual(block.hash); + }); + + test('Should check the network version', async () => { + // Valid network IDs for zkSync are greater than 270. + // This test suite may run on different envs, so we don't expect a particular ID. + await expect(alice.provider.send('net_version', [])).resolves.toMatch(/^27\d|28\d$/); + }); + + // @ts-ignore + test.each([ + ['net_peerCount', [], '0x0'], + ['net_listening', [], false], + ['web3_clientVersion', [], 'zkSync/v2.0'], + ['eth_protocolVersion', [], 'zks/1'], + ['eth_syncing', [], false], + ['eth_accounts', [], []], + ['eth_coinbase', [], '0x0000000000000000000000000000000000000000'], + ['eth_getCompilers', [], []], + ['eth_hashrate', [], '0x0'], + ['eth_mining', [], false], + ['eth_getUncleCountByBlockNumber', ['0x0'], '0x0'] + ])('Should test bogus web3 methods (%s)', async (method: string, input: string[], output: string) => { + await expect(alice.provider.send(method, input)).resolves.toEqual(output); + }); + + test('Should test bogus web3 methods (eth_getUncleCountByBlockHash)', async () => { + // This test can't be represented as a part of the table, since the input is dynamic. 
+ const firstBlockHash = (await alice.provider.getBlock(1)).hash; + await expect(alice.provider.send('eth_getUncleCountByBlockHash', [firstBlockHash])).resolves.toEqual('0x0'); + }); + + test('Should test web3 response extensions', async () => { + if (testMaster.isFastMode()) { + // This test requires a new L1 batch to be created, which may be very time consuming on stage. + return; + } + + const amount = 1; + const erc20ABI = ['function transfer(address to, uint256 amount)']; + const erc20contract = new ethers.Contract(l2Token, erc20ABI, alice); + const tx = await erc20contract.transfer(alice.address, amount).then((tx: any) => tx.wait()); + + // Trigger new L1 batch for all the fields in the receipt to be present. + // Normally it's faster than to wait for tx finalization. + await waitForNewL1Batch(alice); + + // We must get the receipt explicitly, because the receipt obtained via `tx.wait()` could resolve + // *before* the batch was created and not have all the fields set. + const receipt = await alice.provider.getTransactionReceipt(tx.transactionHash); + const logs = await alice.provider.getLogs({ + fromBlock: receipt.blockNumber, + toBlock: receipt.blockNumber + }); + const block = await alice.provider.getBlock(receipt.blockNumber); + const blockWithTransactions = await alice.provider.getBlockWithTransactions(receipt.blockNumber); + const tx1 = await alice.provider.getTransaction(tx.transactionHash); + expect(tx1.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. + expect(tx1.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. + expect(tx1.chainId).toEqual(+process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!); + expect(tx1.type).toEqual(0); + + expect(receipt.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. + expect(receipt.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. 
+ expect(receipt.logs[0].l1BatchNumber).toEqual(receipt.l1BatchNumber); + expect(logs[0].l1BatchNumber).toEqual(receipt.l1BatchNumber); + expect(block.l1BatchNumber).toEqual(receipt.l1BatchNumber); + expect(block.l1BatchTimestamp).toEqual(expect.anything()); + expect(blockWithTransactions.l1BatchNumber).toEqual(receipt.l1BatchNumber); + expect(blockWithTransactions.l1BatchTimestamp).toEqual(expect.anything()); + blockWithTransactions.transactions.forEach((txInBlock, _) => { + expect(txInBlock.l1BatchNumber).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. + expect(txInBlock.l1BatchTxIndex).toEqual(expect.anything()); // Can be anything except `null` or `undefined`. + expect(txInBlock.chainId).toEqual(+process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!); + expect([0, EIP712_TX_TYPE, PRIORITY_OPERATION_L2_TX_TYPE]).toContain(txInBlock.type); + }); + }); + + test('Should test various token methods', async () => { + const tokens = await alice.provider.getConfirmedTokens(); + expect(tokens).not.toHaveLength(0); // Should not be an empty array. 
+ + const price = await alice.provider.getTokenPrice(l2Token); + expect(+price!).toEqual(expect.any(Number)); + }); + + test('Should check transactions from API / Legacy tx', async () => { + const LEGACY_TX_TYPE = 0; + const legacyTx = await alice.sendTransaction({ + type: LEGACY_TX_TYPE, + to: alice.address + }); + await legacyTx.wait(); + + const legacyApiReceipt = await alice.provider.getTransaction(legacyTx.hash); + expect(legacyApiReceipt.gasPrice).bnToBeEq(legacyTx.gasPrice!); + }); + + test('Should check transactions from API / EIP1559 tx', async () => { + const EIP1559_TX_TYPE = 2; + const eip1559Tx = await alice.sendTransaction({ + type: EIP1559_TX_TYPE, + to: alice.address + }); + await eip1559Tx.wait(); + + const eip1559ApiReceipt = await alice.provider.getTransaction(eip1559Tx.hash); + expect(eip1559ApiReceipt.maxFeePerGas).bnToBeEq(eip1559Tx.maxFeePerGas!); + expect(eip1559ApiReceipt.maxPriorityFeePerGas).bnToBeEq(eip1559Tx.maxPriorityFeePerGas!); + }); + + test('Should test pub-sub API: blocks', async () => { + // Checks that we can receive an event for new block being created. + let wsProvider = new ethers.providers.WebSocketProvider(testMaster.environment().wsL2NodeUrl); + + let newBlock: number | null = null; + const currentBlock = await alice._providerL2().getBlockNumber(); + + // Pubsub notifier is not reactive + tests are being run in parallel, so we can't expect that the next block + // would be expected one. Instead, we just want to receive an event with the particular block number. + wsProvider.on('block', (block) => { + if (block >= currentBlock) { + newBlock = block; + } + }); + + const tx = await alice.transfer({ + to: alice.address, + amount: 1, + token: l2Token + }); + + let iterationsCount = 0; + while (!newBlock) { + await tryWait(iterationsCount++); + } + + // More blocks may be created between the moment when the block was requested and event was received. 
+ expect(newBlock).toBeGreaterThanOrEqual(currentBlock); + // ...though the gap should not be *too* big. + expect(newBlock).toBeLessThan(currentBlock + 100); + await tx.wait(); // To not leave a hanging promise. + wsProvider.removeAllListeners(); + await wsProvider.destroy(); + }); + + test('Should test pub-sub API: txs', async () => { + // Checks that we can receive an event for new pending transactions. + let wsProvider = new ethers.providers.WebSocketProvider(testMaster.environment().wsL2NodeUrl); + + // We're sending a few transfers from the wallet, so we'll use a new account to make event unique. + let uniqueRecipient = testMaster.newEmptyAccount().address; + + let newTxHash: string | null = null; + // We can't use `once` as there may be other pending txs sent together with our one. + wsProvider.on('pending', async (txHash) => { + const receipt = await alice.provider.getTransactionReceipt(txHash); + // We're waiting for the exact transaction to appear. + if (!receipt || receipt.to != uniqueRecipient) { + // Not the transaction we're looking for. + return; + } + + newTxHash = txHash; + }); + + const tx = await alice.transfer({ + to: uniqueRecipient, + amount: 1, + token: zksync.utils.ETH_ADDRESS // With ERC20 "to" would be an address of the contract. + }); + + let iterationsCount = 0; + while (!newTxHash) { + await tryWait(iterationsCount++); + } + + expect(newTxHash as string).toEqual(tx.hash); + await tx.wait(); // To not leave a hanging promise. + wsProvider.removeAllListeners(); + await wsProvider.destroy(); + }); + + test('Should test pub-sub API: events', async () => { + // Checks that we can receive an event for events matching a certain filter. + let wsProvider = new ethers.providers.WebSocketProvider(testMaster.environment().wsL2NodeUrl); + let newEvent: Event | null = null; + + // We're sending a few transfers from the wallet, so we'll use a new account to make event unique. 
+ let uniqueRecipient = testMaster.newEmptyAccount().address; + + // Setup a filter for an ERC20 transfer. + const erc20TransferTopic = ethers.utils.id('Transfer(address,address,uint256)'); + let filter = { + address: l2Token, + topics: [ + erc20TransferTopic, + ethers.utils.hexZeroPad(alice.address, 32), // Filter only transfers from this wallet., + ethers.utils.hexZeroPad(uniqueRecipient, 32) // Recipient + ] + }; + wsProvider.once(filter, (event) => { + newEvent = event; + }); + + // Setup a filter that should not match anything. + let incorrectFilter = { + address: alice.address + }; + wsProvider.once(incorrectFilter, (_) => { + expect(null).fail('Found log for incorrect filter'); + }); + + const tx = await alice.transfer({ + to: uniqueRecipient, + amount: 1, + token: l2Token + }); + + let iterationsCount = 0; + while (!newEvent) { + await tryWait(iterationsCount++); + } + + expect((newEvent as any as Event).transactionHash).toEqual(tx.hash); + await tx.wait(); // To not leave a hanging promise. 
+ wsProvider.removeAllListeners(); + await wsProvider.destroy(); + }); + + test('Should test transaction status', async () => { + const amount = 1; + const token = l2Token; + + const randomHash = ethers.utils.hexlify(ethers.utils.randomBytes(32)); + let status = await alice.provider.getTransactionStatus(randomHash); + expect(status).toEqual(types.TransactionStatus.NotFound); + + const sentTx = await alice.transfer({ + to: alice.address, + amount, + token + }); + + do { + status = await alice.provider.getTransactionStatus(sentTx.hash); + } while (status == types.TransactionStatus.NotFound); + + status = await alice.provider.getTransactionStatus(sentTx.hash); + expect( + status == types.TransactionStatus.Processing || status == types.TransactionStatus.Committed + ).toBeTruthy(); + + await sentTx.wait(); + status = await alice.provider.getTransactionStatus(sentTx.hash); + expect(status == types.TransactionStatus.Committed || status == types.TransactionStatus.Finalized).toBeTruthy(); + + if (!testMaster.isFastMode()) { + // It's not worth it to wait for finalization in the API test. + // If it works on localhost, it *must* work elsewhere. 
+ await sentTx.waitFinalize(); + status = await alice.provider.getTransactionStatus(sentTx.hash); + expect(status).toEqual(types.TransactionStatus.Finalized); + } + }); + + test('Should test L2 transaction details', async () => { + const amount = 1; + const token = l2Token; + + const randomHash = ethers.utils.hexlify(ethers.utils.randomBytes(32)); + let details = await alice.provider.getTransactionDetails(randomHash); + expect(details).toEqual(null); + + const sentTx = await alice.transfer({ + to: alice.address, + amount, + token + }); + let expectedDetails = { + fee: expect.stringMatching(HEX_VALUE_REGEX), + initiatorAddress: alice.address.toLowerCase(), + isL1Originated: false, + receivedAt: expect.stringMatching(DATE_REGEX), + status: expect.stringMatching(/pending|failed|included|verified/) + }; + details = await alice.provider.getTransactionDetails(sentTx.hash); + expect(details).toMatchObject(expectedDetails); + + const receipt = await sentTx.wait(); + expectedDetails.status = expect.stringMatching(/failed|included|verified/); + + details = await alice.provider.getTransactionDetails(receipt.transactionHash); + expect(details).toMatchObject(expectedDetails); + + if (!testMaster.isFastMode()) { + // It's not worth it to wait for finalization in the API test. + // If it works on localhost, it *must* work elsewhere. 
+ await sentTx.waitFinalize(); + details = await alice.provider.getTransactionDetails(receipt.transactionHash); + expectedDetails.status = expect.stringMatching(/verified/); + expect(details).toMatchObject(expectedDetails); + } + }); + + test('Should test L1 transaction details', async () => { + if (testMaster.isFastMode()) { + return; + } + let amount = 1; + + const sentTx = await alice.deposit({ + token: zksync.utils.ETH_ADDRESS, + amount + }); + const receipt = await sentTx.wait(); + + let details = await alice.provider.getTransactionDetails(receipt.transactionHash); + let expectedDetails = { + fee: expect.stringMatching(HEX_VALUE_REGEX), + initiatorAddress: expect.stringMatching(HEX_VALUE_REGEX), + isL1Originated: true, + receivedAt: expect.stringMatching(DATE_REGEX), + status: expect.stringMatching(/failed|included|verified/) + }; + expect(details).toMatchObject(expectedDetails); + }); + + test('Should check miniblock range', async () => { + const l1BatchNumber = (await alice.provider.getL1BatchNumber()) - 2; + const range = await alice.provider.getL1BatchBlockRange(l1BatchNumber); + expect(range).toBeTruthy(); + + const [from, to] = range!; + + for (let i = from; i <= to; i++) { + const block = await alice.provider.getBlockWithTransactions(i); + expect(block.l1BatchNumber).toEqual(l1BatchNumber); + expect(block.l1BatchTimestamp).toEqual(expect.anything()); + expect(block.number).toEqual(i); + for (let tx of block.transactions) { + expect(tx.blockNumber).toEqual(i); + const receipt = await alice.provider.getTransactionReceipt(tx.hash); + expect(receipt.l1BatchNumber).toEqual(l1BatchNumber); + } + } + + const prevBlock = await alice.provider.getBlockWithTransactions(from - 1); + expect(prevBlock.l1BatchNumber).toEqual(l1BatchNumber - 1); + + const nextBlock = await alice.provider.getBlock(to + 1); + expect(nextBlock.l1BatchNumber).toEqual(l1BatchNumber + 1); + }); + + test.skip('Should listen for human-readable events', async () => { + const contract = await 
deployContract(alice, contracts.events, []); + + const blockNumber = await alice.provider.getBlockNumber(); + const deadbeef = ethers.utils.hexZeroPad('0xdeadbeef', 20); + const c0ffee = ethers.utils.hexZeroPad('0xc0ffee', 20); + const emitted = { + trivial: 0, + simple: 0, + indexed: 0 + }; + + contract.connect(alice); + contract + .on(contract.filters.Trivial(), () => ++emitted.trivial) + .on(contract.filters.Simple(), (_number: any, address: any) => { + ++emitted.simple; + expect(address.toLowerCase()).toEqual(deadbeef); + }) + .on(contract.filters.Indexed(42), (number: any, address: any) => { + ++emitted.indexed; + expect(number.toNumber()).toEqual(42); + expect(address.toLowerCase()).toEqual(c0ffee); + }); + + let tx = await contract.test(42); + await tx.wait(); + tx = await contract.test(18); + await tx.wait(); + + // Pubsub notify is not reactive and may be laggy, so we want to increase the chances + // for test to pass. So we try to sleep a few iterations until we receive expected amount + // of events. If we won't receive them, we continue and the test will fail anyway. 
+ const expectedTrivialEventsCount = 2; + const expectedSimpleEventsCount = 2; + const expectedIndexedEventsCount = 1; + + for (let iter = 0; iter < 20; iter++) { + if ( + emitted.trivial >= expectedTrivialEventsCount && + emitted.simple >= expectedSimpleEventsCount && + emitted.indexed >= expectedIndexedEventsCount + ) { + break; + } + await zksync.utils.sleep(alice.provider.pollingInterval); + } + + let events = await contract.queryFilter(contract.filters.Trivial(), blockNumber); + expect(events).toHaveLength(expectedTrivialEventsCount); + events = await contract.queryFilter(contract.filters.Simple(), blockNumber); + expect(events).toHaveLength(expectedSimpleEventsCount); + events = await contract.queryFilter(contract.filters.Indexed(42), blockNumber); + expect(events).toHaveLength(1); + + expect(emitted.trivial).toEqual(expectedTrivialEventsCount); + expect(emitted.simple).toEqual(expectedSimpleEventsCount); + expect(emitted.indexed).toEqual(expectedIndexedEventsCount); + + contract.removeAllListeners(); + }); + + test('Should check metamask interoperability', async () => { + // Prepare "metamask" wallet. + const from = new MockMetamask(alice); + const to = alice.address; + const web3Provider = new zksync.Web3Provider(from); + const signer = web3Provider.getSigner(); + + // Check to ensure that tx was correctly processed. + const feeCheck = await shouldOnlyTakeFee(alice); + + // Ensure that transaction is accepted by the server. + const transfer = signer.transfer({ + to, + token: l2Token, + amount: 1, + overrides: { + // We're testing sending an EIP712 tx with metamask. 
+ type: zksync.utils.EIP712_TX_TYPE + } + }); + await expect(transfer).toBeAccepted([feeCheck]); + }); + + test('Should check API accepts JSON RPC requests with additional fields', async () => { + const req: RequestInit = { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + jsonrpc: '2.0', + method: 'eth_blockNumber', + params: [], + id: 1, + someExtraField: true + }) + }; + + await expect( + fetch(testMaster.environment().l2NodeUrl, req).then((response) => response.json()) + ).resolves.toHaveProperty('result', expect.stringMatching(HEX_VALUE_REGEX)); + }); + + test('Should check API returns error when there are too many logs in eth_getLogs', async () => { + const contract = await deployContract(alice, contracts.events, []); + const maxLogsLimit = parseInt(process.env.API_WEB3_JSON_RPC_REQ_ENTITIES_LIMIT!); + + // Send 3 transactions that emit `maxLogsLimit / 2` events. + const tx1 = await contract.emitManyEvents(maxLogsLimit / 2); + const tx1Receipt = await tx1.wait(); + + const tx2 = await contract.emitManyEvents(maxLogsLimit / 2); + await tx2.wait(); + + const tx3 = await contract.emitManyEvents(maxLogsLimit / 2); + const tx3Receipt = await tx3.wait(); + + // There are around `0.5 * maxLogsLimit` logs in [tx1Receipt.blockNumber, tx1Receipt.blockNumber] range, + // so query with such filter should succeed. + await expect(alice.provider.getLogs({ fromBlock: tx1Receipt.blockNumber, toBlock: tx1Receipt.blockNumber })) + .resolves; + + // There are at least `1.5 * maxLogsLimit` logs in [tx1Receipt.blockNumber, tx3Receipt.blockNumber] range, + // so query with such filter should fail. 
+ let thrown = false; + try { + await alice.provider.getLogs({ fromBlock: tx1Receipt.blockNumber, toBlock: tx3Receipt.blockNumber }); + } catch (err: any) { + thrown = true; + expect(err.toString().includes(`Query returned more than ${maxLogsLimit} results.`)).toBeTruthy(); + } + expect(thrown).toBeTruthy(); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); + + /** + * Waits a bit more. Fails the test if the `iterationStep` is too big. + * + * @param iterationStep The number of times this function has been called. + */ + async function tryWait(iterationStep: number) { + const maxWaitTimeMs = 100_000; // 100 seconds + const maxRetries = maxWaitTimeMs / alice.provider.pollingInterval; + await zksync.utils.sleep(alice.provider.pollingInterval); + if (iterationStep >= maxRetries) { + expect(null).fail(`Timeout while waiting for updates.`); + } + } +}); + +export class MockMetamask { + readonly isMetaMask: boolean = true; + readonly networkVersion = parseInt(process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!, 10); + readonly chainId: string = ethers.utils.hexlify(parseInt(process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!, 10)); + + constructor(readonly wallet: zksync.Wallet) {} + + // EIP-1193 + async request(req: { method: string; params?: any[] }) { + let { method, params } = req; + params ??= []; + + switch (method) { + case 'eth_requestAccounts': + case 'eth_accounts': + return [this.wallet.address]; + case 'net_version': + return this.networkVersion; + case 'eth_chainId': + return this.chainId; + case 'personal_sign': + return this.wallet.signMessage(params[0]); + case 'eth_sendTransaction': + const tx = params[0]; + delete tx.gas; + let populated = { + ...(await this.wallet.populateTransaction(tx)), + nonce: await this.wallet.getTransactionCount() + }; + delete populated.from; + const signature = this.wallet._signingKey().signDigest(ethers.utils.keccak256(serialize(populated))); + const signed = serialize(populated, signature); + const response = await 
this.wallet.provider.sendTransaction(signed); + return response.hash; + case 'eth_getTransactionCount': + return this.wallet.getTransactionCount(); + case 'eth_signTypedData_v4': + let payload = JSON.parse(params[1]); + delete payload.types.EIP712Domain; + return this.wallet._signTypedData(payload.domain, payload.types, payload.message); + default: + // unfortunately though, metamask does not forward methods from zks_ namespace + if (method.startsWith('zks')) { + throw new Error('zks namespace methods are not forwarded by metamask'); + } + return this.wallet.provider.send(method, params); + } + } +} diff --git a/core/tests/ts-integration/tests/contracts.test.ts b/core/tests/ts-integration/tests/contracts.test.ts new file mode 100644 index 000000000000..de26528954cc --- /dev/null +++ b/core/tests/ts-integration/tests/contracts.test.ts @@ -0,0 +1,297 @@ +/** + * Generic tests checking the deployed smart contract behavior. + * + * Note: if you are going to write multiple tests checking specific topic (e.g. `CREATE2` behavior or something like this), + * consider creating a separate suite. + * Let's try to keep only relatively simple and self-contained tests here. 
+ */ + +import { TestMaster } from '../src/index'; +import { deployContract, getTestContract, waitForNewL1Batch } from '../src/helpers'; +import { shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; + +import * as ethers from 'ethers'; +import * as zksync from 'zksync-web3'; +import { Provider } from 'zksync-web3'; +import { RetryProvider } from '../src/retry-provider'; + +const contracts = { + counter: getTestContract('Counter'), + constructor: getTestContract('SimpleConstructor'), + expensive: getTestContract('Expensive'), + infinite: getTestContract('InfiniteLoop'), + create: { + ...getTestContract('Import'), + factoryDep: getTestContract('Foo').bytecode + }, + context: getTestContract('Context') +}; + +describe('Smart contract behavior checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + + // Contracts shared in several tests. + let counterContract: zksync.Contract; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + }); + + test('Should deploy & call a contract', async () => { + counterContract = await deployContract(alice, contracts.counter, []); + const feeCheck = await shouldOnlyTakeFee(alice); + + // Change the storage slot and ensure it actually changes. 
+ expect(counterContract.get()).resolves.bnToBeEq(0); + await expect(counterContract.increment(42)).toBeAccepted([feeCheck]); + expect(counterContract.get()).resolves.bnToBeEq(42); + }); + + test('Should deploy contract with a constructor', async () => { + const contract1 = await deployContract(alice, contracts.constructor, [2, 3, false]); + await expect(contract1.get()).resolves.bnToBeEq(2 * 3); + + const contract2 = await deployContract(alice, contracts.constructor, [5, 10, false]); + await expect(contract2.get()).resolves.bnToBeEq(5 * 10); + }); + + test('Should deploy contract with create', async () => { + const contractFactory = new zksync.ContractFactory(contracts.create.abi, contracts.create.bytecode, alice); + + const contract = await contractFactory.deploy({ + customData: { + factoryDeps: [contracts.create.factoryDep] + } + }); + await contract.deployed(); + await expect(contract.getFooName()).resolves.toBe('Foo'); + }); + + test('Should perform "expensive" contract calls', async () => { + const expensiveContract = await deployContract(alice, contracts.expensive, []); + + // First, check that the transaction that is too expensive would be rejected by the API server. + await expect(expensiveContract.expensive(2000)).toBeRejected( + 'transaction may fail or may require manual gas limit' + ); + + // Second, check that processable transaction may fail with out of gas error. + // To do so, we estimate gas for arg "1" and supply it to arg "20". + // This guarantees that transaction won't fail during verification. + const lowGasLimit = await expensiveContract.estimateGas.expensive(1); + await expect( + expensiveContract.expensive(20, { + gasLimit: lowGasLimit + }) + ).toBeReverted(); + }); + + test('Should fail an infinite loop transaction', async () => { + if (testMaster.isFastMode()) { + console.log(`This test is disabled. 
If you see this line, please check if the issue is resolved`); + return; + } + + const infiniteLoop = await deployContract(alice, contracts.infinite, []); + + // Test eth_call first + // await expect(infiniteLoop.callStatic.infiniteLoop()).toBeRejected('cannot estimate transaction: out of gas'); + // ...and then an actual transaction + await expect(infiniteLoop.infiniteLoop({ gasLimit: 1_000_000 })).toBeReverted([]); + }); + + test('Should test reverting storage logs', async () => { + // In this test we check that if transaction reverts, it rolls back the storage slots. + const prevValue = await counterContract.get(); + + // We manually provide a constant, since otherwise the exception would be thrown + // while estimating gas + await expect(counterContract.incrementWithRevert(5, true, { gasLimit: 5000000 })).toBeReverted([]); + + // The tx has been reverted, so the value Should not have been changed: + const newValue = await counterContract.get(); + expect(newValue).bnToBeEq(prevValue, 'The counter has changed despite the revert'); + }); + + test('Should not allow invalid constructor calldata', async () => { + const randomWrongArgs = [12, 12, true]; + await expect(deployContract(alice, contracts.counter, randomWrongArgs)).toBeRejected('too many arguments'); + }); + + test('Should not allow invalid contract bytecode', async () => { + // In this test we ensure that bytecode validity is checked by server. + + // Helpers to interact with the RPC API directly. + const send = (tx: any) => alice.provider.send('eth_sendRawTransaction', [zksync.utils.serialize(tx)]); + const call = (tx: any) => alice.provider.send('eth_call', [Provider.hexlifyTransaction(tx)]); + const estimateGas = (tx: any) => alice.provider.send('eth_estimateGas', [Provider.hexlifyTransaction(tx)]); + // Prepares an invalid serialized transaction with the bytecode of provided length. 
+ const invalidTx = (length: number) => invalidBytecodeTestTransaction(alice.provider, [new Uint8Array(length)]); + + const txWithUnchunkableBytecode = await invalidTx(17); + const unchunkableError = 'Bytecode length is not divisible by 32'; + await expect(send(txWithUnchunkableBytecode)).toBeRejected(unchunkableError); + await expect(call(txWithUnchunkableBytecode)).toBeRejected(unchunkableError); + await expect(estimateGas(txWithUnchunkableBytecode)).toBeRejected(unchunkableError); + + const txWithBytecodeWithEvenChunks = await invalidTx(64); + const evenChunksError = 'Bytecode has even number of 32-byte words'; + await expect(send(txWithBytecodeWithEvenChunks)).toBeRejected(evenChunksError); + await expect(call(txWithBytecodeWithEvenChunks)).toBeRejected(evenChunksError); + await expect(estimateGas(txWithBytecodeWithEvenChunks)).toBeRejected(evenChunksError); + + const longBytecodeLen = zksync.utils.MAX_BYTECODE_LEN_BYTES + 32; + const txWithTooLongBytecode = await invalidTx(longBytecodeLen); + const tooLongBytecodeError = `Bytecode too long: ${longBytecodeLen} bytes, while max ${zksync.utils.MAX_BYTECODE_LEN_BYTES} allowed`; + await expect(send(txWithTooLongBytecode)).toBeRejected(tooLongBytecodeError); + await expect(call(txWithTooLongBytecode)).toBeRejected(tooLongBytecodeError); + await expect(estimateGas(txWithTooLongBytecode)).toBeRejected(tooLongBytecodeError); + }); + + test('Should interchangeably use ethers for eth calls', async () => { + // In this test we make sure that we can use `ethers` `Contract` object and provider + // to do an `eth_Call` and send transactions to zkSync contract. + // This check is important to ensure that external apps do not have to use our SDK and + // can keep using `ethers` on their side. 
+ + const rpcAddress = testMaster.environment().l2NodeUrl; + const provider = new RetryProvider(rpcAddress); + const wallet = new ethers.Wallet(alice.privateKey, provider); + const ethersBasedContract = new ethers.Contract(counterContract.address, counterContract.interface, wallet); + + const oldValue = await ethersBasedContract.get(); + await expect(ethersBasedContract.increment(1)).toBeAccepted([]); + expect(ethersBasedContract.get()).resolves.bnToBeEq(oldValue.add(1)); + }); + + test('Should check that eth_call works with custom block tags', async () => { + // Retrieve value normally. + const counterValue = await counterContract.get(); + + // Check current block tag. + await expect(counterContract.callStatic.get({ blockTag: 'pending' })).resolves.bnToBeEq(counterValue); + + // Block from the future. + await expect(counterContract.callStatic.get({ blockTag: 1000000000 })).toBeRejected( + "Block with such an ID doesn't exist yet" + ); + + // Genesis block + await expect(counterContract.callStatic.get({ blockTag: 0 })).toBeRejected('call revert exception'); + }); + + test('Should correctly process msg.value inside constructor and in ethCall', async () => { + const value = ethers.BigNumber.from(1); + + // Check that value provided to the constructor is processed. + const contextContract = await deployContract(alice, contracts.context, [], undefined, { value }); + await expect(contextContract.valueOnCreate()).resolves.bnToBeEq(value); + + // Check that value provided to `eth_Call` is processed. + // This call won't return anything, but will throw if it'll result in a revert. + await contextContract.callStatic.requireMsgValue(value, { + value + }); + }); + + test('Should check block properties for tx execution', async () => { + if (testMaster.isFastMode()) { + // This test requires a new L1 batch to be created, which may be very time consuming on stage. + return; + } + + // This test checks that block properties are correctly provided to the smart contracts. 
+ // Note that inside the VM we use l1 batches, not l2 blocks. + // Also we have to use the `pending` block tag for eth calls, because by default it's "latest" and + // will correspond to the last *sealed* batch (e.g. previous one). + + const contextContract = await deployContract(alice, contracts.context, []); + const deploymentBlock = contextContract.deployTransaction.blockNumber!; + const deploymentBlockInfo = await alice.provider.getBlock(deploymentBlock); + // If batch was not sealed, its number may not be present in the receipt. + const deploymentl1Batch = deploymentBlockInfo.l1BatchNumber ?? (await alice.provider.getL1BatchNumber()) + 1; + + // Check that block gas limit is correct. + const blockGasLimit = await contextContract.getBlockGasLimit({ blockTag: 'pending' }); + expect(blockGasLimit).bnToBeGt(0); + + // Record values from the contract right after deployment to compare them with new ones later. + const initialL1Batch = await contextContract.getBlockNumber({ + blockTag: 'pending' + }); + const initialTimestamp = await contextContract.getBlockTimestamp({ + blockTag: 'pending' + }); + // Soft check to verify that `tx.gasprice` doesn't revert. + await contextContract.getTxGasPrice({ blockTag: 'pending' }); + + // Check that current number of L1 batch on contract has sane value. + // Here and below we use "gte"/"gt" instead of strict checks because tests are executed in parallel + // and we can't guarantee a certain block commitment order. + expect(initialL1Batch).bnToBeGte(deploymentl1Batch); + + // Wait till the new L1 batch is created. + await waitForNewL1Batch(alice); + + // Now we're sure than a new L1 batch is created, we may check the new properties. 
+ const newL1Batch = await contextContract.getBlockNumber({ + blockTag: 'pending' + }); + const newTimestamp = await contextContract.getBlockTimestamp({ + blockTag: 'pending' + }); + + expect(newL1Batch).bnToBeGt(initialL1Batch, 'New L1 batch number must be strictly greater'); + expect(newTimestamp).bnToBeGte(initialTimestamp, 'New timestamp must not be less than previous one'); + + // And finally check block properties for the actual contract execution (not `eth_call`). + const acceptedBlockLag = 20; + const acceptedTimestampLag = 600; + await expect(contextContract.checkBlockNumber(newL1Batch, newL1Batch.add(acceptedBlockLag))).toBeAccepted([]); + // `newTimestamp` was received from the API, so actual timestamp in the state keeper may be lower. + // This is why we use `initialTimestamp` here. + await expect( + contextContract.checkBlockTimestamp(initialTimestamp, initialTimestamp.add(acceptedTimestampLag)) + ).toBeAccepted([]); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); + +async function invalidBytecodeTestTransaction( + provider: zksync.Provider, + factoryDeps: Uint8Array[] +): Promise { + const chainId = (await provider.getNetwork()).chainId; + + const gasPrice = await provider.getGasPrice(); + const address = zksync.Wallet.createRandom().address; + const tx: ethers.providers.TransactionRequest = { + to: address, + from: address, + nonce: 0, + + gasLimit: ethers.BigNumber.from(300000), + + data: '0x', + value: 0, + chainId, + + type: 113, + + maxPriorityFeePerGas: gasPrice, + maxFeePerGas: gasPrice, + + customData: { + gasPerPubdata: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + factoryDeps, + customSignature: new Uint8Array(17) + } + }; + + return tx; +} diff --git a/core/tests/ts-integration/tests/custom-account.test.ts b/core/tests/ts-integration/tests/custom-account.test.ts new file mode 100644 index 000000000000..8df5a93b2ea8 --- /dev/null +++ b/core/tests/ts-integration/tests/custom-account.test.ts @@ -0,0 +1,199 @@ 
+/** + * This suite contains tests checking the behavior of custom accounts (accounts represented by smart contracts). + */ + +import { TestMaster } from '../src/index'; + +import * as zksync from 'zksync-web3'; +import { utils, types } from 'zksync-web3'; +import * as ethers from 'ethers'; +import { deployContract, getTestContract } from '../src/helpers'; +import { ERC20_PER_ACCOUNT, L2_ETH_PER_ACCOUNT } from '../src/context-owner'; +import { shouldChangeETHBalances, shouldChangeTokenBalances } from '../src/modifiers/balance-checker'; + +const contracts = { + customAccount: getTestContract('CustomAccount'), + context: getTestContract('Context') +}; + +// We create multiple custom accounts and we need to fund them with ETH to pay for fees. +const ETH_PER_CUSTOM_ACCOUNT = L2_ETH_PER_ACCOUNT.div(8); +const TRANSFER_AMOUNT = 1; + +describe('Tests for the custom account behavior', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let customAccount: zksync.Contract; + let erc20Address: string; + let erc20: zksync.Contract; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + erc20Address = testMaster.environment().erc20Token.l2Address; + erc20 = new zksync.Contract( + erc20Address, + utils.IERC20, + // Signer doesn't matter for custom account transactions, as signature would be replaced with custom one. 
+ alice + ); + }); + + test('Should deploy custom account', async () => { + const violateRules = false; + customAccount = await deployContract(alice, contracts.customAccount, [violateRules], 'createAccount'); + + // Now we need to check that it was correctly marked as an account: + const contractAccountInfo = await alice.provider.getContractAccountInfo(customAccount.address); + + // Checking that the version of the account abstraction is correct + expect(contractAccountInfo.supportedAAVersion).toEqual(types.AccountAbstractionVersion.Version1); + + // Checking that the nonce ordering is correct + expect(contractAccountInfo.nonceOrdering).toEqual(types.AccountNonceOrdering.Sequential); + }); + + test('Should fund the custom account', async () => { + await alice.transfer({ to: customAccount.address, amount: ETH_PER_CUSTOM_ACCOUNT }).then((tx) => tx.wait()); + await alice + .transfer({ + to: customAccount.address, + token: erc20Address, + amount: ERC20_PER_ACCOUNT.div(4) + }) + .then((tx) => tx.wait()); + }); + + test('Should execute contract by custom account', async () => { + const tx = await erc20.populateTransaction.transfer(alice.address, TRANSFER_AMOUNT); + + const erc20BalanceChange = await shouldChangeTokenBalances(erc20Address, [ + // Custom account change (sender) + { + addressToCheck: customAccount.address, + wallet: alice, + change: -TRANSFER_AMOUNT + }, + // Alice change (recipient) + { wallet: alice, change: TRANSFER_AMOUNT } + ]); + const feeCheck = await shouldChangeETHBalances([ + // 0 change would only check for fees. + { addressToCheck: customAccount.address, wallet: alice, change: 0 } + ]); + + // Check that transaction succeeds. 
+ await expect(sendCustomAccountTransaction(tx, alice.provider, customAccount.address)).toBeAccepted([ + erc20BalanceChange, + feeCheck + ]); + }); + + test('Should fail the validation with incorrect signature', async () => { + const tx = await erc20.populateTransaction.transfer(alice.address, TRANSFER_AMOUNT); + const fakeSignature = new Uint8Array(12); + await expect( + sendCustomAccountTransaction(tx, alice.provider, customAccount.address, fakeSignature) + ).toBeRejected('failed to validate the transaction.'); + }); + + test('Should not allow violating validation rules', async () => { + // We configure account to violate storage access rules during tx validation. + const violateRules = true; + const badCustomAccount = await deployContract(alice, contracts.customAccount, [violateRules], 'createAccount'); + + // Fund the account. + await alice + .transfer({ + to: badCustomAccount.address, + amount: ETH_PER_CUSTOM_ACCOUNT + }) + .then((tx) => tx.wait()); + await alice + .transfer({ + to: badCustomAccount.address, + token: erc20Address, + amount: TRANSFER_AMOUNT + }) + .then((tx) => tx.wait()); + + let tx = await erc20.populateTransaction.transfer(alice.address, TRANSFER_AMOUNT); + await expect(sendCustomAccountTransaction(tx, alice.provider, badCustomAccount.address)).toBeRejected( + 'Violated validation rules' + ); + }); + + test('Should not execute from non-account', async () => { + // Note that we supply "create" instead of "createAccount" here -- the code is the same, but it'll + // be treated as a common contract. + const violateRules = false; + const nonAccount = await deployContract(alice, contracts.customAccount, [violateRules], 'create'); + + // Fund the account. 
+ await alice.transfer({ to: nonAccount.address, amount: ETH_PER_CUSTOM_ACCOUNT }).then((tx) => tx.wait()); + await alice + .transfer({ + to: nonAccount.address, + token: erc20Address, + amount: TRANSFER_AMOUNT + }) + .then((tx) => tx.wait()); + + let tx = await erc20.populateTransaction.transfer(alice.address, TRANSFER_AMOUNT); + await expect(sendCustomAccountTransaction(tx, alice.provider, nonAccount.address)).toBeRejected( + "invalid sender. can't start a transaction from a non-account" + ); + }); + + test('Should provide correct tx.origin for EOA and custom accounts', async () => { + const contextContract = await deployContract(alice, contracts.context, []); + + // For EOA, the tx.origin should be the EOA + await expect(contextContract.checkTxOrigin(alice.address)).toBeAccepted([]); + + // For custom accounts, the tx.origin should be the bootloader address + const customAATx = await contextContract.populateTransaction.checkTxOrigin(utils.BOOTLOADER_FORMAL_ADDRESS); + await expect(sendCustomAccountTransaction(customAATx, alice.provider, customAccount.address)).toBeAccepted([]); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); + +// Accepts the tx request with filled transaction's data and +// sends the transaction that should be accepted by the `custom-aa.sol` test contract. 
+async function sendCustomAccountTransaction( + tx: ethers.PopulatedTransaction, + web3Provider: zksync.Provider, + accountAddress: string, + customSignature?: Uint8Array +) { + const gasLimit = await web3Provider.estimateGas({ + ...tx, + from: accountAddress + }); + const gasPrice = await web3Provider.getGasPrice(); + + tx.gasLimit = gasLimit; + tx.gasPrice = gasPrice; + tx.chainId = parseInt(process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!, 10); + tx.value = ethers.BigNumber.from(0); + tx.nonce = await web3Provider.getTransactionCount(accountAddress); + tx.type = 113; + tx.from = accountAddress; + + tx.customData = { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT + }; + + const signedTxHash = zksync.EIP712Signer.getSignedDigest(tx); + tx.customData = { + ...tx.customData, + from: accountAddress, + customSignature: customSignature ?? ethers.utils.concat([signedTxHash, accountAddress]) + }; + const serializedTx = utils.serialize({ ...tx }); + + return await web3Provider.sendTransaction(serializedTx); +} diff --git a/core/tests/ts-integration/tests/erc20.test.ts b/core/tests/ts-integration/tests/erc20.test.ts new file mode 100644 index 000000000000..d56d2ba9140b --- /dev/null +++ b/core/tests/ts-integration/tests/erc20.test.ts @@ -0,0 +1,202 @@ +/** + * This suite contains tests checking default ERC-20 contract behavior. 
+ */ + +import { TestMaster } from '../src/index'; +import { Token } from '../src/types'; +import { shouldChangeTokenBalances, shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; + +import * as zksync from 'zksync-web3'; +import { BigNumber, utils as etherUtils } from 'ethers'; +import * as ethers from 'ethers'; +import { scaledGasPrice, waitUntilBlockFinalized } from '../src/helpers'; +import { L2_ETH_PER_ACCOUNT } from '../src/context-owner'; + +describe('ERC20 contract checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let bob: zksync.Wallet; + let tokenDetails: Token; + let aliceErc20: zksync.Contract; + + beforeAll(async () => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + bob = testMaster.newEmptyAccount(); + + tokenDetails = testMaster.environment().erc20Token; + aliceErc20 = new zksync.Contract(tokenDetails.l2Address, zksync.utils.IERC20, alice); + }); + + test('Token properties are correct', async () => { + expect(aliceErc20.name()).resolves.toBe(tokenDetails.name); + expect(aliceErc20.decimals()).resolves.toBe(tokenDetails.decimals); + expect(aliceErc20.symbol()).resolves.toBe(tokenDetails.symbol); + expect(aliceErc20.balanceOf(alice.address)).resolves.bnToBeGt(0, 'Alice should have non-zero balance'); + }); + + test('Can perform a deposit', async () => { + const amount = 1; // 1 wei is enough. + const gasPrice = scaledGasPrice(alice); + + // Note: for L1 we should use L1 token address. 
+ const l1BalanceChange = await shouldChangeTokenBalances( + tokenDetails.l1Address, + [{ wallet: alice, change: -amount }], + { + l1: true + } + ); + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice); + await expect( + alice.deposit({ + token: tokenDetails.l1Address, + amount, + approveERC20: true, + approveOverrides: { + gasPrice + }, + overrides: { + gasPrice + } + }) + ).toBeAccepted([l1BalanceChange, l2BalanceChange, feeCheck]); + }); + + test('Can perform a transfer', async () => { + const value = BigNumber.from(200); + + const balanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: -value }, + { wallet: bob, change: value } + ]); + const feeCheck = await shouldOnlyTakeFee(alice); + + // Send transfer, it should succeed. + await expect(aliceErc20.transfer(bob.address, value)).toBeAccepted([balanceChange, feeCheck]); + }); + + test('Can perform a transfer to self', async () => { + const value = BigNumber.from(200); + + // When transferring to self, balance should not change. + const balanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [{ wallet: alice, change: 0 }]); + const feeCheck = await shouldOnlyTakeFee(alice); + await expect(aliceErc20.transfer(alice.address, value)).toBeAccepted([balanceChange, feeCheck]); + }); + + test('Incorrect transfer should revert', async () => { + const value = etherUtils.parseEther('1000000.0'); + + // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. + const gasLimit = await aliceErc20.estimateGas.transfer(bob.address, 1); + + // Balances should not change for this token. + const noBalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: 0 }, + { wallet: bob, change: 0 } + ]); + // Fee in ETH should be taken though. 
+ const feeTaken = await shouldOnlyTakeFee(alice); + + // Send transfer, it should revert due to lack of balance. + await expect(aliceErc20.transfer(bob.address, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + }); + + test('Transfer to zero address should revert', async () => { + const zeroAddress = ethers.constants.AddressZero; + const value = BigNumber.from(200); + + // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. + const gasLimit = await aliceErc20.estimateGas.transfer(bob.address, 1); + + // Balances should not change for this token. + const noBalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [{ wallet: alice, change: 0 }]); + // Fee in ETH should be taken though. + const feeTaken = await shouldOnlyTakeFee(alice); + + // Send transfer, it should revert because transfers to zero address are not allowed. + await expect(aliceErc20.transfer(zeroAddress, value, { gasLimit })).toBeReverted([noBalanceChange, feeTaken]); + }); + + test('Approve and transferFrom should work', async () => { + const approveAmount = 42; + const bobErc20 = new zksync.Contract(tokenDetails.l2Address, zksync.utils.IERC20, bob); + + // Fund bob's account to perform a transaction from it. 
+ await alice + .transfer({ to: bob.address, amount: L2_ETH_PER_ACCOUNT.div(8), token: zksync.utils.ETH_ADDRESS }) + .then((tx) => tx.wait()); + + await expect(aliceErc20.allowance(alice.address, bob.address)).resolves.bnToBeEq(0); + await expect(aliceErc20.approve(bob.address, approveAmount)).toBeAccepted(); + await expect(aliceErc20.allowance(alice.address, bob.address)).resolves.bnToBeEq(approveAmount); + await expect(bobErc20.transferFrom(alice.address, bob.address, approveAmount)).toBeAccepted(); + await expect(aliceErc20.allowance(alice.address, bob.address)).resolves.bnToBeEq(0); + }); + + test('Can perform a withdrawal', async () => { + if (testMaster.isFastMode()) { + return; + } + const amount = 1; + + const l2BalanceChange = await shouldChangeTokenBalances(tokenDetails.l2Address, [ + { wallet: alice, change: -amount } + ]); + const feeCheck = await shouldOnlyTakeFee(alice); + const withdrawalPromise = alice.withdraw({ token: tokenDetails.l2Address, amount }); + await expect(withdrawalPromise).toBeAccepted([l2BalanceChange, feeCheck]); + const withdrawalTx = await withdrawalPromise; + await withdrawalTx.waitFinalize(); + + // Note: For L1 we should use L1 token address. + const l1BalanceChange = await shouldChangeTokenBalances( + tokenDetails.l1Address, + [{ wallet: alice, change: amount }], + { + l1: true + } + ); + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted([l1BalanceChange]); + }); + + test('Should claim failed deposit', async () => { + if (testMaster.isFastMode()) { + return; + } + + const amount = 1; + const initialBalance = await alice.getBalanceL1(tokenDetails.l1Address); + // Deposit to the zero address is forbidden and should fail with the current implementation. 
+ const depositHandle = await alice.deposit({ + to: ethers.constants.AddressZero, + token: tokenDetails.l1Address, + amount, + approveERC20: true + }); + const l1Receipt = await depositHandle.waitL1Commit(); + + // L1 balance should change, but tx should fail in L2. + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.bnToBeEq(initialBalance.sub(amount)); + await expect(depositHandle).toBeReverted(); + + // Wait for tx to be finalized. + // `waitFinalize` is not used because it doesn't work as expected for failed transactions. + // It throws once it gets status == 0 in the receipt and doesn't wait for the finalization. + const l2Hash = zksync.utils.getL2HashFromPriorityOp(l1Receipt, await alice.provider.getMainContractAddress()); + const l2TxReceipt = await alice.provider.getTransactionReceipt(l2Hash); + await waitUntilBlockFinalized(alice, l2TxReceipt.blockNumber); + + // Claim failed deposit. + await expect(alice.claimFailedDeposit(l2Hash)).toBeAccepted(); + await expect(alice.getBalanceL1(tokenDetails.l1Address)).resolves.bnToBeEq(initialBalance); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); diff --git a/core/tests/ts-integration/tests/ether.test.ts b/core/tests/ts-integration/tests/ether.test.ts new file mode 100644 index 000000000000..f875f554b57c --- /dev/null +++ b/core/tests/ts-integration/tests/ether.test.ts @@ -0,0 +1,165 @@ +/** + * This suite contains tests checking our handling of Ether (such as depositing, checking `msg.value`, etc). 
+ */ + +import { TestMaster } from '../src/index'; +import { shouldChangeETHBalances, shouldOnlyTakeFee } from '../src/modifiers/balance-checker'; +import { checkReceipt } from '../src/modifiers/receipt-check'; + +import * as zksync from 'zksync-web3'; +import { BigNumber } from 'ethers'; +import { scaledGasPrice } from '../src/helpers'; + +const ETH_ADDRESS = zksync.utils.ETH_ADDRESS; + +describe('ETH token checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let bob: zksync.Wallet; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + bob = testMaster.newEmptyAccount(); + }); + + test('Can perform a deposit', async () => { + const amount = 1; // 1 wei is enough. + const gasPrice = scaledGasPrice(alice); + + // Unfortunately, since fee is taken in ETH, we must calculate the L1 ETH balance diff explicitly. + const l1EthBalanceBefore = await alice.getBalanceL1(); + // No need to check fee as the L1->L2 are free for now + const l2ethBalanceChange = await shouldChangeETHBalances([{ wallet: alice, change: amount }], { + noAutoFeeCheck: true + }); + const depositOp = alice.deposit({ + token: ETH_ADDRESS, + amount, + overrides: { + gasPrice + } + }); + await expect(depositOp).toBeAccepted([l2ethBalanceChange]); + + const depositFee = await depositOp + .then((op) => op.waitL1Commit()) + .then((receipt) => receipt.gasUsed.mul(receipt.effectiveGasPrice)); + const l1EthBalanceAfter = await alice.getBalanceL1(); + expect(l1EthBalanceBefore.sub(depositFee).sub(l1EthBalanceAfter)).bnToBeEq(amount); + }); + + test('Can perform a transfer (legacy)', async () => { + const LEGACY_TX_TYPE = 0; + const value = BigNumber.from(200); + + const ethBalanceChange = await shouldChangeETHBalances([ + { wallet: alice, change: -value }, + { wallet: bob, change: value } + ]); + const correctReceiptType = checkReceipt( + (receipt) => receipt.type == LEGACY_TX_TYPE, + 'Incorrect tx type in receipt' + ); + + await 
expect(alice.sendTransaction({ type: LEGACY_TX_TYPE, to: bob.address, value })).toBeAccepted([ + ethBalanceChange, + correctReceiptType + ]); + }); + + test('Can perform a transfer (EIP712)', async () => { + const value = BigNumber.from(200); + + const ethBalanceChange = await shouldChangeETHBalances([ + { wallet: alice, change: -value }, + { wallet: bob, change: value } + ]); + const correctReceiptType = checkReceipt( + (receipt) => receipt.type == zksync.utils.EIP712_TX_TYPE, + 'Incorrect tx type in receipt' + ); + + await expect(alice.sendTransaction({ type: zksync.utils.EIP712_TX_TYPE, to: bob.address, value })).toBeAccepted( + [ethBalanceChange, correctReceiptType] + ); + }); + + test('Can perform a transfer (EIP1559)', async () => { + const EIP1559_TX_TYPE = 2; + const value = BigNumber.from(200); + + const ethBalanceChange = await shouldChangeETHBalances([ + { wallet: alice, change: -value }, + { wallet: bob, change: value } + ]); + const correctReceiptType = checkReceipt( + (receipt) => receipt.type == EIP1559_TX_TYPE, + 'Incorrect tx type in receipt' + ); + + await expect(alice.sendTransaction({ type: EIP1559_TX_TYPE, to: bob.address, value })).toBeAccepted([ + ethBalanceChange, + correctReceiptType + ]); + }); + + test('Should reject transactions with access lists', async () => { + const EIP_2930_TX_TYPE = 0x01; + const EIP_1559_TX_TYPE = 0x02; + const value = BigNumber.from(200); + + await expect(alice.sendTransaction({ type: EIP_2930_TX_TYPE, to: bob.address, value })).toBeRejected( + 'access lists are not supported' + ); + + await expect( + alice.sendTransaction({ + type: EIP_1559_TX_TYPE, + to: bob.address, + value, + accessList: [{ address: '0x0000000000000000000000000000000000000000', storageKeys: [] }] + }) + ).toBeRejected('access lists are not supported'); + }); + + test('Can perform a transfer to self', async () => { + const value = BigNumber.from(200); + + // Balance should not change, only fee should be taken. 
+ const ethBalanceChange = await shouldOnlyTakeFee(alice); + await expect(alice.sendTransaction({ to: alice.address, value })).toBeAccepted([ethBalanceChange]); + }); + + test('Incorrect transfer should revert', async () => { + // Attempt to transfer the whole Alice balance: there would be no enough balance to cover the fee. + const value = await alice.getBalance(); + + // Since gas estimation is expected to fail, we request gas limit for similar non-failing tx. + const gasLimit = await alice.estimateGas({ to: bob.address, value: 1 }); + + // Send transfer, it should be rejected due to lack of balance. + await expect(alice.sendTransaction({ to: bob.address, value, gasLimit })).toBeRejected( + 'insufficient funds for gas + value.' + ); + }); + + test('Can perform a withdrawal', async () => { + if (testMaster.isFastMode()) { + return; + } + const amount = 1; + + const l2ethBalanceChange = await shouldChangeETHBalances([{ wallet: alice, change: -amount }]); + const withdrawalPromise = alice.withdraw({ token: ETH_ADDRESS, amount }); + await expect(withdrawalPromise).toBeAccepted([l2ethBalanceChange]); + const withdrawalTx = await withdrawalPromise; + await withdrawalTx.waitFinalize(); + + await expect(alice.finalizeWithdrawal(withdrawalTx.hash)).toBeAccepted(); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); diff --git a/core/tests/ts-integration/tests/l1.test.ts b/core/tests/ts-integration/tests/l1.test.ts new file mode 100644 index 000000000000..8eab00bec54c --- /dev/null +++ b/core/tests/ts-integration/tests/l1.test.ts @@ -0,0 +1,373 @@ +/** + * This suite contains tests checking the interaction with L1. + * + * !WARN! Tests that interact with L1 may be very time consuming on stage. + * Please only do the minimal amount of actions to test the behavior (e.g. no unnecessary deposits/withdrawals + * and waiting for the block finalization). 
+ */ +import { TestMaster } from '../src/index'; +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import { deployContract, getTestContract, scaledGasPrice, waitForNewL1Batch } from '../src/helpers'; +import { getHashedL2ToL1Msg, L1_MESSENGER, L1_MESSENGER_ADDRESS } from 'zksync-web3/build/src/utils'; + +const SYSTEM_CONFIG = require(`${process.env.ZKSYNC_HOME}/contracts/SystemConfig.json`); + +const contracts = { + counter: getTestContract('Counter'), + errors: getTestContract('SimpleRequire'), + context: getTestContract('Context'), + writesAndMessages: getTestContract('WritesAndMessages') +}; + +// Sane amount of L2 gas enough to process a transaction. +const DEFAULT_L2_GAS_LIMIT = 5000000; + +describe('Tests for L1 behavior', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + + let counterContract: zksync.Contract; + let contextContract: zksync.Contract; + let errorContract: zksync.Contract; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + }); + + test('Should deploy required contracts', async () => { + // We will need to call several different contracts, so it's better to deploy all them + // as a separate step. 
+ counterContract = await deployContract(alice, contracts.counter, []); + contextContract = await deployContract(alice, contracts.context, []); + errorContract = await deployContract(alice, contracts.errors, []); + }); + + test('Should request L1 execute', async () => { + const calldata = counterContract.interface.encodeFunctionData('increment', ['1']); + const gasPrice = scaledGasPrice(alice); + + await expect( + alice.requestExecute({ + contractAddress: counterContract.address, + calldata, + l2GasLimit: DEFAULT_L2_GAS_LIMIT, + overrides: { + gasPrice + } + }) + ).toBeAccepted([]); + }); + + test('Should request L1 execute with msg.value', async () => { + const l2Value = 10; + const calldata = contextContract.interface.encodeFunctionData('requireMsgValue', [l2Value]); + const gasPrice = scaledGasPrice(alice); + + await expect( + alice.requestExecute({ + contractAddress: contextContract.address, + calldata, + l2GasLimit: DEFAULT_L2_GAS_LIMIT, + l2Value, + overrides: { + gasPrice + } + }) + ).toBeAccepted([]); + }); + + test('Should fail requested L1 execute', async () => { + const calldata = errorContract.interface.encodeFunctionData('require_short', []); + const gasPrice = scaledGasPrice(alice); + + await expect( + alice.requestExecute({ + contractAddress: errorContract.address, + calldata, + l2GasLimit: DEFAULT_L2_GAS_LIMIT, + overrides: { + gasPrice + } + }) + ).toBeReverted([]); + }); + + test('Should send L2->L1 messages', async () => { + if (testMaster.isFastMode()) { + return; + } + const contract = new zksync.Contract(L1_MESSENGER_ADDRESS, L1_MESSENGER, alice); + + // Send message to L1 and wait until it gets there. + const message = ethers.utils.toUtf8Bytes('Some L2->L1 message'); + const tx = await contract.sendToL1(message); + const receipt = await tx.waitFinalize(); + + // Get the proof for the sent message from the server, expect it to exist. 
+ const l2ToL1LogIndex = receipt.l2ToL1Logs.findIndex( + (log: zksync.types.L2ToL1Log) => log.sender == L1_MESSENGER_ADDRESS + ); + const msgProof = await alice.provider.getLogProof(tx.hash, l2ToL1LogIndex); + expect(msgProof).toBeTruthy(); + + // Ensure that received proof matches the provided root hash. + const { id, proof, root } = msgProof!; + const accumutatedRoot = calculateAccumulatedRoot(alice.address, message, receipt.l1BatchTxIndex, id, proof); + expect(accumutatedRoot).toBe(root); + + // Ensure that provided proof is accepted by the main zkSync contract. + const zkSyncContract = await alice.getMainContract(); + const acceptedByContract = await zkSyncContract.proveL2MessageInclusion( + receipt.l1BatchNumber, + id, + { + txNumberInBlock: receipt.l1BatchTxIndex, + sender: alice.address, + data: message + }, + proof + ); + expect(acceptedByContract).toBeTruthy(); + }); + + test('Should check max L2 gas limit for priority txs', async () => { + const gasPrice = scaledGasPrice(alice); + + const l2GasLimit = maxL2GasLimitForPriorityTxs(); + + // Check that the request with higher `gasLimit` fails. + let priorityOpHandle = await alice.requestExecute({ + contractAddress: alice.address, + calldata: '0x', + l2GasLimit: l2GasLimit + 1, + overrides: { + gasPrice + } + }); + let thrown = false; + try { + await priorityOpHandle.waitL1Commit(); + } catch { + thrown = true; + } + expect(thrown).toBeTruthy(); + + // Check that the request with `gasLimit` succeeds. + priorityOpHandle = await alice.requestExecute({ + contractAddress: alice.address, + calldata: '0x', + l2GasLimit, + overrides: { + gasPrice + } + }); + await priorityOpHandle.waitL1Commit(); + }); + + test('Should revert l1 tx with too many initial storage writes', async () => { + const contract = await deployContract(alice, contracts.writesAndMessages, []); + // The circuit allows us to have ~4700 initial writes for an L1 batch. + // We check that we will run out of gas if we do a bit smaller amount of writes. 
+ const calldata = contract.interface.encodeFunctionData('writes', [0, 4500, 1]); + const gasPrice = scaledGasPrice(alice); + + const l2GasLimit = maxL2GasLimitForPriorityTxs(); + + const priorityOpHandle = await alice.requestExecute({ + contractAddress: contract.address, + calldata, + l2GasLimit, + overrides: { + gasPrice + } + }); + // The request should be accepted on L1. + await priorityOpHandle.waitL1Commit(); + // The L2 tx should revert. + await expect(priorityOpHandle).toBeReverted(); + }); + + test('Should revert l1 tx with too many repeated storage writes', async () => { + const contract = await deployContract(alice, contracts.writesAndMessages, []); + // The circuit allows us to have ~7500 repeated writes for an L1 batch. + // We check that we will run out of gas if we do a bit smaller amount of writes. + // In order for writes to be repeated we should firstly write to the keys initially. + const initialWritesInOneTx = 500; + const repeatedWritesInOneTx = 7000; + const gasLimit = await contract.estimateGas.writes(0, initialWritesInOneTx, 1); + + let proms = []; + const nonce = await alice.getNonce(); + for (let i = 0; i < repeatedWritesInOneTx / initialWritesInOneTx; ++i) { + proms.push( + contract.writes(i * initialWritesInOneTx, initialWritesInOneTx, 1, { gasLimit, nonce: nonce + i }) + ); + } + const handles = await Promise.all(proms); + for (const handle of handles) { + await handle.wait(); + } + await waitForNewL1Batch(alice); + + const calldata = contract.interface.encodeFunctionData('writes', [0, repeatedWritesInOneTx, 2]); + const gasPrice = scaledGasPrice(alice); + + const l2GasLimit = maxL2GasLimitForPriorityTxs(); + + const priorityOpHandle = await alice.requestExecute({ + contractAddress: contract.address, + calldata, + l2GasLimit, + overrides: { + gasPrice + } + }); + // The request should be accepted on L1. + await priorityOpHandle.waitL1Commit(); + // The L2 tx should revert. 
+ await expect(priorityOpHandle).toBeReverted(); + }); + + test('Should revert l1 tx with too many l2 to l1 messages', async () => { + const contract = await deployContract(alice, contracts.writesAndMessages, []); + // The circuit allows us to have 512 L2->L1 logs for an L1 batch. + // We check that we will run out of gas if we send a bit smaller amount of L2->L1 logs. + const calldata = contract.interface.encodeFunctionData('l2_l1_messages', [500]); + const gasPrice = scaledGasPrice(alice); + + const l2GasLimit = maxL2GasLimitForPriorityTxs(); + + const priorityOpHandle = await alice.requestExecute({ + contractAddress: contract.address, + calldata, + l2GasLimit, + overrides: { + gasPrice + } + }); + // The request should be accepted on L1. + await priorityOpHandle.waitL1Commit(); + // The L2 tx should revert. + await expect(priorityOpHandle).toBeReverted(); + }); + + test('Should revert l1 tx with too big l2 to l1 message', async () => { + const contract = await deployContract(alice, contracts.writesAndMessages, []); + const MAX_PUBDATA_PER_BLOCK = ethers.BigNumber.from(SYSTEM_CONFIG['MAX_PUBDATA_PER_BLOCK']); + // We check that we will run out of gas if we send a bit + // smaller than `MAX_PUBDATA_PER_BLOCK` amount of pubdata in a single tx. + const calldata = contract.interface.encodeFunctionData('big_l2_l1_message', [ + MAX_PUBDATA_PER_BLOCK.mul(9).div(10) + ]); + const gasPrice = scaledGasPrice(alice); + + const l2GasLimit = maxL2GasLimitForPriorityTxs(); + + const priorityOpHandle = await alice.requestExecute({ + contractAddress: contract.address, + calldata, + l2GasLimit, + overrides: { + gasPrice + } + }); + // The request should be accepted on L1. + await priorityOpHandle.waitL1Commit(); + // The L2 tx should revert. + await expect(priorityOpHandle).toBeReverted(); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); + +/** + * Recreates the root hash of the merkle tree based on the provided proof. 
+ */ +function calculateAccumulatedRoot( + address: string, + message: Uint8Array, + l1BatchTxIndex: number, + id: number, + proof: string[] +): string { + let accumutatedRoot = getHashedL2ToL1Msg(address, message, l1BatchTxIndex); + + let idCopy = id; + for (const elem of proof) { + const bytes = + (idCopy & 1) == 0 + ? new Uint8Array([...ethers.utils.arrayify(accumutatedRoot), ...ethers.utils.arrayify(elem)]) + : new Uint8Array([...ethers.utils.arrayify(elem), ...ethers.utils.arrayify(accumutatedRoot)]); + + accumutatedRoot = ethers.utils.keccak256(bytes); + idCopy /= 2; + } + return accumutatedRoot; +} + +function maxL2GasLimitForPriorityTxs(): number { + // Find maximum `gasLimit` that satisfies `txBodyGasLimit <= CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT` + // using binary search. + let maxGasBodyLimit = +process.env.CONTRACTS_PRIORITY_TX_MAX_GAS_LIMIT!; + + const overhead = 0; + // const overhead = getOverheadForTransaction( + // ethers.BigNumber.from(maxGasBodyLimit), + // ethers.BigNumber.from(zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT), + // // We can just pass 0 as `encodingLength` because `overheadForPublicData` and `overheadForGas` + // // will be greater than `overheadForLength` for large `gasLimit`. 
+ // ethers.BigNumber.from(0) + // ); + return maxGasBodyLimit + overhead; +} + +// function getOverheadForTransaction( +// bodyGasLimit: ethers.BigNumber, +// gasPricePerPubdata: ethers.BigNumber, +// encodingLength: ethers.BigNumber +// ): number { +// const BLOCK_OVERHEAD_L2_GAS = ethers.BigNumber.from(SYSTEM_CONFIG['BLOCK_OVERHEAD_L2_GAS']); +// const L1_GAS_PER_PUBDATA_BYTE = ethers.BigNumber.from(SYSTEM_CONFIG['L1_GAS_PER_PUBDATA_BYTE']); +// const BLOCK_OVERHEAD_L1_GAS = ethers.BigNumber.from(SYSTEM_CONFIG['BLOCK_OVERHEAD_L1_GAS']); +// const BLOCK_OVERHEAD_PUBDATA = BLOCK_OVERHEAD_L1_GAS.div(L1_GAS_PER_PUBDATA_BYTE); + +// const MAX_TRANSACTIONS_IN_BLOCK = ethers.BigNumber.from(SYSTEM_CONFIG['MAX_TRANSACTIONS_IN_BLOCK']); +// const BOOTLOADER_TX_ENCODING_SPACE = ethers.BigNumber.from(SYSTEM_CONFIG['BOOTLOADER_TX_ENCODING_SPACE']); +// const MAX_PUBDATA_PER_BLOCK = ethers.BigNumber.from(SYSTEM_CONFIG['MAX_PUBDATA_PER_BLOCK']); +// const L2_TX_MAX_GAS_LIMIT = ethers.BigNumber.from(SYSTEM_CONFIG['L2_TX_MAX_GAS_LIMIT']); + +// const maxBlockOverhead = BLOCK_OVERHEAD_L2_GAS.add(BLOCK_OVERHEAD_PUBDATA.mul(gasPricePerPubdata)); + +// // The overhead from taking up the transaction's slot +// const txSlotOverhead = ceilDiv(maxBlockOverhead, MAX_TRANSACTIONS_IN_BLOCK); +// let blockOverheadForTransaction = txSlotOverhead; + +// // The overhead for occupying the bootloader memory can be derived from encoded_len +// const overheadForLength = ceilDiv(encodingLength.mul(maxBlockOverhead), BOOTLOADER_TX_ENCODING_SPACE); +// if (overheadForLength.gt(blockOverheadForTransaction)) { +// blockOverheadForTransaction = overheadForLength; +// } + +// // The overhead for possible published public data +// let maxPubdataInTx = ceilDiv(bodyGasLimit, gasPricePerPubdata); +// let overheadForPublicData = ceilDiv(maxPubdataInTx.mul(maxBlockOverhead), MAX_PUBDATA_PER_BLOCK); +// if (overheadForPublicData.gt(blockOverheadForTransaction)) { +// blockOverheadForTransaction = 
overheadForPublicData; +// } + +// // The overhead for gas that could be used to use single-instance circuits +// let overheadForSingleInstanceCircuits = ceilDiv(bodyGasLimit.mul(maxBlockOverhead), L2_TX_MAX_GAS_LIMIT); +// if (overheadForSingleInstanceCircuits.gt(blockOverheadForTransaction)) { +// blockOverheadForTransaction = overheadForSingleInstanceCircuits; +// } + +// return blockOverheadForTransaction.toNumber(); +// } + +// function ceilDiv(a: ethers.BigNumber, b: ethers.BigNumber): ethers.BigNumber { +// return a.add(b.sub(1)).div(b); +// } diff --git a/core/tests/ts-integration/tests/mempool.test.ts b/core/tests/ts-integration/tests/mempool.test.ts new file mode 100644 index 000000000000..d6c40bdcaacc --- /dev/null +++ b/core/tests/ts-integration/tests/mempool.test.ts @@ -0,0 +1,141 @@ +/** + * This suite contains tests checking the mempool behavior: how transactions are inserted, + * scheduled, processed and/or postponed. + */ +import { TestMaster } from '../src/index'; +import * as zksync from 'zksync-web3'; + +describe('Tests for the mempool behavior', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + }); + + test('Should allow a nonce gap', async () => { + // Here we check a basic case: first we send a transaction with nonce +1, then with valid nonce. + // Both transactions should be processed. + const startNonce = await alice.getTransactionCount(); + + const tx2 = await sendTxWithNonce(alice, startNonce + 1); + const tx1 = await sendTxWithNonce(alice, startNonce); + + await expect(tx1).toBeAccepted([]); + await expect(tx2).toBeAccepted([]); + // @ts-ignore + }, 600000); + + test('Should process shuffled nonces', async () => { + // More complex nonce mixup: we send 5 txs completely out of order. 
+ const startNonce = await alice.getTransactionCount(); + + const nonceOffsets = [4, 0, 3, 1, 2]; + const txs = nonceOffsets.map((offset) => sendTxWithNonce(alice, startNonce + offset).then((tx) => tx.wait())); + + // If any of txs would fail, it would throw. + // If txs would get stuck, test would be killed because of timeout. + await Promise.all(txs); + // @ts-ignore + }, 600000); + + test('Should discard too low nonce', async () => { + const startNonce = await alice.getTransactionCount(); + await expect(sendTxWithNonce(alice, startNonce - 1)).toBeRejected('nonce too low.'); + }); + + test('Should discard too big nonce', async () => { + const maxNonceAhead = 450; // Matches the server config. + const startNonce = await alice.getTransactionCount(); + await expect(sendTxWithNonce(alice, startNonce + maxNonceAhead + 1)).toBeRejected('nonce too high.'); + }); + + test('Should correctly show pending nonce', async () => { + const startNonce = await alice.getTransactionCount(); + // Send tx with nonce + 1 + const tx2 = await sendTxWithNonce(alice, startNonce + 1); + + // Nonce from API should not change (e.g. not become "nonce + 2"). + const nonce = await alice.getTransactionCount(); + expect(nonce).toEqual(startNonce); + + // Finish both transactions to not ruin the flow for other tests. + const tx1 = await sendTxWithNonce(alice, startNonce); + await Promise.all([tx1.wait(), tx2.wait()]); + }); + + test('Should replace the transaction', async () => { + const startNonce = await alice.getTransactionCount(); + // Send tx with nonce + 1 + const tx2 = await sendTxWithNonce(alice, startNonce + 1); + await expect(alice.provider.getTransaction(tx2.hash)).resolves.toMatchObject({ + nonce: startNonce + 1, + to: alice.address + }); + + // Change our mind, replace the transaction, while we can! 
+ const bob = testMaster.newEmptyAccount(); + const replacedTx2 = await sendTxWithNonce(alice, startNonce + 1, bob.address); + await expect(alice.provider.getTransaction(replacedTx2.hash)).resolves.toMatchObject({ + nonce: startNonce + 1, + to: bob.address + }); + // First transaction should disappear from the server. + await expect(alice.provider.getTransaction(tx2.hash)).resolves.toBeNull(); + + // Now fill the gap and see what gets executed + await sendTxWithNonce(alice, startNonce).then((tx) => tx.wait()); + const replacedReceipt = await replacedTx2.wait(); + + expect(replacedReceipt.to).toEqual(bob.address); + }); + + test('Should reject a pre-sent transaction with not enough balance', async () => { + // In this test we send tx with the nonce from the future that should be rejected, + // i.e. transaction should pass the API server, but be rejected once queried by the mempool. + // To do so we create an account that has balance to execute just one transaction, and + // send two transactions with `nonce + 1` and after that with `nonce`. + const poorBob = testMaster.newEmptyAccount(); + const nonce = 0; // No transactions from this account were sent. + + const gasForTransfer = await alice.estimateGas({ to: alice.address }); + const gasPrice = await alice.provider.getGasPrice(); + const fund = gasForTransfer.mul(gasPrice).mul(13).div(10); + await alice.sendTransaction({ to: poorBob.address, value: fund }).then((tx) => tx.wait()); + + // Create a *promise* that would await for the rejection. + // Even though we use `toBeReverted` matcher, we'll check that it's actually rejected based on the nonce. + // However, we use `await` on the `sendTransaction` to make sure that tx is past the API server checks. 
+ const rejectionCheckPromise = expect( + await poorBob.sendTransaction({ to: poorBob.address, nonce: nonce + 1 }) + ).toBeReverted(); + await expect(poorBob.sendTransaction({ to: poorBob.address, nonce })).toBeAccepted(); + await rejectionCheckPromise; + + // Now check that there is only one executed transaction for the account. + await expect(poorBob.getTransactionCount()).resolves.toEqual(1); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); + +/** + * Sends a valid zkSync transaction with a certain nonce. + * What transaction does is assumed to be not important besides the fact that it should be accepted. + * + * @param wallet Wallet to send transaction from. + * @param nonce Nonce to use + * @param to Optional recipient of the transaction. + * + * @returns Transaction request object. + */ +function sendTxWithNonce(wallet: zksync.Wallet, nonce: number, to?: string) { + return wallet.sendTransaction({ + to: to ?? wallet.address, + value: 1, + nonce + }); +} diff --git a/core/tests/ts-integration/tests/paymaster.test.ts b/core/tests/ts-integration/tests/paymaster.test.ts new file mode 100644 index 000000000000..56d1cbf83ddc --- /dev/null +++ b/core/tests/ts-integration/tests/paymaster.test.ts @@ -0,0 +1,384 @@ +/** + * This suite contains tests checking the behavior of paymasters -- entities that can cover fees for users. 
+ */ +import { TestMaster } from '../src/index'; +import * as zksync from 'zksync-web3'; +import { Provider, Wallet, utils } from 'zksync-web3'; +import * as ethers from 'ethers'; +import { deployContract, getTestContract } from '../src/helpers'; +import { L2_ETH_PER_ACCOUNT } from '../src/context-owner'; +import { checkReceipt } from '../src/modifiers/receipt-check'; +import { extractFee } from '../src/modifiers/balance-checker'; +import { TestMessage } from '../src/matchers/matcher-helpers'; +import { Address } from 'zksync-web3/build/src/types'; + +const contracts = { + customPaymaster: getTestContract('CustomPaymaster') +}; + +// The amount of tokens to transfer (in wei). +const AMOUNT = 1; + +// Exchange ratios for each 1 ETH wei +const CUSTOM_PAYMASTER_RATE_NUMERATOR = ethers.BigNumber.from(5); +const TESTNET_PAYMASTER_RATE_NUMERATOR = ethers.BigNumber.from(1); +const PAYMASTER_RATE_DENOMINATOR = ethers.BigNumber.from(1); + +describe('Paymaster tests', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + let paymaster: zksync.Contract; + let erc20Address: string; + let erc20: zksync.Contract; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + erc20Address = testMaster.environment().erc20Token.l2Address; + erc20 = new zksync.Contract( + erc20Address, + zksync.utils.IERC20, + // Signer doesn't matter for custom account transactions, as signature would be replaced with custom one. 
+ alice + ); + }); + + test('Should deploy a paymaster', async () => { + paymaster = await deployContract(alice, contracts.customPaymaster, []); + // Supplying paymaster with ETH it would need to cover the fees for the user + await alice.transfer({ to: paymaster.address, amount: L2_ETH_PER_ACCOUNT.div(4) }).then((tx) => tx.wait()); + }); + + test('Should pay fee with paymaster', async () => { + const correctSignature = new Uint8Array(46); + + const paymasterParamsForEstimation = await getTestPaymasterParamsForFeeEstimation( + erc20, + alice.address, + paymaster.address + ); + const tx = await erc20.populateTransaction.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + } + }); + tx.gasLimit = await erc20.estimateGas.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + } + }); + + const txPromise = sendTxWithTestPaymasterParams( + tx, + alice.provider, + alice, + paymaster.address, + erc20Address, + correctSignature + ); + await expect(txPromise).toBeAccepted([ + checkReceipt( + (receipt) => paidFeeWithPaymaster(receipt, CUSTOM_PAYMASTER_RATE_NUMERATOR, paymaster.address), + 'Fee was not paid (or paid incorrectly)' + ) + ]); + }); + + test('Should call postOp of the paymaster', async () => { + const correctSignature = new Uint8Array(46); + + const paymasterParamsForEstimation = await getTestPaymasterParamsForFeeEstimation( + erc20, + alice.address, + paymaster.address + ); + const tx = await erc20.populateTransaction.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + } + }); + tx.gasLimit = await erc20.estimateGas.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + 
} + }); + // We add 300k gas to make sure that the postOp is successfully called + // Note, that the successful call of the postOp is not guaranteed by the protocol & + // should not be required from the users. We still do it here for the purpose of the test. + tx.gasLimit = tx.gasLimit!.add(300000); + + const txPromise = sendTxWithTestPaymasterParams( + tx, + alice.provider, + alice, + paymaster.address, + erc20Address, + correctSignature + ); + await expect(txPromise).toBeAccepted([ + checkReceipt( + (receipt) => paidFeeWithPaymaster(receipt, CUSTOM_PAYMASTER_RATE_NUMERATOR, paymaster.address), + 'Fee was not paid (or paid incorrectly)' + ) + ]); + + const afterCounter = await paymaster.txCounter(); + const calledContextWithCounter = await paymaster.calledContext(afterCounter); + expect(calledContextWithCounter).toEqual(true); + }); + + test('Should pay fees with testnet paymaster', async () => { + // The testnet paymaster is not available on mainnet + if (testMaster.environment().network == 'mainnet') { + return; + } + + const testnetPaymaster = (await alice.provider.getTestnetPaymasterAddress())!; + expect(testnetPaymaster).toBeTruthy(); + + // Supplying paymaster with ETH it would need to cover the fees for the user + await alice.transfer({ to: testnetPaymaster, amount: L2_ETH_PER_ACCOUNT.div(4) }).then((tx) => tx.wait()); + + const tx = await erc20.populateTransaction.transfer(alice.address, AMOUNT); + const gasPrice = await alice.provider.getGasPrice(); + + const aliceERC20Balance = await erc20.balanceOf(alice.address); + const paramsForFeeEstimation = zksync.utils.getPaymasterParams(testnetPaymaster, { + type: 'ApprovalBased', + // For transaction estimation we provide the paymasterInput with large + // minimalAllowance. It is safe for the end users, since the transaction is never + // actually signed. 
+ minimalAllowance: aliceERC20Balance.sub(AMOUNT), + token: erc20Address, + // While the "correct" paymaster signature may not be available in the true mainnet + // paymasters, it is accessible in this test to make the test paymaster simpler. + // The amount that is passed does not matter, since the testnet paymaster does not enforce it + // to cover the fee for him. + innerInput: new Uint8Array() + }); + const gasLimit = await erc20.estimateGas.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paramsForFeeEstimation + } + }); + const fee = gasPrice.mul(gasLimit); + + const paymasterParams = utils.getPaymasterParams(testnetPaymaster, { + type: 'ApprovalBased', + token: erc20Address, + minimalAllowance: fee, + innerInput: new Uint8Array() + }); + const txPromise = alice.sendTransaction({ + ...tx, + maxFeePerGas: gasPrice, + maxPriorityFeePerGas: gasPrice, + gasLimit, + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams + } + }); + + await expect(txPromise).toBeAccepted([ + checkReceipt( + (receipt) => paidFeeWithPaymaster(receipt, TESTNET_PAYMASTER_RATE_NUMERATOR, testnetPaymaster), + 'Fee was not paid (or paid incorrectly)' + ) + ]); + }); + + test('Should reject tx with invalid paymaster input', async () => { + const paymasterParamsForEstimation = await getTestPaymasterParamsForFeeEstimation( + erc20, + alice.address, + paymaster.address + ); + const tx = await erc20.populateTransaction.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + } + }); + tx.gasLimit = await erc20.estimateGas.transfer(alice.address, AMOUNT, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParamsForEstimation + } + }); + + const incorrectSignature = new Uint8Array(45); + await expect( + sendTxWithTestPaymasterParams( + tx, + 
alice.provider, + alice, + paymaster.address, + erc20Address, + incorrectSignature + ) + ).toBeRejected('Paymaster validation error'); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); +}); + +/** + * Matcher modifer that checks if the fee was paid with the paymaster. + * It only checks the receipt logs and assumes that logs are correct (e.g. if event is present, tokens were moved). + * Assumption is that other tests ensure this invariant. + */ +function paidFeeWithPaymaster( + receipt: zksync.types.TransactionReceipt, + ratioNumerator: ethers.BigNumber, + paymaster: string +): boolean { + const errorMessage = (line: string) => { + return new TestMessage() + .matcherHint('.shouldBeAccepted.paidFeeWithPaymaster') + .line(line) + .line(`Transaction initiator:`) + .expected(receipt.from) + .line(`Paymaster address:`) + .expected(paymaster) + .line('Receipt') + .received(receipt) + .build(); + }; + + // So, if the fees were paid, there should be the following logs: + // 1. paymaster -> bootloader (fee amount in ETH) + // 2. initiator -> paymaster (converted fee amount in ERC20) + // Below we're looking for the 1st log, then convert it to the ERC20 log and look for it as well. + let fee; + try { + fee = extractFee(receipt, paymaster); + } catch (e) { + // No fee was paid by paymaster, test is failed. + expect(null).fail(errorMessage('Transaction did not have the ETH fee log')); + throw e; // Unreachable, needed to make ts happy. + } + const expectedErc20Fee = getTestPaymasterFeeInToken(fee.feeBeforeRefund, ratioNumerator); + + // Find the log showing that the fee in ERC20 was taken from the user. + // We need to pad values to represent 256-bit value. 
+ const fromAccountAddress = ethers.utils.hexZeroPad(ethers.utils.arrayify(receipt.from), 32); + const paddedAmount = ethers.utils.hexZeroPad(ethers.utils.arrayify(expectedErc20Fee), 32); + const paddedPaymaster = ethers.utils.hexZeroPad(ethers.utils.arrayify(paymaster), 32); + // ERC20 fee log is one that sends money to the paymaster. + const erc20TransferTopic = ethers.utils.id('Transfer(address,address,uint256)'); + const erc20FeeLog = receipt.logs.find((log) => { + return ( + log.topics.length == 3 && + log.topics[0] == erc20TransferTopic && + log.topics[1] == fromAccountAddress && + log.topics[2] == paddedPaymaster && + log.data == paddedAmount + ); + }); + if (!erc20FeeLog) { + // ERC20 token was not taken (or taken incorrectly) from the account. + expect(null).fail(errorMessage('Transaction did not have the ERC20 fee log (or the amount was incorrect)')); + throw new Error(); // Unreachable, needed to make ts happy. + } + + return true; +} + +function getTestPaymasterFeeInToken(feeInEth: ethers.BigNumber, numerator: ethers.BigNumber) { + // The number of ETH that the paymaster agrees to swap is equal to + // tokenAmount * exchangeRateNumerator / exchangeRateDenominator + // + // tokenAmount * exchangeRateNumerator / exchangeRateDenominator >= feeInEth + // tokenAmount >= feeInEth * exchangeRateDenominator / exchangeRateNumerator + // tokenAmount = ceil(feeInEth * exchangeRateDenominator / exchangeRateNumerator) + // for easier ceiling we do the following: + // tokenAmount = (ethNeeded * exchangeRateDenominator + exchangeRateNumerator - 1) / exchangeRateNumerator + return feeInEth.mul(PAYMASTER_RATE_DENOMINATOR).add(numerator).sub(1).div(numerator); +} + +function getTestPaymasterInnerInput(signature: ethers.BytesLike, tokenAmount: ethers.BigNumber) { + const abiEncoder = new ethers.utils.AbiCoder(); + return abiEncoder.encode( + ['bytes', 'uint256', 'uint256', 'uint256'], + [signature, CUSTOM_PAYMASTER_RATE_NUMERATOR, PAYMASTER_RATE_DENOMINATOR, tokenAmount] 
+ ); +} + +async function getTestPaymasterParamsForFeeEstimation( + erc20: ethers.Contract, + senderAddress: Address, + paymasterAddress: Address +): Promise { + // While the "correct" paymaster signature may not be available in the true mainnet + // paymasters, it is accessible in this test to make the test paymaster simpler. + const correctSignature = new Uint8Array(46); + + const aliceERC20Balance = await erc20.balanceOf(senderAddress); + const paramsForFeeEstimation = zksync.utils.getPaymasterParams(paymasterAddress, { + type: 'ApprovalBased', + // For transaction estimation we provide the paymasterInput with large + // minimalAllowance. It is safe for the end users, since the transaction is never + // actually signed. + minimalAllowance: aliceERC20Balance, + token: erc20.address, + // The amount that is passed does not matter, since the testnet paymaster does not enforce it + // to cover the fee for him. + innerInput: getTestPaymasterInnerInput(correctSignature, ethers.BigNumber.from(1)) + }); + + return paramsForFeeEstimation; +} + +function getTestPaymasterParams( + paymaster: string, + token: string, + ethNeeded: ethers.BigNumber, + signature: ethers.BytesLike +) { + const tokenAmount = getTestPaymasterFeeInToken(ethNeeded, CUSTOM_PAYMASTER_RATE_NUMERATOR); + // The input to the tester paymaster + const innerInput = getTestPaymasterInnerInput(signature, tokenAmount); + + return utils.getPaymasterParams(paymaster, { + type: 'ApprovalBased', + token, + minimalAllowance: tokenAmount, + innerInput + }); +} + +async function sendTxWithTestPaymasterParams( + tx: ethers.PopulatedTransaction, + web3Provider: Provider, + sender: Wallet, + paymasterAddress: string, + token: string, + paymasterSignature: ethers.BytesLike +) { + const gasPrice = await web3Provider.getGasPrice(); + + tx.gasPrice = gasPrice; + tx.chainId = parseInt(process.env.CHAIN_ETH_ZKSYNC_NETWORK_ID!, 10); + tx.value = ethers.BigNumber.from(0); + tx.nonce = await 
web3Provider.getTransactionCount(sender.address); + tx.type = 113; + + const ethNeeded = tx.gasLimit!.mul(gasPrice); + const paymasterParams = getTestPaymasterParams(paymasterAddress, token, ethNeeded, paymasterSignature); + + tx.customData = { + ...tx.customData, + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams + }; + const signedTx = await sender.signTransaction(tx); + return await web3Provider.sendTransaction(signedTx); +} diff --git a/core/tests/ts-integration/tests/self-unit.test.ts b/core/tests/ts-integration/tests/self-unit.test.ts new file mode 100644 index 000000000000..f59d66f1361f --- /dev/null +++ b/core/tests/ts-integration/tests/self-unit.test.ts @@ -0,0 +1,39 @@ +/** + * This file contains unit tests for the framework itself. + * It does not receive a funced account and should not interact with the zkSync server. + */ +import { TestMaster } from '../src/index'; +import { BigNumber } from 'ethers'; + +describe('Common checks for library invariants', () => { + test('Should not have a test master', () => { + // Should not receive a test account in the unit tests file. 
+ expect(() => TestMaster.getInstance(__filename)).toThrow('Wallet for self-unit.test.ts suite was not provided'); + }); + + test('BigNumber matchers should work', () => { + const hundred = BigNumber.from(100); + + // gt + expect(hundred).bnToBeGt(0); + expect(hundred).not.bnToBeGt(100); + + // gte + expect(hundred).bnToBeGte(0); + expect(hundred).bnToBeGte(100); + expect(hundred).not.bnToBeGte(200); + + // eq + expect(hundred).bnToBeEq(100); + expect(hundred).not.bnToBeEq(200); + + // lte + expect(hundred).not.bnToBeLte(90); + expect(hundred).bnToBeLte(100); + expect(hundred).bnToBeLte(101); + + // lt + expect(hundred).not.bnToBeLt(100); + expect(hundred).bnToBeLt(101); + }); +}); diff --git a/core/tests/ts-integration/tests/system.test.ts b/core/tests/ts-integration/tests/system.test.ts new file mode 100644 index 000000000000..e29f0bd44c15 --- /dev/null +++ b/core/tests/ts-integration/tests/system.test.ts @@ -0,0 +1,490 @@ +/** + * This suite contains tests checking the overall system behavior, e.g. not any particular topic, + * but rather how do we handle certain relatively unique situations. + * + * Stuff related to the edge cases, bootloader and system contracts normally expected to go here. 
+ */ + +import { TestMaster } from '../src/index'; +import { shouldChangeTokenBalances } from '../src/modifiers/balance-checker'; +import { L2_ETH_PER_ACCOUNT } from '../src/context-owner'; + +import * as zksync from 'zksync-web3'; +import * as ethers from 'ethers'; +import { BigNumberish, BytesLike } from 'ethers'; +import { serialize, hashBytecode } from 'zksync-web3/build/src/utils'; +import { deployOnAnyLocalAddress, ForceDeployment } from '../src/system'; +import { getTestContract } from '../src/helpers'; + +const contracts = { + counter: getTestContract('Counter'), + events: getTestContract('Emitter') +}; + +describe('System behavior checks', () => { + let testMaster: TestMaster; + let alice: zksync.Wallet; + + beforeAll(() => { + testMaster = TestMaster.getInstance(__filename); + alice = testMaster.mainAccount(); + }); + + test('Should check that system contracts and SDK create same CREATE/CREATE2 addresses', async () => { + const deployerContract = new zksync.Contract( + zksync.utils.CONTRACT_DEPLOYER_ADDRESS, + zksync.utils.CONTRACT_DEPLOYER, + alice.provider + ); + + const sender = zksync.Wallet.createRandom().address; + const hash = ethers.utils.randomBytes(32); + const salt = ethers.utils.randomBytes(32); + const input = ethers.utils.randomBytes(128); + const nonce = 5; + + const create2AddressBySDK = zksync.utils.create2Address(sender, hash, salt, input); + const create2AddressByDeployer = await deployerContract.getNewAddressCreate2(sender, hash, salt, input); + expect(create2AddressBySDK).toEqual(create2AddressByDeployer); + + const createAddressBySDK = zksync.utils.createAddress(sender, nonce); + const createAddressByDeployer = await deployerContract.getNewAddressCreate(sender, nonce); + expect(createAddressBySDK).toEqual(createAddressByDeployer); + }); + + test('Should accept transactions with small gasPerPubdataByte', async () => { + // The number "10" was chosen because we have a different error for lesser `smallGasPerPubdata`. 
+ const smallGasPerPubdata = 10; + const senderNonce = await alice.getTransactionCount(); + + // This tx should be accepted by the server, but would never be executed, so we don't wait for the receipt. + await alice.sendTransaction({ + to: alice.address, + customData: { + gasPerPubdata: smallGasPerPubdata + } + }); + + // Now send the next tx with the same nonce: it should override the previous one and be executed. + await expect( + alice.sendTransaction({ + to: alice.address, + nonce: senderNonce + }) + ).toBeAccepted([]); + }); + + test('Should check that bootloader utils: Legacy tx hash', async () => { + const bootloaderUtils = bootloaderUtilsContract(); + + // Testing the correctness of calculating the legacy tx hashes + const legacyTx = await alice.populateTransaction({ + type: 0, + to: alice.address, + from: alice.address, + data: '0x', + value: 0, + gasLimit: 50000 + }); + const txBytes = await alice.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const expectedTxHash = parsedTx.hash; + delete legacyTx.from; + const expectedSignedHash = ethers.utils.keccak256(serialize(legacyTx)); + + const proposedHashes = await bootloaderUtils.getTransactionHashes(txData); + expect(proposedHashes.txHash).toEqual(expectedTxHash); + expect(proposedHashes.signedTxHash).toEqual(expectedSignedHash); + }); + + test('Should check bootloader utils: EIP2930 tx hash', async () => { + const bootloaderUtils = bootloaderUtilsContract(); + + // Testing EIP2930 transactions + const eip2930Tx = await alice.populateTransaction({ + type: 1, + to: alice.address, + from: alice.address, + data: '0x', + value: 0, + gasLimit: 50000, + gasPrice: 55000 + }); + const signedEip2930Tx = await alice.signTransaction(eip2930Tx); + const parsedEIP2930tx = zksync.utils.parseTransaction(signedEip2930Tx); + + const EIP2930TxData = signedTxToTransactionData(parsedEIP2930tx)!; + delete eip2930Tx.from; + const 
expectedEIP2930TxHash = parsedEIP2930tx.hash; + const expectedEIP2930SignedHash = ethers.utils.keccak256(serialize(eip2930Tx)); + + const proposedEIP2930Hashes = await bootloaderUtils.getTransactionHashes(EIP2930TxData); + expect(proposedEIP2930Hashes.txHash).toEqual(expectedEIP2930TxHash); + expect(proposedEIP2930Hashes.signedTxHash).toEqual(expectedEIP2930SignedHash); + }); + + test('Should check bootloader utils: EIP1559 tx hash', async () => { + const bootloaderUtils = bootloaderUtilsContract(); + + // Testing EIP1559 transactions + const eip1559Tx = await alice.populateTransaction({ + type: 2, + to: alice.address, + from: alice.address, + data: '0x', + value: 0, + maxFeePerGas: 12000, + maxPriorityFeePerGas: 100 + }); + const signedEip1559Tx = await alice.signTransaction(eip1559Tx); + const parsedEIP1559tx = zksync.utils.parseTransaction(signedEip1559Tx); + + const EIP1559TxData = signedTxToTransactionData(parsedEIP1559tx)!; + delete eip1559Tx.from; + const expectedEIP1559TxHash = parsedEIP1559tx.hash; + const expectedEIP1559SignedHash = ethers.utils.keccak256(serialize(eip1559Tx)); + + const proposedEIP1559Hashes = await bootloaderUtils.getTransactionHashes(EIP1559TxData); + expect(proposedEIP1559Hashes.txHash).toEqual(expectedEIP1559TxHash); + expect(proposedEIP1559Hashes.signedTxHash).toEqual(expectedEIP1559SignedHash); + }); + + test('Should check bootloader utils: EIP712 tx hash', async () => { + const bootloaderUtils = bootloaderUtilsContract(); + + // EIP712 transaction hashes' test + const eip712Tx = await alice.populateTransaction({ + type: 113, + to: alice.address, + from: alice.address, + data: '0x', + value: 0, + maxFeePerGas: 12000, + maxPriorityFeePerGas: 100, + customData: { + gasPerPubdata: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT + } + }); + const signedEip712Tx = await alice.signTransaction(eip712Tx); + const parsedEIP712tx = zksync.utils.parseTransaction(signedEip712Tx); + + const eip712TxData = signedTxToTransactionData(parsedEIP712tx)!; 
+ const expectedEIP712TxHash = parsedEIP712tx.hash; + const expectedEIP712SignedHash = zksync.EIP712Signer.getSignedDigest(eip712Tx); + + const proposedEIP712Hashes = await bootloaderUtils.getTransactionHashes(eip712TxData); + + expect(proposedEIP712Hashes.txHash).toEqual(expectedEIP712TxHash); + expect(proposedEIP712Hashes.signedTxHash).toEqual(expectedEIP712SignedHash); + }); + + test('Should execute withdrawals with different parameters in one block', async () => { + // This test checks the SDK/system contracts (not even the server) behavior, and it's very time consuming, + // so it doesn't make sense to run it outside of the localhost environment. + if (testMaster.isFastMode()) { + return; + } + const bob = testMaster.newEmptyAccount(); + + const l2Token = testMaster.environment().erc20Token.l2Address; + const l1Token = testMaster.environment().erc20Token.l1Address; + const amount = 1; + + // Fund bob's account. + await alice.transfer({ amount, to: bob.address, token: l2Token }); + await alice + .transfer({ amount: L2_ETH_PER_ACCOUNT.div(8), to: bob.address, token: zksync.utils.ETH_ADDRESS }) + .then((tx) => tx.wait()); + + // Prepare matcher modifiers for L1 balance change. + const aliceChange = await shouldChangeTokenBalances(l1Token, [{ wallet: alice, change: amount }], { l1: true }); + const bobChange = await shouldChangeTokenBalances(l1Token, [{ wallet: bob, change: amount }], { l1: true }); + + // Maximize chances of including transactions into the same block by first creating both promises + // and only then awaiting them. This is still probabalistic though: if this test becomes flaky, + // most likely there exists a very big problem in the system. 
+ const aliceWithdrawalPromise = alice + .withdraw({ token: l2Token, amount }) + .then((response) => response.waitFinalize()); + const bobWithdrawalPromise = bob + .withdraw({ token: l2Token, amount }) + .then((response) => response.waitFinalize()); + + const [aliceReceipt, bobReceipt] = await Promise.all([aliceWithdrawalPromise, bobWithdrawalPromise]); + await expect(alice.finalizeWithdrawal(aliceReceipt.transactionHash)).toBeAccepted([aliceChange]); + await expect(alice.finalizeWithdrawal(bobReceipt.transactionHash)).toBeAccepted([bobChange]); + }); + + test('Should execute a the withdrawal with same parameters twice', async () => { + // This test is a logical copy of the previous one, but in this one we send two withdrawals from the same account + // It's skipped on the localhost for the same reason. + if (testMaster.isFastMode()) { + return; + } + + const l2Token = testMaster.environment().erc20Token.l2Address; + const l1Token = testMaster.environment().erc20Token.l1Address; + const amount = 1; + + // Prepare matcher modifiers. These modifiers would record the *current* Alice's balance, so after + // the first finalization the diff would be (compared to now) `amount`, and after the second -- `amount*2`. + const change1 = await shouldChangeTokenBalances(l1Token, [{ wallet: alice, change: amount }], { l1: true }); + const change2 = await shouldChangeTokenBalances(l1Token, [{ wallet: alice, change: amount * 2 }], { l1: true }); + + // Maximize chances of including transactions into the same block by first creating both promises + // and only then awaiting them. This is still probabalistic though: if this test becomes flaky, + // most likely there exists a very big problem in the system. 
+ const nonce = await alice.getTransactionCount(); + const withdrawal1 = alice + .withdraw({ token: l2Token, amount, overrides: { nonce } }) + .then((response) => response.waitFinalize()); + const withdrawal2 = alice + .withdraw({ token: l2Token, amount, overrides: { nonce: nonce + 1 } }) + .then((response) => response.waitFinalize()); + + const [receipt1, receipt2] = await Promise.all([withdrawal1, withdrawal2]); + await expect(alice.finalizeWithdrawal(receipt1.transactionHash)).toBeAccepted([change1]); + await expect(alice.finalizeWithdrawal(receipt2.transactionHash)).toBeAccepted([change2]); + }); + + test.skip('Should test forceDeploy', async () => { + // Testing forcedDeploys involves small upgrades of smart contacts. + // Thus, it is not appropriate to do them anywhere else except for localhost. + if (testMaster.environment().network !== 'localhost') { + return; + } + + const bytecodeHash = hashBytecode(contracts.counter.bytecode); + + // Force-deploying two counters on the address 0x100 and 0x101 + const forcedDeployments: ForceDeployment[] = [ + { + bytecodeHash, + newAddress: '0x0000000000000000000000000000000000000100', + value: ethers.BigNumber.from(0), + input: '0x', + callConstructor: true + }, + { + bytecodeHash, + newAddress: '0x0000000000000000000000000000000000000101', + value: ethers.BigNumber.from(0), + input: '0x', + callConstructor: true + } + ]; + + await testForcedDeployments(forcedDeployments, contracts.counter.bytecode); + + // Testing that the bytecodes work correctly + for (const deployment of forcedDeployments) { + const contract = new ethers.Contract(deployment.newAddress, contracts.counter.abi, alice); + + // Checking that the forced-deployed counter works well + await (await contract.set(1)).wait(); + expect(contract.get()).resolves.bnToBeEq(1); + } + + // We use it to check that overriding old bytecodes would work just as fine + // Here we use `contracts.events` contract, because it does not have a constructor and + // so will not 
override the storage + const eventsBytecode = contracts.events.bytecode; + await testForcedDeployments( + forcedDeployments.map((deployment) => ({ ...deployment, bytecodeHash: hashBytecode(eventsBytecode) })), + eventsBytecode + ); + // Checking that the methods of the `events` contract work + for (const deployment of forcedDeployments) { + const contract = new ethers.Contract(deployment.newAddress, contracts.events.abi, alice); + await (await contract.test(1)).wait(); + } + + await testForcedDeployments(forcedDeployments, contracts.counter.bytecode); + // Testing that the storage has been preserved + for (const deployment of forcedDeployments) { + const contract = new ethers.Contract(deployment.newAddress, contracts.counter.abi, alice); + + await (await contract.increment(1)).wait(); + expect(contract.get()).resolves.bnToBeEq(2); + } + }); + + it('should reject transaction with huge gas limit', async () => { + await expect( + alice.sendTransaction({ to: alice.address, gasLimit: ethers.BigNumber.from(2).pow(32) }) + ).toBeRejected('exceeds block gas limit'); + }); + + afterAll(async () => { + await testMaster.deinitialize(); + }); + + function bootloaderUtilsContract() { + const BOOTLOADER_UTILS_ADDRESS = '0x000000000000000000000000000000000000800c'; + const BOOTLOADER_UTILS = new ethers.utils.Interface( + require(`${process.env.ZKSYNC_HOME}/etc/system-contracts/artifacts-zk/cache-zk/solpp-generated-contracts/BootloaderUtilities.sol/BootloaderUtilities.json`).abi + ); + + return new ethers.Contract(BOOTLOADER_UTILS_ADDRESS, BOOTLOADER_UTILS, alice); + } + + async function testForcedDeployments(forcedDeployments: ForceDeployment[], bytecode: BytesLike) { + const receipt = await deployOnAnyLocalAddress(alice.providerL1!, alice.provider, forcedDeployments, [bytecode]); + + expect(receipt.status).toBe(1); + + // veryfing that the codes stored are correct + for (const deployment of forcedDeployments) { + const codeFromApi = await 
alice.provider.getCode(deployment.newAddress); + + // Testing that the API returns the correct bytecode + expect(deployment.bytecodeHash).toStrictEqual(hashBytecode(codeFromApi)); + } + } +}); + +// Interface encoding the transaction struct used for AA protocol +export interface TransactionData { + txType: BigNumberish; + from: BigNumberish; + to: BigNumberish; + gasLimit: BigNumberish; + gasPerPubdataByteLimit: BigNumberish; + maxFeePerGas: BigNumberish; + maxPriorityFeePerGas: BigNumberish; + paymaster: BigNumberish; + nonce: BigNumberish; + value: BigNumberish; + // In the future, we might want to add some + // new fields to the struct. The `txData` struct + // is to be passed to account and any changes to its structure + // would mean a breaking change to these accounts. In order to prevent this, + // we should keep some fields as "reserved". + // It is also recommneded that their length is fixed, since + // it would allow easier proof integration (in case we will need + // some special circuit for preprocessing transactions). + reserved: BigNumberish[]; + data: BytesLike; + signature: BytesLike; + factoryDeps: BytesLike[]; + paymasterInput: BytesLike; + // Reserved dynamic type for the future use-case. Using it should be avoided, + // But it is still here, just in case we want to enable some additional functionality. 
+ reservedDynamic: BytesLike; +} + +function signedTxToTransactionData(tx: ethers.Transaction) { + // Transform legacy transaction's `v` part of the signature + // to a single byte used in the packed eth signature + function unpackV(v: number) { + if (v >= 35) { + const chainId = Math.floor((v - 35) / 2); + return v - chainId * 2 - 8; + } else if (v <= 1) { + return 27 + v; + } + + throw new Error('Invalid `v`'); + } + + function legacyTxToTransactionData(tx: any): TransactionData { + return { + txType: 0, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.gasPrice!, + maxPriorityFeePerGas: tx.gasPrice!, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [tx.chainId || 0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, new Uint8Array([unpackV(tx.v)])]), + factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip2930TxToTransactionData(tx: any): TransactionData { + return { + txType: 1, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.gasPrice!, + maxPriorityFeePerGas: tx.gasPrice!, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, unpackV(tx.v)]), + factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip1559TxToTransactionData(tx: any): TransactionData { + return { + txType: 2, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.maxFeePerGas, + maxPriorityFeePerGas: tx.maxPriorityFeePerGas, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, unpackV(tx.v)]), + 
factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip712TxToTransactionData(tx: any): TransactionData { + return { + txType: 113, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: tx.customData.gasPerPubdata || zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.maxFeePerGas, + maxPriorityFeePerGas: tx.maxPriorityFeePerGas, + paymaster: tx.customData.paymasterParams?.paymaster || 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: tx.customData.customSignature, + factoryDeps: tx.customData.factoryDeps.map(hashBytecode), + paymasterInput: tx.customData.paymasterParams?.paymasterInput || '0x', + reservedDynamic: '0x' + }; + } + + const txType = tx.type ?? 0; + + switch (txType) { + case 0: + return legacyTxToTransactionData(tx); + case 1: + return eip2930TxToTransactionData(tx); + case 2: + return eip1559TxToTransactionData(tx); + case 113: + return eip712TxToTransactionData(tx); + default: + throw new Error('Unsupported tx type'); + } +} diff --git a/core/tests/ts-integration/tsconfig.json b/core/tests/ts-integration/tsconfig.json new file mode 100644 index 000000000000..baf2b2d0a791 --- /dev/null +++ b/core/tests/ts-integration/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "target": "es2019", + "module": "commonjs", + "esModuleInterop": true, + "strict": true, + "skipLibCheck": true, + "noEmitOnError": true + }, + "include": [ + "**/*.ts" + ], + "exclude": [ + "node_modules" + ] +} diff --git a/core/tests/ts-integration/typings/jest.d.ts b/core/tests/ts-integration/typings/jest.d.ts new file mode 100644 index 000000000000..de67acafab1c --- /dev/null +++ b/core/tests/ts-integration/typings/jest.d.ts @@ -0,0 +1,101 @@ +import { BigNumberish } from 'ethers'; +import { MatcherModifier } from '../src/matchers/transaction-modifiers'; + +export declare global { + namespace jest { + interface Matchers { + // Generic matchers + + 
/** + * Fails the test with the provided message + * + * Note: `expect` expects a parameter to be provided, so you can pass anything there, + * argument won't be used. + * For example, `expect(null).fail("This shouldn't have happened!");`. + * + * @param message Message to be displayed in Jest output. + */ + fail(message: string): R; + + // BigNumber matchers + + /** + * Checks if initial number is greater than the provided one. + * + * @param r Number to be checked against. + * @param additionalInfo Optional message to be included if test fails. + */ + bnToBeGt(r: BigNumberish, additionalInfo?: string): R; + /** + * Checks if initial number is greater than or equal to the provided one. + * + * @param r Number to be checked against. + * @param additionalInfo Optional message to be included if test fails. + */ + bnToBeGte(r: BigNumberish, additionalInfo?: string): R; + /** + * Checks if initial number is equals the provided one. + * + * @param r Number to be checked against. + * @param additionalInfo Optional message to be included if test fails. + */ + bnToBeEq(r: BigNumberish, additionalInfo?: string): R; + /** + * Checks if initial number is less than the provided one. + * + * @param r Number to be checked against. + * @param additionalInfo Optional message to be included if test fails. + */ + bnToBeLt(r: BigNumberish, additionalInfo?: string): R; + /** + * Checks if initial number is less than or equal to the provided one. + * + * @param r Number to be checked against. + * @param additionalInfo Optional message to be included if test fails. + */ + bnToBeLte(r: BigNumberish, additionalInfo?: string): R; + + // Ethereum primitives matchers + + /** + * Checks if value represents a valid Ethereum address + * + * @param additionalInfo Optional message to be included if test fails. + */ + toBeAddress(additionalInfo?: string): R; + /** + * Checks if value represents a valid Ethereum address + * + * @param additionalInfo Optional message to be included if test fails. 
+ */ + toBeHexString(additionalInfo?: string): R; + + // Transaction matchers + + /** + * Checks that transaction is successfully executed by the server. + * Does NOT support `.not` modifier. Use `toBeRejected` or `toBeReverted` instead. + * + * @param modifiers Optional list of transaction matcher modifiers to be applied to a receipt. + * @param additionalInfo Optional message to be included if test fails. + */ + toBeAccepted(modifiers?: MatcherModifier[], additionalInfo?: string): Promise; + /** + * Checks that transaction is executed by server, but execution results in a revert. + * Does NOT support `.not` modifier. Use `toBeAccepted` instead. + * + * @param modifiers Optional list of transaction matcher modifiers to be applied to a receipt. + * @param additionalInfo Optional message to be included if test fails. + */ + toBeReverted(modifiers?: MatcherModifier[], additionalInfo?: string): Promise; + /** + * Checks that transaction is rejected by the API server. + * Does NOT support `.not` modifier. Use `toBeAccepted` instead. + * + * @param errorSubstring Optional part of the error message that should be present in the API response. + * @param additionalInfo Optional message to be included if test fails. + */ + toBeRejected(errorSubstring?: string, additionalInfo?: string): Promise; + } + } +} diff --git a/core/tests/ts-integration/yarn.lock b/core/tests/ts-integration/yarn.lock new file mode 100644 index 000000000000..43cd4fea1a4b --- /dev/null +++ b/core/tests/ts-integration/yarn.lock @@ -0,0 +1,3092 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.19.1.tgz#72d647b4ff6a4f82878d184613353af1dd0290f9" + integrity sha512-72a9ghR0gnESIa7jBN53U32FOVCEoztyIlKaNoU05zRhEecduGK9L9c3ww7Mp06JiR+0ls0GBPFJQwwtjn9ksg== + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.1.tgz#c8fa615c5e88e272564ace3d42fbc8b17bfeb22b" + integrity sha512-1H8VgqXme4UXCRv7/Wa1bq7RVymKOzC7znjyFM8KiEzwFqcKUKYNoQef4GhdklgNvoBXyW4gYhuBNCM5o1zImw== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.0" + "@babel/helper-compilation-targets" "^7.19.1" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.1" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.1" + "@babel/types" "^7.19.0" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.19.0", "@babel/generator@^7.7.2": + version "7.19.0" + resolved 
"https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.0.tgz#785596c06425e59334df2ccee63ab166b738419a" + integrity sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg== + dependencies: + "@babel/types" "^7.19.0" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.1.tgz#7f630911d83b408b76fe584831c98e5395d7a17c" + integrity sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg== + dependencies: + "@babel/compat-data" "^7.19.1" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.8.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" + integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved 
"https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helpers@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + integrity sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" 
"^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.19.1.tgz#6f6d6c2e621aad19a92544cc217ed13f1aac5b4c" + integrity sha512-h7RCSorm1DdTVGJf3P2Mhj3kdnkmF/EiysUkzS2TdgAYqyjFdMQJbVuXOBej2SBJaXan/lIVtT6KkGbyyq753A== + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + 
version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.7.2": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + 
+"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.7.2": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" + integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + 
+"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.1", "@babel/traverse@^7.7.2": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.19.1.tgz#0fafe100a8c2a603b4718b1d9bf2568d1d193347" + integrity sha512-0j/ZfZMxKukDaag2PtOPDbwuELqIar6lLskVPPJDjXMXjfLb1Obo/1yjxIGqqAJrmfaTIY3z2wFLAQ7qSkLsuA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.1" + "@babel/types" "^7.19.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.19.0.tgz#75f21d73d73dc0351f3368d28db73465f4814600" + integrity sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved 
"https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + +"@ethersproject/abi@5.7.0", "@ethersproject/abi@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" + integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/abstract-provider@5.7.0", "@ethersproject/abstract-provider@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" + integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + +"@ethersproject/abstract-signer@5.7.0", "@ethersproject/abstract-signer@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" + integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + 
"@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + +"@ethersproject/address@5.7.0", "@ethersproject/address@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" + integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + +"@ethersproject/base64@5.7.0", "@ethersproject/base64@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" + integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + +"@ethersproject/basex@5.7.0", "@ethersproject/basex@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.7.0.tgz#97034dc7e8938a8ca943ab20f8a5e492ece4020b" + integrity sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + +"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" + integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + bn.js "^5.2.1" + +"@ethersproject/bytes@5.7.0", "@ethersproject/bytes@^5.7.0": + version "5.7.0" + resolved 
"https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" + integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/constants@5.7.0", "@ethersproject/constants@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" + integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + +"@ethersproject/contracts@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e" + integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg== + dependencies: + "@ethersproject/abi" "^5.7.0" + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + +"@ethersproject/hash@5.7.0", "@ethersproject/hash@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" + integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" 
"^5.7.0" + +"@ethersproject/hdnode@5.7.0", "@ethersproject/hdnode@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.7.0.tgz#e627ddc6b466bc77aebf1a6b9e47405ca5aef9cf" + integrity sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" + +"@ethersproject/json-wallets@5.7.0", "@ethersproject/json-wallets@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.7.0.tgz#5e3355287b548c32b368d91014919ebebddd5360" + integrity sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + aes-js "3.0.0" + scrypt-js "3.0.1" + +"@ethersproject/keccak256@5.7.0", "@ethersproject/keccak256@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" + integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + js-sha3 "0.8.0" + 
+"@ethersproject/logger@5.7.0", "@ethersproject/logger@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" + integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== + +"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" + integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/pbkdf2@5.7.0", "@ethersproject/pbkdf2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.7.0.tgz#d2267d0a1f6e123f3771007338c47cccd83d3102" + integrity sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + +"@ethersproject/properties@5.7.0", "@ethersproject/properties@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" + integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/providers@5.7.1": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.1.tgz#b0799b616d5579cd1067a8ebf1fc1ec74c1e122c" + integrity sha512-vZveG/DLyo+wk4Ga1yx6jSEHrLPgmTt+dFv0dv8URpVCRf0jVhalps1jq/emN/oXnMRsC7cQgAF32DcXLL7BPQ== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + 
"@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + bech32 "1.1.4" + ws "7.4.6" + +"@ethersproject/random@5.7.0", "@ethersproject/random@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.7.0.tgz#af19dcbc2484aae078bb03656ec05df66253280c" + integrity sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/rlp@5.7.0", "@ethersproject/rlp@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" + integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/sha2@5.7.0", "@ethersproject/sha2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.7.0.tgz#9a5f7a7824ef784f7f7680984e593a800480c9fb" + integrity sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + hash.js "1.1.7" + +"@ethersproject/signing-key@5.7.0", "@ethersproject/signing-key@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" + integrity 
sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + bn.js "^5.2.1" + elliptic "6.5.4" + hash.js "1.1.7" + +"@ethersproject/solidity@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8" + integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/strings@5.7.0", "@ethersproject/strings@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" + integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/transactions@5.7.0", "@ethersproject/transactions@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" + integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + +"@ethersproject/units@5.7.0": + version "5.7.0" + resolved 
"https://registry.yarnpkg.com/@ethersproject/units/-/units-5.7.0.tgz#637b563d7e14f42deeee39245275d477aae1d8b1" + integrity sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/wallet@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.7.0.tgz#4e5d0790d96fe21d61d38fb40324e6c7ef350b2d" + integrity sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + "@ethersproject/json-wallets" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" + +"@ethersproject/web@5.7.1", "@ethersproject/web@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" + integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== + dependencies: + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/wordlists@5.7.0", "@ethersproject/wordlists@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.7.0.tgz#8fb2c07185d68c3e09eb3bfd6e779ba2774627f5" + integrity 
sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.0.3.tgz#a222ab87e399317a89db88a58eaec289519e807a" + integrity sha512-cGg0r+klVHSYnfE977S9wmpuQ9L+iYuYgL+5bPXiUlUynLLYunRxswEmhBzvrSKGof5AKiHuTTmUKAqRcDY9dg== + dependencies: + "@jest/types" "^29.0.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^29.0.3" + jest-util "^29.0.3" + slash "^3.0.0" + +"@jest/core@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-29.0.3.tgz#ba22a9cbd0c7ba36e04292e2093c547bf53ec1fd" + integrity sha512-1d0hLbOrM1qQE3eP3DtakeMbKTcXiXP3afWxqz103xPyddS2NhnNghS7MaXx1dcDt4/6p4nlhmeILo2ofgi8cQ== + dependencies: + "@jest/console" "^29.0.3" + "@jest/reporters" "^29.0.3" + "@jest/test-result" "^29.0.3" + "@jest/transform" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + ci-info "^3.2.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^29.0.0" 
+ jest-config "^29.0.3" + jest-haste-map "^29.0.3" + jest-message-util "^29.0.3" + jest-regex-util "^29.0.0" + jest-resolve "^29.0.3" + jest-resolve-dependencies "^29.0.3" + jest-runner "^29.0.3" + jest-runtime "^29.0.3" + jest-snapshot "^29.0.3" + jest-util "^29.0.3" + jest-validate "^29.0.3" + jest-watcher "^29.0.3" + micromatch "^4.0.4" + pretty-format "^29.0.3" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.0.3.tgz#7745ec30a954e828e8cc6df6a13280d3b51d8f35" + integrity sha512-iKl272NKxYNQNqXMQandAIwjhQaGw5uJfGXduu8dS9llHi8jV2ChWrtOAVPnMbaaoDhnI3wgUGNDvZgHeEJQCA== + dependencies: + "@jest/fake-timers" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + jest-mock "^29.0.3" + +"@jest/expect-utils@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.0.3.tgz#f5bb86f5565bf2dacfca31ccbd887684936045b2" + integrity sha512-i1xUkau7K/63MpdwiRqaxgZOjxYs4f0WMTGJnYwUKubsNRZSeQbLorS7+I4uXVF9KQ5r61BUPAUMZ7Lf66l64Q== + dependencies: + jest-get-type "^29.0.0" + +"@jest/expect@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.0.3.tgz#9dc7c46354eeb7a348d73881fba6402f5fdb2c30" + integrity sha512-6W7K+fsI23FQ01H/BWccPyDZFrnU9QlzDcKOjrNVU5L8yUORFAJJIpmyxWPW70+X624KUNqzZwPThPMX28aXEQ== + dependencies: + expect "^29.0.3" + jest-snapshot "^29.0.3" + +"@jest/fake-timers@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.0.3.tgz#ad5432639b715d45a86a75c47fd75019bc36b22c" + integrity sha512-tmbUIo03x0TdtcZCESQ0oQSakPCpo7+s6+9mU19dd71MptkP4zCwoeZqna23//pgbhtT1Wq02VmA9Z9cNtvtCQ== + dependencies: + "@jest/types" "^29.0.3" + "@sinonjs/fake-timers" "^9.1.2" + "@types/node" "*" + jest-message-util "^29.0.3" + jest-mock "^29.0.3" + jest-util "^29.0.3" + +"@jest/globals@^29.0.3": + version "29.0.3" + resolved 
"https://registry.yarnpkg.com/@jest/globals/-/globals-29.0.3.tgz#681950c430fdc13ff9aa89b2d8d572ac0e4a1bf5" + integrity sha512-YqGHT65rFY2siPIHHFjuCGUsbzRjdqkwbat+Of6DmYRg5shIXXrLdZoVE/+TJ9O1dsKsFmYhU58JvIbZRU1Z9w== + dependencies: + "@jest/environment" "^29.0.3" + "@jest/expect" "^29.0.3" + "@jest/types" "^29.0.3" + jest-mock "^29.0.3" + +"@jest/reporters@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.0.3.tgz#735f110e08b44b38729d8dbbb74063bdf5aba8a5" + integrity sha512-3+QU3d4aiyOWfmk1obDerie4XNCaD5Xo1IlKNde2yGEi02WQD+ZQD0i5Hgqm1e73sMV7kw6pMlCnprtEwEVwxw== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^29.0.3" + "@jest/test-result" "^29.0.3" + "@jest/transform" "^29.0.3" + "@jest/types" "^29.0.3" + "@jridgewell/trace-mapping" "^0.3.15" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-message-util "^29.0.3" + jest-util "^29.0.3" + jest-worker "^29.0.3" + slash "^3.0.0" + string-length "^4.0.1" + strip-ansi "^6.0.0" + terminal-link "^2.0.0" + v8-to-istanbul "^9.0.1" + +"@jest/schemas@^29.0.0": + version "29.0.0" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" + integrity sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^29.0.0": + version "29.0.0" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.0.0.tgz#f8d1518298089f8ae624e442bbb6eb870ee7783c" + integrity sha512-nOr+0EM8GiHf34mq2GcJyz/gYFyLQ2INDhAylrZJ9mMWoW21mLBfZa0BUVPPMxVYrLjeiRe2Z7kWXOGnS0TFhQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.15" + callsites "^3.0.0" + graceful-fs "^4.2.9" 
+ +"@jest/test-result@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.0.3.tgz#b03d8ef4c58be84cd5d5d3b24d4b4c8cabbf2746" + integrity sha512-vViVnQjCgTmbhDKEonKJPtcFe9G/CJO4/Np4XwYJah+lF2oI7KKeRp8t1dFvv44wN2NdbDb/qC6pi++Vpp0Dlg== + dependencies: + "@jest/console" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.0.3.tgz#0681061ad21fb8e293b49c4fdf7e631ca79240ba" + integrity sha512-Hf4+xYSWZdxTNnhDykr8JBs0yBN/nxOXyUQWfotBUqqy0LF9vzcFB0jm/EDNZCx587znLWTIgxcokW7WeZMobQ== + dependencies: + "@jest/test-result" "^29.0.3" + graceful-fs "^4.2.9" + jest-haste-map "^29.0.3" + slash "^3.0.0" + +"@jest/transform@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.0.3.tgz#9eb1fed2072a0354f190569807d1250572fb0970" + integrity sha512-C5ihFTRYaGDbi/xbRQRdbo5ddGtI4VSpmL6AIcZxdhwLbXMa7PcXxxqyI91vGOFHnn5aVM3WYnYKCHEqmLVGzg== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^29.0.3" + "@jridgewell/trace-mapping" "^0.3.15" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.1.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.0.3" + jest-regex-util "^29.0.0" + jest-util "^29.0.3" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.0.3.tgz#0be78fdddb1a35aeb2041074e55b860561c8ef63" + integrity sha512-coBJmOQvurXjN1Hh5PzF7cmsod0zLIOXpP8KD161mqNlroMhLcwpODiEzi7ZsRl5Z/AIuxpeNm8DCl43F4kz8A== + dependencies: + "@jest/schemas" "^29.0.0" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + 
+"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity 
sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.15", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@sinclair/typebox@^0.24.1": + version "0.24.42" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.42.tgz#a74b608d494a1f4cc079738e050142a678813f52" + integrity sha512-d+2AtrHGyWek2u2ITF0lHRIv6Tt7X0dEHW+0rP+5aDCEjC3fiN2RBjrLD0yU0at52BcZbRGxLbAtXiR0hFCjYw== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^9.1.2": + version "9.1.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" + integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved 
"https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== + +"@types/babel__core@^7.1.14": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": + version "7.18.1" + 
resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.18.1.tgz#ce5e2c8c272b99b7a9fd69fa39f0b4cd85028bd9" + integrity sha512-FSdLaZh2UxaMuLp9lixWaHq/golWTRWOnRsAXzDTDSDOQLuZb1nsdCt6pJSPWSEQt2eFZ2YVk3oYhn+1kLMeMA== + dependencies: + "@babel/types" "^7.3.0" + +"@types/bn.js@^4.11.3": + version "4.11.6" + resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c" + integrity sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg== + dependencies: + "@types/node" "*" + +"@types/graceful-fs@^4.1.3": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" 
"*" + +"@types/jest@^29.0.3": + version "29.0.3" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.0.3.tgz#b61a5ed100850686b8d3c5e28e3a1926b2001b59" + integrity sha512-F6ukyCTwbfsEX5F2YmVYmM5TcTHy1q9P5rWlRbrk56KyMh3v9xRGUO3aa8+SkvMi0SHXtASJv1283enXimC0Og== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + +"@types/node-fetch@^2.5.7": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + +"@types/node@*": + version "18.7.18" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.7.18.tgz#633184f55c322e4fb08612307c274ee6d5ed3154" + integrity sha512-m+6nTEOadJZuTPkKR/SYK3A2d7FZrgElol9UP1Kae90VVU4a6mxnPuLiIW1m4Cq4gZ/nWb9GrdVXJCoCazDAbg== + +"@types/node@^14.14.5": + version "14.18.29" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.29.tgz#a0c58d67a42f8953c13d32f0acda47ed26dfce40" + integrity sha512-LhF+9fbIX4iPzhsRLpK5H7iPdvW8L4IwGciXQIOEcuF62+9nw/VQVsOViAOOGxY3OlOKGLFv0sWwJXdwQeTn6A== + +"@types/pbkdf2@^3.0.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@types/pbkdf2/-/pbkdf2-3.1.0.tgz#039a0e9b67da0cdc4ee5dab865caa6b267bb66b1" + integrity sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ== + dependencies: + "@types/node" "*" + +"@types/prettier@^2.1.5": + version "2.7.0" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.0.tgz#ea03e9f0376a4446f44797ca19d9c46c36e352dc" + integrity sha512-RI1L7N4JnW5gQw2spvL7Sllfuf1SaHdrZpCHiBlCXjIlufi1SMNnbu2teze3/QE67Fg2tBlH7W+mi4hVNk4p0A== + +"@types/secp256k1@^4.0.1": + version "4.0.3" + resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.3.tgz#1b8e55d8e00f08ee7220b4d59a6abe89c37a901c" + integrity 
sha512-Da66lEIFeIz9ltsdMZcpQvmrmmoqrfju8pm1BH8WbYjZSwUgCwXLb9C+9XYogwBITnbsSaMdVPb2ekf7TV+03w== + dependencies: + "@types/node" "*" + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.12" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.12.tgz#0745ff3e4872b4ace98616d4b7e37ccbd75f9526" + integrity sha512-Nz4MPhecOFArtm81gFQvQqdV7XYCrWKx5uUt6GNHredFHn1i2mtWqXTON7EPXMtNi1qjtjEM/VCHDhcHsAMLXQ== + dependencies: + "@types/yargs-parser" "*" + +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^8.4.1: + version "8.8.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== + +aes-js@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.0.0.tgz#e21df10ad6c2053295bcbb8dab40b09dbea87e4d" + integrity sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw== + +ansi-escapes@^4.2.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity 
sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^1.0.7: + version "1.0.10" + resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +babel-jest@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.0.3.tgz#64e156a47a77588db6a669a88dedff27ed6e260f" + integrity sha512-ApPyHSOhS/sVzwUOQIWJmdvDhBsMG01HX9z7ogtkp1TToHGGUWFlnXJUIzCgKPSfiYLn3ibipCYzsKSURHEwLg== + dependencies: + "@jest/transform" "^29.0.3" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^29.0.2" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^29.0.2: + version "29.0.2" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.0.2.tgz#ae61483a829a021b146c016c6ad39b8bcc37c2c8" + integrity sha512-eBr2ynAEFjcebVvu8Ktx580BD1QKCrBG1XwEUTXJe285p9HA/4hOhfWCFRQhTKSyBV0VzjhG7H91Eifz9s29hg== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.1.14" + "@types/babel__traverse" "^7.0.6" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^29.0.2: + version "29.0.2" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.0.2.tgz#e14a7124e22b161551818d89e5bdcfb3b2b0eac7" + integrity sha512-BeVXp7rH5TK96ofyEnHjznjLMQ2nAeDJ+QzxKnHAAMs0RgrQsCywjAN8m4mOm5Di0pxU//3AoEeJJrerMH5UeA== + dependencies: + babel-plugin-jest-hoist "^29.0.2" + babel-preset-current-node-syntax "^1.0.0" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base-x@^3.0.2: + version "3.0.9" + resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.9.tgz#6349aaabb58526332de9f60995e548a53fe21320" + integrity sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ== + dependencies: + safe-buffer "^5.0.1" + +bech32@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/bech32/-/bech32-1.1.4.tgz#e38c9f37bf179b8eb16ae3a772b40c356d4832e9" + 
integrity sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ== + +blakejs@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/blakejs/-/blakejs-1.2.1.tgz#5057e4206eadb4a97f7c0b6e197a505042fc3814" + integrity sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ== + +bn.js@^4.11.0, bn.js@^4.11.8, bn.js@^4.11.9: + version "4.12.0" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" + integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== + +bn.js@^5.2.0, bn.js@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" + integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +brorand@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== + +browserify-aes@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" + integrity 
sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== + dependencies: + buffer-xor "^1.0.3" + cipher-base "^1.0.0" + create-hash "^1.1.0" + evp_bytestokey "^1.0.3" + inherits "^2.0.1" + safe-buffer "^5.0.1" + +browserslist@^4.21.3: + version "4.21.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bs58@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/bs58/-/bs58-4.0.1.tgz#be161e76c354f6f788ae4071f63f34e8c4f0a42a" + integrity sha512-Ok3Wdf5vOIlBrgCvTq96gBkJw+JUEzdBgyaza5HLtPm7yTHkjRy8+JzNyHF7BHa0bNWOQIp3m5YF0nnFcOIKLw== + dependencies: + base-x "^3.0.2" + +bs58check@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/bs58check/-/bs58check-2.1.2.tgz#53b018291228d82a5aa08e7d796fdafda54aebfc" + integrity sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA== + dependencies: + bs58 "^4.0.0" + create-hash "^1.1.0" + safe-buffer "^5.1.2" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +buffer-xor@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" + integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ== + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30001400: + version "1.0.30001409" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001409.tgz#6135da9dcab34cd9761d9cdb12a68e6740c5e96e" + integrity sha512-V0mnJ5dwarmhYv8/MzhJ//aW68UpvnQBXv8lJ2QUsvn2pHcmAuNtu8hQEDz37XnA1iE+lRR9CIfGWWpgJ5QedQ== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.2" + resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +ci-info@^3.2.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.4.0.tgz#b28484fd436cbc267900364f096c9dc185efb251" + integrity sha512-t5QdPT5jq3o262DOQ8zA6E1tlH2upmUc4Hlvrbx1pGYJuiiHl7O7rvVNI+l8HTVhd/q3Qc9vqimkNk5yiXsAug== + +cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" + integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity 
sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +concat-map@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" + integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== + dependencies: + cipher-base "^1.0.1" + inherits "^2.0.1" + md5.js "^1.3.4" + ripemd160 "^2.0.1" + sha.js "^2.4.0" + +create-hmac@^1.1.4, create-hmac@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" + integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== + dependencies: + cipher-base "^1.0.3" + create-hash "^1.1.0" + inherits "^2.0.1" + ripemd160 "^2.0.0" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + 
dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +debug@^4.1.0, debug@^4.1.1: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^29.0.0: + version "29.0.0" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.0.0.tgz#bae49972ef3933556bcb0800b72e8579d19d9e4f" + integrity sha512-7Qe/zd1wxSDL4D/X/FPjOMB+ZMDt71W94KYaq05I2l0oQqgXgs7s4ftYYmV38gBSrPz2vcygxfs1xn0FT+rKNA== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + 
+electron-to-chromium@^1.4.251: + version "1.4.257" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.257.tgz#895dc73c6bb58d1235dc80879ecbca0bcba96e2c" + integrity sha512-C65sIwHqNnPC2ADMfse/jWTtmhZMII+x6ADI9gENzrOiI7BpxmfKFE84WkIEl5wEg+7+SfIkwChDlsd1Erju2A== + +elliptic@6.5.4, elliptic@^6.5.2, elliptic@^6.5.4: + version "6.5.4" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" + integrity sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== + dependencies: + bn.js "^4.11.9" + brorand "^1.1.0" + hash.js "^1.0.0" + hmac-drbg "^1.0.1" + inherits "^2.0.4" + minimalistic-assert "^1.0.1" + minimalistic-crypto-utils "^1.0.1" + +emittery@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +ethereum-cryptography@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz#8d6143cfc3d74bf79bbd8edecdf29e4ae20dd191" + integrity sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ== + dependencies: + "@types/pbkdf2" "^3.0.0" + "@types/secp256k1" "^4.0.1" + blakejs "^1.1.0" + browserify-aes "^1.2.0" + bs58check "^2.1.2" + create-hash "^1.2.0" + create-hmac "^1.1.7" + hash.js "^1.1.7" + keccak "^3.0.0" + pbkdf2 "^3.0.17" + randombytes "^2.1.0" + safe-buffer "^5.1.2" + scrypt-js "^3.0.0" + secp256k1 "^4.0.1" + setimmediate "^1.0.5" + +ethereumjs-abi@^0.6.8: + version "0.6.8" + resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz#71bc152db099f70e62f108b7cdfca1b362c6fcae" + integrity sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA== + dependencies: + bn.js "^4.11.8" + ethereumjs-util "^6.0.0" + +ethereumjs-util@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-6.2.1.tgz#fcb4e4dd5ceacb9d2305426ab1a5cd93e3163b69" + integrity 
sha512-W2Ktez4L01Vexijrm5EB6w7dg4n/TgpoYU4avuT5T3Vmnw/eCRtiBrJfQYS/DCSvDIOLn2k57GcHdeBcgVxAqw== + dependencies: + "@types/bn.js" "^4.11.3" + bn.js "^4.11.0" + create-hash "^1.1.2" + elliptic "^6.5.2" + ethereum-cryptography "^0.1.3" + ethjs-util "0.1.6" + rlp "^2.2.3" + +ethers@~5.7.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.7.1.tgz#48c83a44900b5f006eb2f65d3ba6277047fd4f33" + integrity sha512-5krze4dRLITX7FpU8J4WscXqADiKmyeNlylmmDLbS95DaZpBhDe2YSwRQwKXWNyXcox7a3gBgm/MkGXV1O1S/Q== + dependencies: + "@ethersproject/abi" "5.7.0" + "@ethersproject/abstract-provider" "5.7.0" + "@ethersproject/abstract-signer" "5.7.0" + "@ethersproject/address" "5.7.0" + "@ethersproject/base64" "5.7.0" + "@ethersproject/basex" "5.7.0" + "@ethersproject/bignumber" "5.7.0" + "@ethersproject/bytes" "5.7.0" + "@ethersproject/constants" "5.7.0" + "@ethersproject/contracts" "5.7.0" + "@ethersproject/hash" "5.7.0" + "@ethersproject/hdnode" "5.7.0" + "@ethersproject/json-wallets" "5.7.0" + "@ethersproject/keccak256" "5.7.0" + "@ethersproject/logger" "5.7.0" + "@ethersproject/networks" "5.7.1" + "@ethersproject/pbkdf2" "5.7.0" + "@ethersproject/properties" "5.7.0" + "@ethersproject/providers" "5.7.1" + "@ethersproject/random" "5.7.0" + "@ethersproject/rlp" "5.7.0" + "@ethersproject/sha2" "5.7.0" + "@ethersproject/signing-key" "5.7.0" + "@ethersproject/solidity" "5.7.0" + "@ethersproject/strings" "5.7.0" + "@ethersproject/transactions" "5.7.0" + "@ethersproject/units" "5.7.0" + "@ethersproject/wallet" "5.7.0" + "@ethersproject/web" "5.7.1" + "@ethersproject/wordlists" "5.7.0" + +ethjs-util@0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/ethjs-util/-/ethjs-util-0.1.6.tgz#f308b62f185f9fe6237132fb2a9818866a5cd536" + integrity sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w== + dependencies: + is-hex-prefixed "1.0.0" + strip-hex-prefix "1.0.0" + +evp_bytestokey@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" + integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== + dependencies: + md5.js "^1.3.4" + safe-buffer "^5.1.1" + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^29.0.0, expect@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/expect/-/expect-29.0.3.tgz#6be65ddb945202f143c4e07c083f4f39f3bd326f" + integrity sha512-t8l5DTws3212VbmPL+tBFXhjRHLmctHB0oQbL8eUc6S7NzZtYUhycrFO9mkxA0ZUC6FAWdNi7JchJSkODtcu1Q== + dependencies: + "@jest/expect-utils" "^29.0.3" + jest-get-type "^29.0.0" + jest-matcher-utils "^29.0.3" + jest-message-util "^29.0.3" + jest-util "^29.0.3" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fb-watchman@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity 
sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved 
"https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +glob@^7.1.3, glob@^7.1.4: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has-flag@^3.0.0: + 
version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hash-base@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" + integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== + dependencies: + inherits "^2.0.4" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" + +hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== + dependencies: + inherits "^2.0.3" + minimalistic-assert "^1.0.1" + +hmac-drbg@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg== + dependencies: + hash.js "^1.0.3" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.1" + +html-escaper@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-core-module@^2.9.0: + 
version "2.10.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-hex-prefixed@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz#7d8d37e6ad77e5d127148913c573e082d777f554" + integrity sha512-WvtOiug1VFrE9v1Cydwm+FnXd3+w9GaeVUss5W4v/SLy3UW00vP+6iNF2SdnfiBoLy4bTqVdkftNGTUeOFVsbA== + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" 
+ resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" + integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jest-changed-files@^29.0.0: + version "29.0.0" + resolved 
"https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.0.0.tgz#aa238eae42d9372a413dd9a8dadc91ca1806dce0" + integrity sha512-28/iDMDrUpGoCitTURuDqUzWQoWmOmOKOFST1mi2lwh62X4BFf6khgH3uSuo1e49X/UDjuApAj3w0wLOex4VPQ== + dependencies: + execa "^5.0.0" + p-limit "^3.1.0" + +jest-circus@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.0.3.tgz#90faebc90295291cfc636b27dbd82e3bfb9e7a48" + integrity sha512-QeGzagC6Hw5pP+df1+aoF8+FBSgkPmraC1UdkeunWh0jmrp7wC0Hr6umdUAOELBQmxtKAOMNC3KAdjmCds92Zg== + dependencies: + "@jest/environment" "^29.0.3" + "@jest/expect" "^29.0.3" + "@jest/test-result" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + is-generator-fn "^2.0.0" + jest-each "^29.0.3" + jest-matcher-utils "^29.0.3" + jest-message-util "^29.0.3" + jest-runtime "^29.0.3" + jest-snapshot "^29.0.3" + jest-util "^29.0.3" + p-limit "^3.1.0" + pretty-format "^29.0.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-cli@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.0.3.tgz#fd8f0ef363a7a3d9c53ef62e0651f18eeffa77b9" + integrity sha512-aUy9Gd/Kut1z80eBzG10jAn6BgS3BoBbXyv+uXEqBJ8wnnuZ5RpNfARoskSrTIy1GY4a8f32YGuCMwibtkl9CQ== + dependencies: + "@jest/core" "^29.0.3" + "@jest/test-result" "^29.0.3" + "@jest/types" "^29.0.3" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^29.0.3" + jest-util "^29.0.3" + jest-validate "^29.0.3" + prompts "^2.0.1" + yargs "^17.3.1" + +jest-config@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.0.3.tgz#c2e52a8f5adbd18de79f99532d8332a19e232f13" + integrity sha512-U5qkc82HHVYe3fNu2CRXLN4g761Na26rWKf7CjM8LlZB3In1jadEkZdMwsE37rd9RSPV0NfYaCjHdk/gu3v+Ew== + dependencies: + "@babel/core" "^7.11.6" + "@jest/test-sequencer" "^29.0.3" + "@jest/types" "^29.0.3" + babel-jest "^29.0.3" + chalk "^4.0.0" + ci-info 
"^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-circus "^29.0.3" + jest-environment-node "^29.0.3" + jest-get-type "^29.0.0" + jest-regex-util "^29.0.0" + jest-resolve "^29.0.3" + jest-runner "^29.0.3" + jest-util "^29.0.3" + jest-validate "^29.0.3" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^29.0.3" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.0.3.tgz#41cc02409ad1458ae1bf7684129a3da2856341ac" + integrity sha512-+X/AIF5G/vX9fWK+Db9bi9BQas7M9oBME7egU7psbn4jlszLFCu0dW63UgeE6cs/GANq4fLaT+8sGHQQ0eCUfg== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.0.0" + jest-get-type "^29.0.0" + pretty-format "^29.0.3" + +jest-docblock@^29.0.0: + version "29.0.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.0.0.tgz#3151bcc45ed7f5a8af4884dcc049aee699b4ceae" + integrity sha512-s5Kpra/kLzbqu9dEjov30kj1n4tfu3e7Pl8v+f8jOkeWNqM6Ds8jRaJfZow3ducoQUrf2Z4rs2N5S3zXnb83gw== + dependencies: + detect-newline "^3.0.0" + +jest-each@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.0.3.tgz#7ef3157580b15a609d7ef663dd4fc9b07f4e1299" + integrity sha512-wILhZfESURHHBNvPMJ0lZlYZrvOQJxAo3wNHi+ycr90V7M+uGR9Gh4+4a/BmaZF0XTyZsk4OiYEf3GJN7Ltqzg== + dependencies: + "@jest/types" "^29.0.3" + chalk "^4.0.0" + jest-get-type "^29.0.0" + jest-util "^29.0.3" + pretty-format "^29.0.3" + +jest-environment-node@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.0.3.tgz#293804b1e0fa5f0e354dacbe510655caa478a3b2" + integrity sha512-cdZqRCnmIlTXC+9vtvmfiY/40Cj6s2T0czXuq1whvQdmpzAnj4sbqVYuZ4zFHk766xTTJ+Ij3uUqkk8KCfXoyg== + dependencies: + "@jest/environment" "^29.0.3" + "@jest/fake-timers" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + jest-mock "^29.0.3" + jest-util "^29.0.3" + +jest-get-type@^29.0.0: + version 
"29.0.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.0.0.tgz#843f6c50a1b778f7325df1129a0fd7aa713aef80" + integrity sha512-83X19z/HuLKYXYHskZlBAShO7UfLFXu/vWajw9ZNJASN32li8yHMaVGAQqxFW1RCFOkB7cubaL6FaJVQqqJLSw== + +jest-haste-map@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.0.3.tgz#d7f3f7180f558d760eacc5184aac5a67f20ef939" + integrity sha512-uMqR99+GuBHo0RjRhOE4iA6LmsxEwRdgiIAQgMU/wdT2XebsLDz5obIwLZm/Psj+GwSEQhw9AfAVKGYbh2G55A== + dependencies: + "@jest/types" "^29.0.3" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^29.0.0" + jest-util "^29.0.3" + jest-worker "^29.0.3" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-leak-detector@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.0.3.tgz#e85cf3391106a7a250850b6766b508bfe9c7bc6f" + integrity sha512-YfW/G63dAuiuQ3QmQlh8hnqLDe25WFY3eQhuc/Ev1AGmkw5zREblTh7TCSKLoheyggu6G9gxO2hY8p9o6xbaRQ== + dependencies: + jest-get-type "^29.0.0" + pretty-format "^29.0.3" + +jest-matcher-utils@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.0.3.tgz#b8305fd3f9e27cdbc210b21fc7dbba92d4e54560" + integrity sha512-RsR1+cZ6p1hDV4GSCQTg+9qjeotQCgkaleIKLK7dm+U4V/H2bWedU3RAtLm8+mANzZ7eDV33dMar4pejd7047w== + dependencies: + chalk "^4.0.0" + jest-diff "^29.0.3" + jest-get-type "^29.0.0" + pretty-format "^29.0.3" + +jest-message-util@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.0.3.tgz#f0254e1ffad21890c78355726202cc91d0a40ea8" + integrity sha512-7T8JiUTtDfppojosORAflABfLsLKMLkBHSWkjNQrjIltGoDzNGn7wEPOSfjqYAGTYME65esQzMJxGDjuLBKdOg== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.0.3" + "@types/stack-utils" "^2.0.0" + 
chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.0.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.0.3.tgz#4f0093f6a9cb2ffdb9c44a07a3912f0c098c8de9" + integrity sha512-ort9pYowltbcrCVR43wdlqfAiFJXBx8l4uJDsD8U72LgBcetvEp+Qxj1W9ZYgMRoeAo+ov5cnAGF2B6+Oth+ww== + dependencies: + "@jest/types" "^29.0.3" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^29.0.0: + version "29.0.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.0.0.tgz#b442987f688289df8eb6c16fa8df488b4cd007de" + integrity sha512-BV7VW7Sy0fInHWN93MMPtlClweYv2qrSCwfeFWmpribGZtQPWNvRSq9XOVgOEjU1iBGRKXUZil0o2AH7Iy9Lug== + +jest-resolve-dependencies@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.0.3.tgz#f23a54295efc6374b86b198cf8efed5606d6b762" + integrity sha512-KzuBnXqNvbuCdoJpv8EanbIGObk7vUBNt/PwQPPx2aMhlv/jaXpUJsqWYRpP/0a50faMBY7WFFP8S3/CCzwfDw== + dependencies: + jest-regex-util "^29.0.0" + jest-snapshot "^29.0.3" + +jest-resolve@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.0.3.tgz#329a3431e3b9eb6629a2cd483e9bed95b26827b9" + integrity sha512-toVkia85Y/BPAjJasTC9zIPY6MmVXQPtrCk8SmiheC4MwVFE/CMFlOtMN6jrwPMC6TtNh8+sTMllasFeu1wMPg== + dependencies: + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.0.3" + jest-pnp-resolver "^1.2.2" + jest-util "^29.0.3" + jest-validate "^29.0.3" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^29.0.3: + version "29.0.3" + resolved 
"https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.0.3.tgz#2e47fe1e8777aea9b8970f37e8f83630b508fb87" + integrity sha512-Usu6VlTOZlCZoNuh3b2Tv/yzDpKqtiNAetG9t3kJuHfUyVMNW7ipCCJOUojzKkjPoaN7Bl1f7Buu6PE0sGpQxw== + dependencies: + "@jest/console" "^29.0.3" + "@jest/environment" "^29.0.3" + "@jest/test-result" "^29.0.3" + "@jest/transform" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.10.2" + graceful-fs "^4.2.9" + jest-docblock "^29.0.0" + jest-environment-node "^29.0.3" + jest-haste-map "^29.0.3" + jest-leak-detector "^29.0.3" + jest-message-util "^29.0.3" + jest-resolve "^29.0.3" + jest-runtime "^29.0.3" + jest-util "^29.0.3" + jest-watcher "^29.0.3" + jest-worker "^29.0.3" + p-limit "^3.1.0" + source-map-support "0.5.13" + +jest-runtime@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.0.3.tgz#5a823ec5902257519556a4e5a71a868e8fd788aa" + integrity sha512-12gZXRQ7ozEeEHKTY45a+YLqzNDR/x4c//X6AqwKwKJPpWM8FY4vwn4VQJOcLRS3Nd1fWwgP7LU4SoynhuUMHQ== + dependencies: + "@jest/environment" "^29.0.3" + "@jest/fake-timers" "^29.0.3" + "@jest/globals" "^29.0.3" + "@jest/source-map" "^29.0.0" + "@jest/test-result" "^29.0.3" + "@jest/transform" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^29.0.3" + jest-message-util "^29.0.3" + jest-mock "^29.0.3" + jest-regex-util "^29.0.0" + jest-resolve "^29.0.3" + jest-snapshot "^29.0.3" + jest-util "^29.0.3" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-snapshot@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.0.3.tgz#0a024706986a915a6eefae74d7343069d2fc8eef" + integrity sha512-52q6JChm04U3deq+mkQ7R/7uy7YyfVIrebMi6ZkBoDJ85yEjm/sJwdr1P0LOIEHmpyLlXrxy3QP0Zf5J2kj0ew== + dependencies: + "@babel/core" "^7.11.6" + "@babel/generator" "^7.7.2" + 
"@babel/plugin-syntax-jsx" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.3.3" + "@jest/expect-utils" "^29.0.3" + "@jest/transform" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/babel__traverse" "^7.0.6" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^29.0.3" + graceful-fs "^4.2.9" + jest-diff "^29.0.3" + jest-get-type "^29.0.0" + jest-haste-map "^29.0.3" + jest-matcher-utils "^29.0.3" + jest-message-util "^29.0.3" + jest-util "^29.0.3" + natural-compare "^1.4.0" + pretty-format "^29.0.3" + semver "^7.3.5" + +jest-util@^29.0.0, jest-util@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.0.3.tgz#06d1d77f9a1bea380f121897d78695902959fbc0" + integrity sha512-Q0xaG3YRG8QiTC4R6fHjHQPaPpz9pJBEi0AeOE4mQh/FuWOijFjGXMMOfQEaU9i3z76cNR7FobZZUQnL6IyfdQ== + dependencies: + "@jest/types" "^29.0.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.0.3.tgz#f9521581d7344685428afa0a4d110e9c519aeeb6" + integrity sha512-OebiqqT6lK8cbMPtrSoS3aZP4juID762lZvpf1u+smZnwTEBCBInan0GAIIhv36MxGaJvmq5uJm7dl5gVt+Zrw== + dependencies: + "@jest/types" "^29.0.3" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^29.0.0" + leven "^3.1.0" + pretty-format "^29.0.3" + +jest-watcher@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.0.3.tgz#8e220d1cc4f8029875e82015d084cab20f33d57f" + integrity sha512-tQX9lU91A+9tyUQKUMp0Ns8xAcdhC9fo73eqA3LFxP2bSgiF49TNcc+vf3qgGYYK9qRjFpXW9+4RgF/mbxyOOw== + dependencies: + "@jest/test-result" "^29.0.3" + "@jest/types" "^29.0.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^29.0.3" + string-length "^4.0.1" + +jest-worker@^29.0.3: + version "29.0.3" + 
resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.0.3.tgz#c2ba0aa7e41eec9eb0be8e8a322ae6518df72647" + integrity sha512-Tl/YWUugQOjoTYwjKdfJWkSOfhufJHO5LhXTSZC3TRoQKO+fuXnZAdoXXBlpLXKGODBL3OvdUasfDD4PcMe6ng== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/jest/-/jest-29.0.3.tgz#5227a0596d30791b2649eea347e4aa97f734944d" + integrity sha512-ElgUtJBLgXM1E8L6K1RW1T96R897YY/3lRYqq9uVcPWtP2AAl/nQ16IYDh/FzQOOQ12VEuLdcPU83mbhG2C3PQ== + dependencies: + "@jest/core" "^29.0.3" + "@jest/types" "^29.0.3" + import-local "^3.0.2" + jest-cli "^29.0.3" + +js-sha3@0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" + integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity 
sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +keccak@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/keccak/-/keccak-3.0.2.tgz#4c2c6e8c54e04f2670ee49fa734eb9da152206e0" + integrity sha512-PyKKjkH53wDMLGrvmRGSNWgmSxZOUqbnXwKL9tmgbFYA1iAYqW21kfR7mZXV0MlESiefxQQE9X9fTa3X+2MPDQ== + dependencies: + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + readable-stream "^3.6.0" + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity 
sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x, make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +md5.js@^1.3.4: + version "1.3.5" + resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" + integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +micromatch@^4.0.4: + version "4.0.5" + resolved 
"https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimalistic-crypto-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== + +minimatch@^3.0.4, minimatch@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== 
+ dependencies: + brace-expansion "^1.1.7" + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +node-addon-api@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" + integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== + +node-fetch@^2.6.1: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-gyp-build@^4.2.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" + integrity sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-path@^3.0.0: + version 
"3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + 
+p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parse-json@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +pbkdf2@^3.0.17: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" + integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== + 
dependencies: + create-hash "^1.1.2" + create-hmac "^1.1.4" + ripemd160 "^2.0.1" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +pretty-format@^29.0.0, pretty-format@^29.0.3: + version "29.0.3" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.0.3.tgz#23d5f8cabc9cbf209a77d49409d093d61166a811" + integrity sha512-cHudsvQr1K5vNVLbvYF/nv3Qy/F/BcEKxGuIeMiVMRHxPOO1RxXooP8g/ZrwAp7Dx+KdMZoOc7NxLHhMrP2f9Q== + dependencies: + "@jest/schemas" "^29.0.0" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +randombytes@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +react-is@^18.0.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +readable-stream@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity 
sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.20.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +ripemd160@^2.0.0, ripemd160@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" + integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + +rlp@^2.2.3: + version "2.2.7" + resolved "https://registry.yarnpkg.com/rlp/-/rlp-2.2.7.tgz#33f31c4afac81124ac4b283e2bd4d9720b30beaf" + integrity sha512-d5gdPmgQ0Z+AklL2NVXr/IoSjNZFfTVvQWzL/AM2AOcSzYP2xjlb0AC8YyCLc41MSNf6P6QVtjgPdmVtzb+4lQ== + dependencies: + bn.js "^5.2.0" + +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +scrypt-js@3.0.1, scrypt-js@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/scrypt-js/-/scrypt-js-3.0.1.tgz#d314a57c2aef69d1ad98a138a21fe9eafa9ee312" + integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== + 
+secp256k1@^4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.3.tgz#c4559ecd1b8d3c1827ed2d1b94190d69ce267303" + integrity sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA== + dependencies: + elliptic "^6.5.4" + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + +semver@7.x, semver@^7.3.5: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +setimmediate@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== + +sha.js@^2.4.0, sha.js@^2.4.8: + version "2.4.11" + resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +signal-exit@^3.0.3, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map-support@0.5.13: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stack-utils@^2.0.3: + version "2.0.5" + resolved 
"https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-hex-prefix@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz#0c5f155fef1151373377de9dbb588da05500e36f" + integrity sha512-q8d4ue7JGEiVcypji1bALTos+0pWtyGlivAWyPuTkHzuTCJqrK9sWxYQZUq6Nq3cuyv3bm734IhHvHtGGURU6A== + dependencies: + is-hex-prefixed "1.0.0" + +strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + 
integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +ts-jest@^29.0.1: + version "29.0.1" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.0.1.tgz#3296b39d069dc55825ce1d059a9510b33c718b86" + integrity sha512-htQOHshgvhn93QLxrmxpiQPk69+M1g7govO1g6kf6GsjCv4uvRV0znVmDrrvjUrVCnTYeY4FBxTYYYD4airyJA== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + jest-util "^29.0.0" + json5 "^2.2.1" + lodash.memoize "4.x" + make-error "1.x" + semver "7.x" + yargs-parser "^21.0.1" + +ts-node@^10.1.0: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +typescript@^4.3.5: + version 
"4.8.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.3.tgz#d59344522c4bc464a65a730ac695007fdb66dd88" + integrity sha512-goMHfm00nWPa8UvR/CPSvykqf6dVV8x/dp0c5mFTMTIu0u0FlGWRioyy7Nn0PGAdHxpJZnuO/ut+PpQ8UiHAig== + +update-browserslist-db@^1.0.9: + version "1.0.9" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz#2924d3927367a38d5c555413a7ce138fc95fcb18" + integrity sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +util-deprecate@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + +v8-to-istanbul@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" + integrity sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.12" + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + +ws@7.4.6: + version "7.4.6" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" + integrity 
sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yargs-parser@^21.0.0, yargs-parser@^21.0.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@^17.3.1: + version "17.5.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" + integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.0.0" + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +"zksync-web3@link:../../../sdk/zksync-web3.js": + version "0.0.0" + uid "" diff --git a/docker-compose-backup-test.yml b/docker-compose-backup-test.yml new file mode 
100644 index 000000000000..dfadd7cad586 --- /dev/null +++ b/docker-compose-backup-test.yml @@ -0,0 +1,25 @@ +version: '3.2' +services: + + geth: + image: "matterlabs/geth:latest" + environment: + - PLUGIN_CONFIG + + zk: + image: "matterlabs/zk-environment:latest2.0" + depends_on: + - geth + command: cargo run --bin zksync_core + volumes: + - .:/usr/src/zksync + - /usr/src/cache:/usr/src/cache + - /usr/src/keys:/usr/src/keys + environment: + - IN_DOCKER=1 + - CACHE_DIR=/usr/src/cache + - SCCACHE_CACHE_SIZE=50g + - DB_PATH=/usr/src/db + - CI=1 + env_file: + - ./etc/env/dev.env diff --git a/docker-compose-gpu-runner.yml b/docker-compose-gpu-runner.yml new file mode 100644 index 000000000000..68c48f01204a --- /dev/null +++ b/docker-compose-gpu-runner.yml @@ -0,0 +1,42 @@ +version: '3.2' +services: + geth: + image: "matterlabs/geth:latest" + environment: + - PLUGIN_CONFIG + + zk: + image: "matterlabs/zk-environment:latest2.0" + depends_on: + - geth + - postgres + security_opt: + - seccomp:unconfined + command: tail -f /dev/null + volumes: + - .:/usr/src/zksync + - /usr/src/cache:/usr/src/cache + - /var/run/docker.sock:/var/run/docker.sock + - /usr/src/keys:/mnt/prover_setup_keys + environment: + - CACHE_DIR=/usr/src/cache + - SCCACHE_CACHE_SIZE=50g + - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + - SCCACHE_GCS_OAUTH_URL=http://169.254.169.254/computeMetadata/v1/instance/service-accounts/gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com/token + - SCCACHE_ERROR_LOG=/tmp/sccache_log.txt + - SCCACHE_GCS_RW_MODE=READ_WRITE + - CI=1 + - GITHUB_WORKSPACE=$GITHUB_WORKSPACE + env_file: + - ./.env + deploy: + resources: + reservations: + devices: + - capabilities: [gpu] + postgres: + image: "postgres:12" + ports: + - "5432:5432" + environment: + - POSTGRES_HOST_AUTH_METHOD=trust diff --git a/docker-compose-runner.yml b/docker-compose-runner.yml new file mode 100644 index 000000000000..c9c6b815ebd7 --- /dev/null +++ b/docker-compose-runner.yml @@ -0,0 +1,36 @@ 
+version: '3.2' +services: + geth: + image: "matterlabs/geth:latest" + environment: + - PLUGIN_CONFIG + + zk: + image: "matterlabs/zk-environment:latest2.0-lightweight" + depends_on: + - geth + - postgres + security_opt: + - seccomp:unconfined + command: tail -f /dev/null + volumes: + - .:/usr/src/zksync + - /usr/src/cache:/usr/src/cache + - /var/run/docker.sock:/var/run/docker.sock + environment: + - CACHE_DIR=/usr/src/cache + - SCCACHE_CACHE_SIZE=50g + - SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + - SCCACHE_GCS_OAUTH_URL=http://169.254.169.254/computeMetadata/v1/instance/service-accounts/gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com/token + - SCCACHE_ERROR_LOG=/tmp/sccache_log.txt + - SCCACHE_GCS_RW_MODE=READ_WRITE + - CI=1 + - GITHUB_WORKSPACE=$GITHUB_WORKSPACE + env_file: + - ./.env + postgres: + image: "postgres:12" + ports: + - "5432:5432" + environment: + - POSTGRES_HOST_AUTH_METHOD=trust diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000000..44dd9bba328c --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,22 @@ +version: '3.2' +services: + geth: + image: "matterlabs/geth:latest" + ports: + - "8545:8545" + - "8546:8546" + volumes: + - type: bind + source: ./volumes/geth + target: /var/lib/geth/data + postgres: + image: "postgres:12" + ports: + - "5432:5432" + volumes: + - type: bind + source: ./volumes/postgres + target: /var/lib/postgresql/data + environment: + - POSTGRES_HOST_AUTH_METHOD=trust + diff --git a/docker/circuit-synthesizer/Dockerfile b/docker/circuit-synthesizer/Dockerfile new file mode 100644 index 000000000000..dd128e0ce794 --- /dev/null +++ b/docker/circuit-synthesizer/Dockerfile @@ -0,0 +1,29 @@ +# For using private GitHub dependencies, CI downdloads all crates and bellman-cuda dependency outside of the contatiner +# Not expected to work locally + +# syntax=docker/dockerfile:experimental +FROM debian:buster-slim as builder + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && 
apt-get install -y curl clang openssl libssl-dev gcc g++ \ + pkg-config build-essential libclang-dev && \ + rm -rf /var/lib/apt/lists/* + +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH + +RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ + rustup install nightly-2022-08-23 && \ + rustup default nightly-2022-08-23 + +WORKDIR /usr/src/zksync +COPY . . + +RUN CARGO_HOME=./cargo cargo build --release + +FROM debian:buster-slim +RUN apt-get update && apt-get install -y curl openssl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* +COPY --from=builder /usr/src/zksync/target/release/zksync_circuit_synthesizer /usr/bin/ +ENTRYPOINT ["zksync_circuit_synthesizer"] diff --git a/docker/contract-verifier/Dockerfile b/docker/contract-verifier/Dockerfile new file mode 100644 index 000000000000..2b1708e78125 --- /dev/null +++ b/docker/contract-verifier/Dockerfile @@ -0,0 +1,43 @@ +# syntax=docker/dockerfile:experimental +FROM matterlabs/llvm_runner:latest as builder + +WORKDIR /usr/src/zksync +COPY . . 
+#Needed to get zkEVM package from github link with auth embedded +# Doesn't expected to work local +RUN CARGO_HOME=./cargo cargo build --release + +FROM matterlabs/zksolc:v1.2.0 as zksolc-v1-2-0 + +RUN apt-get update && apt-get install -y curl libpq5 ca-certificates wget && rm -rf /var/lib/apt/lists/* + +RUN mkdir -p /etc/zksolc-bin/v1.2.0 && cp /usr/local/bin/zksolc /etc/zksolc-bin/v1.2.0/ +RUN mkdir -p /etc/zksolc-bin/v1.2.1 \ + && wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.2.1 \ + && cp zksolc-linux-amd64-musl-v1.2.1 /etc/zksolc-bin/v1.2.1/zksolc \ + && chmod +x /etc/zksolc-bin/v1.2.1/zksolc +RUN mkdir -p /etc/zksolc-bin/v1.2.2 \ + && wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.2.2 \ + && cp zksolc-linux-amd64-musl-v1.2.2 /etc/zksolc-bin/v1.2.2/zksolc \ + && chmod +x /etc/zksolc-bin/v1.2.2/zksolc +RUN mkdir -p /etc/zksolc-bin/v1.2.3 \ + && wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.2.3 \ + && cp zksolc-linux-amd64-musl-v1.2.3 /etc/zksolc-bin/v1.2.3/zksolc \ + && chmod +x /etc/zksolc-bin/v1.2.3/zksolc +RUN mkdir -p /etc/zksolc-bin/v1.3.0 \ + && wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.3.0 \ + && cp zksolc-linux-amd64-musl-v1.3.0 /etc/zksolc-bin/v1.3.0/zksolc \ + && chmod +x /etc/zksolc-bin/v1.3.0/zksolc +RUN mkdir -p /etc/zksolc-bin/v1.3.1 \ + && wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.3.1 \ + && cp zksolc-linux-amd64-musl-v1.3.1 /etc/zksolc-bin/v1.3.1/zksolc \ + && chmod +x /etc/zksolc-bin/v1.3.1/zksolc + +COPY docker/contract-verifier/install-all-solc.sh install-all-solc.sh +RUN sh ./install-all-solc.sh + +COPY --from=builder /usr/src/zksync/target/release/zksync_contract_verifier /usr/bin/ +COPY etc/system-contracts/bootloader/build/artifacts/ /etc/system-contracts/bootloader/build/artifacts/ +COPY 
etc/system-contracts/artifacts-zk /etc/system-contracts/artifacts-zk +# CMD tail -f /dev/null +ENTRYPOINT ["zksync_contract_verifier"] diff --git a/docker/contract-verifier/install-all-solc.sh b/docker/contract-verifier/install-all-solc.sh new file mode 100644 index 000000000000..8f4d8c38a5c1 --- /dev/null +++ b/docker/contract-verifier/install-all-solc.sh @@ -0,0 +1,20 @@ +# Installs all solc versions from the github repo +wget -O list.txt https://github.com/ethereum/solc-bin/raw/gh-pages/linux-amd64/list.txt + +# Iterate versions +for LN in $(cat list.txt) +do + # Download + wget https://github.com/ethereum/solc-bin/raw/gh-pages/linux-amd64/$LN + + # Get short version name + temp="${LN#"solc-linux-amd64-v"}" + version="${temp%\+*}" + + # Move and rename + mkdir -p etc/solc-bin/$version/ + mv $LN etc/solc-bin/$version/solc + chmod +x etc/solc-bin/$version/solc + + ls etc/solc-bin/ +done diff --git a/docker/geth/Dockerfile b/docker/geth/Dockerfile new file mode 100644 index 000000000000..b52e640e62ab --- /dev/null +++ b/docker/geth/Dockerfile @@ -0,0 +1,13 @@ +FROM ethereum/client-go:latest + +RUN mkdir -p /seed/keystore +COPY password.sec /seed/ +COPY fast-dev.json /seed/ +COPY standard-dev.json /seed/ +COPY mainnet-dev.json /seed/ +COPY keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b /seed/keystore/ + +COPY geth-entry.sh /bin/ + +EXPOSE 8545 8546 30303 30303/udp +ENTRYPOINT [ "sh", "/bin/geth-entry.sh" ] \ No newline at end of file diff --git a/docker/geth/fast-dev.json b/docker/geth/fast-dev.json new file mode 100644 index 000000000000..a0cb5c46f90d --- /dev/null +++ b/docker/geth/fast-dev.json @@ -0,0 +1,77 @@ +{ + "config": { + "chainId": 9, + "homesteadBlock": 0, + "eip150Block": 0, + "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "berlinBlock": 0, 
+ "londonBlock": 0, + "clique": { + "period": 0, + "epoch": 30000 + } + }, + "nonce": "0x0", + "timestamp": "0x5ca9158b", + "extraData": "0x00000000000000000000000000000000000000000000000000000000000000008a91dc2d28b689474298d91899f0c1baf62cb85b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit": "0x989680", + "difficulty": "0x80000", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x0000000000000000000000000000000000000000", + "alloc": { + "0000000000000000000000000000000000000000": { + "balance": "0x1" + }, + "8a91dc2d28b689474298d91899f0c1baf62cb85b": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "de03a0b5963f75f1c8485b355ff6d30f3093bde7": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "52312AD6f01657413b2eaE9287f6B9ADaD93D5FE": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "36615Cf349d7F6344891B1e7CA7C72883F5dc049": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "a61464658AfeAf65CccaaFD3a512b69A83B77618": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "0D43eB5B8a47bA8900d84AA36656c92024e9772e": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "A13c10C0D5bd6f79041B9835c63f91de35A15883": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "8002cD98Cfb563492A6fB3E7C8243b7B9Ad4cc92": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "4F9133D1d3F50011A6859807C837bdCB31Aaab13": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "bd29A1B981925B94eEc5c4F1125AF02a2Ec4d1cA": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "edB6F5B4aab3dD95C7806Af42881FF12BE7e9daa": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "e706e60ab5Dc512C36A4646D719b889F398cbBcB": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "E90E12261CCb0F3F7976Ae611A29e84a6A85f424": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + 
"e706e60ab5dc512c36a4646d719b889f398cbbcb": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + } + }, + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000" +} diff --git a/docker/geth/geth-entry.sh b/docker/geth/geth-entry.sh new file mode 100755 index 000000000000..df9cf6bfe583 --- /dev/null +++ b/docker/geth/geth-entry.sh @@ -0,0 +1,46 @@ +#!/bin/sh + +# standard - 1 sec block time, 10kk gas limit +# fast - 0 sec block time, 10kk gas limit +# mainnet - 15 sec block time, 10kk gas limit + +if [ ! -z $PLUGIN_CONFIG ]; then + CONFIG=$PLUGIN_CONFIG +else + CONFIG=${1:-standard} +fi + + +echo config $CONFIG + +case $CONFIG in +standard|fast|mainnet) + ;; +*) + echo "supported configurations: standard, fast, mainnet"; + exit 1 + ;; +esac + +cd /var/lib/geth/data + +DEV="$CONFIG"-dev.json + +if [ ! -f ./keystore ]; then + echo initializing dev network + cp /seed/$DEV ./ + cp /seed/password.sec ./ + geth --datadir . init $DEV + cp /seed/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b ./keystore/ +fi + +exec geth --networkid 9 --mine --miner.threads 1 \ + --datadir "." 
\ + --nodiscover \ + --http --http.addr "0.0.0.0" \ + --http.corsdomain "*" --nat "any" --http.api eth,web3,personal,net \ + --unlock 0 --password "./password.sec" --allow-insecure-unlock \ + --ws --ws.port 8546 \ + --gcmode archive \ + --ws.origins "*" --http.vhosts=* \ + --miner.gastarget=10000000 --miner.gaslimit=11000000 diff --git a/docker/geth/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b b/docker/geth/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b new file mode 100644 index 000000000000..5a1bcbd4b14a --- /dev/null +++ b/docker/geth/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b @@ -0,0 +1 @@ +{"address":"8a91dc2d28b689474298d91899f0c1baf62cb85b","crypto":{"cipher":"aes-128-ctr","ciphertext":"c0b1725ea8dcff76578e304023eeed04a9a5ecde683f6e48fe30cd59186f3c6f","cipherparams":{"iv":"eb4d35a8a5f4502cf7d8fa2ae6cef7bd"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"a16f6be667fc2b2a2335e53ca4902f0af1e3abd740373073ed59fcdfdbdd3e91"},"mac":"689c82e199155f38cfac88fa27ba632d6c44e133ed50f43483e407622c1c508e"},"id":"3d639525-1478-47de-8e5a-09a6199214dd","version":3} \ No newline at end of file diff --git a/docker/geth/mainnet-dev.json b/docker/geth/mainnet-dev.json new file mode 100644 index 000000000000..942e1017a364 --- /dev/null +++ b/docker/geth/mainnet-dev.json @@ -0,0 +1,77 @@ +{ + "config": { + "chainId": 9, + "homesteadBlock": 0, + "eip150Block": 0, + "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "clique": { + "period": 15, + "epoch": 30000 + } + }, + "nonce": "0x0", + "timestamp": "0x5ca9158b", + "extraData": 
"0x00000000000000000000000000000000000000000000000000000000000000008a91dc2d28b689474298d91899f0c1baf62cb85b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit": "0x989680", + "difficulty": "0x80000", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x0000000000000000000000000000000000000000", + "alloc": { + "0000000000000000000000000000000000000000": { + "balance": "0x1" + }, + "8a91dc2d28b689474298d91899f0c1baf62cb85b": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "de03a0b5963f75f1c8485b355ff6d30f3093bde7": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "52312AD6f01657413b2eaE9287f6B9ADaD93D5FE": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "36615Cf349d7F6344891B1e7CA7C72883F5dc049": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "a61464658AfeAf65CccaaFD3a512b69A83B77618": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "0D43eB5B8a47bA8900d84AA36656c92024e9772e": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "A13c10C0D5bd6f79041B9835c63f91de35A15883": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "8002cD98Cfb563492A6fB3E7C8243b7B9Ad4cc92": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "4F9133D1d3F50011A6859807C837bdCB31Aaab13": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "bd29A1B981925B94eEc5c4F1125AF02a2Ec4d1cA": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "edB6F5B4aab3dD95C7806Af42881FF12BE7e9daa": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "e706e60ab5Dc512C36A4646D719b889F398cbBcB": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "E90E12261CCb0F3F7976Ae611A29e84a6A85f424": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "e706e60ab5dc512c36a4646d719b889f398cbbcb": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + } + }, + "number": "0x0", + "gasUsed": 
"0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000" +} diff --git a/docker/geth/password.sec b/docker/geth/password.sec new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docker/geth/standard-dev.json b/docker/geth/standard-dev.json new file mode 100644 index 000000000000..556af1632a6d --- /dev/null +++ b/docker/geth/standard-dev.json @@ -0,0 +1,77 @@ +{ + "config": { + "chainId": 9, + "homesteadBlock": 0, + "eip150Block": 0, + "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "clique": { + "period": 1, + "epoch": 30000 + } + }, + "nonce": "0x0", + "timestamp": "0x5ca9158b", + "extraData": "0x00000000000000000000000000000000000000000000000000000000000000008a91dc2d28b689474298d91899f0c1baf62cb85b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit": "0x989680", + "difficulty": "0x80000", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x0000000000000000000000000000000000000000", + "alloc": { + "0000000000000000000000000000000000000000": { + "balance": "0x1" + }, + "8a91dc2d28b689474298d91899f0c1baf62cb85b": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "de03a0b5963f75f1c8485b355ff6d30f3093bde7": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "52312AD6f01657413b2eaE9287f6B9ADaD93D5FE": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "36615Cf349d7F6344891B1e7CA7C72883F5dc049": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "a61464658AfeAf65CccaaFD3a512b69A83B77618": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "0D43eB5B8a47bA8900d84AA36656c92024e9772e": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" 
+ }, + "A13c10C0D5bd6f79041B9835c63f91de35A15883": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "8002cD98Cfb563492A6fB3E7C8243b7B9Ad4cc92": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "4F9133D1d3F50011A6859807C837bdCB31Aaab13": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "bd29A1B981925B94eEc5c4F1125AF02a2Ec4d1cA": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "edB6F5B4aab3dD95C7806Af42881FF12BE7e9daa": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "e706e60ab5Dc512C36A4646D719b889F398cbBcB": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "E90E12261CCb0F3F7976Ae611A29e84a6A85f424": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + }, + "e706e60ab5dc512c36a4646d719b889f398cbbcb": { + "balance": "0x4B3B4CA85A86C47A098A224000000000" + } + }, + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000" +} diff --git a/docker/local-node/Dockerfile b/docker/local-node/Dockerfile new file mode 100644 index 000000000000..faf880d31afc --- /dev/null +++ b/docker/local-node/Dockerfile @@ -0,0 +1,67 @@ +# syntax=docker/dockerfile:experimental + +# Image is always built from the server image to reuse the common parts +# This image is expected to be built locally beforehand (implemented in the `zk` tool) +FROM matterlabs/server-v2:latest2.0 + +WORKDIR / + +# Install required dependencies +RUN apt-get update; apt-get install -y make bash git openssl libssl-dev gcc g++ curl pkg-config software-properties-common jq wget +RUN apt-get install -y libpq5 ca-certificates postgresql-client && rm -rf /var/lib/apt/lists/* + +# Install node and yarn +RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - +RUN apt-get install -y nodejs +RUN npm install -g yarn + +# Copy compiler (both solc and zksolc) binaries +# Obtain `solc` 0.8.12. 
+RUN wget https://github.com/ethereum/solc-bin/raw/gh-pages/linux-amd64/solc-linux-amd64-v0.8.12%2Bcommit.f00d7308 \ + && mv solc-linux-amd64-v0.8.12+commit.f00d7308 /usr/bin/solc \ + && chmod +x /usr/bin/solc +# Obtain `zksolc` 1.1.6. +RUN wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.1.6 \ + && mv zksolc-linux-amd64-musl-v1.1.6 /usr/bin/zksolc \ + && chmod +x /usr/bin/zksolc + +# Copy required packages while preserving the folders structure from the repo +# It's required because these packages use relative paths to the SDK +# Copy the SDK +COPY sdk/zksync-web3.js /sdk/zksync-web3.js +# Copy `zk` tool +COPY infrastructure/zk /infrastructure/zk +# Copy `local-setup-preparation` tool +COPY infrastructure/local-setup-preparation /infrastructure/local-setup-preparation +# Copy migrations +COPY core/lib/dal/migrations /migrations +# Copy dev configs +COPY etc/env /etc/env +# Copy test configs (required to list rich accounts) +COPY etc/test_config /etc/test_config +# Copy all the L1 contracts so they can be deployed +COPY contracts /contracts/ + +# Set `ZKSYNC_HOME` to the root (required for `zk` tool) +ENV ZKSYNC_HOME=/ +# Set `LOCAL_SETUP` variable to modify `zk` tool behavior +ENV ZKSYNC_LOCAL_SETUP=true +# Disable all checks +ENV ZKSYNC_ACTION=dont_ask + +# Build all the required TS packages + +# Build SDK (`yarn add ethers` required because it's a peer dependency) +RUN cd /sdk/zksync-web3.js/ && yarn add ethers@5.7.2 && yarn build && cd / +# Build `zk` tool +RUN cd /infrastructure/zk && yarn && yarn build && cd / +# Build `local-setup-preparation` tool +RUN cd /infrastructure/local-setup-preparation && yarn && cd / +# Build L1 contracts package (contracts themselves should be already built) +RUN cd /contracts/ethereum && yarn && cd / +# Same for L2 contracts +RUN cd /contracts/zksync && yarn && cd / + +# setup entrypoint script +COPY ./docker/local-node/entrypoint.sh /usr/bin/ +ENTRYPOINT ["entrypoint.sh"] diff --git 
a/docker/local-node/entrypoint.sh b/docker/local-node/entrypoint.sh new file mode 100755 index 000000000000..fd2ffa8aefa0 --- /dev/null +++ b/docker/local-node/entrypoint.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -ea + +# wait till db service is ready +until psql ${DATABASE_URL%/*} -c '\q'; do + >&2 echo "Postgres is unavailable - sleeping" + sleep 5 +done + +# ensure database initialization +if ! psql $DATABASE_URL -c '\q' 2>/dev/null; +then + echo "Initialing local environment" + psql ${DATABASE_URL%/*} -c "create database ${DATABASE_URL##*/}" + find /migrations -name "*up.sql" | sort | xargs printf -- ' -f %s' | xargs -t psql $DATABASE_URL + + cd /infrastructure/zk + # Compile configs + yarn start config compile + + # Override values for database URL and eth client in the toml config files + # so they will be taken into account + sed -i 's!^database_url=.*$!database_url="'"$DATABASE_URL"'"!' /etc/env/dev/private.toml + sed -i 's!^web3_url=.*$!web3_url="'"$ETH_CLIENT_WEB3_URL"'"!' /etc/env/dev/eth_client.toml + sed -i 's!^path=.*$!path="/var/lib/zksync/data"!' /etc/env/dev/database.toml + sed -i 's!^state_keeper_db_path=.*$!state_keeper_db_path="/var/lib/zksync/data/state_keeper"!' /etc/env/dev/database.toml + sed -i 's!^merkle_tree_backup_path=.*$!merkle_tree_backup_path="/var/lib/zksync/data/backups"!' /etc/env/dev/database.toml + + # Switch zksolc compiler source from docker to binary + sed -i "s!'docker'!'binary'!" 
/contracts/zksync/hardhat.config.ts + + # Compile configs again (with changed values) + yarn start config compile + + # Perform initialization + yarn start lightweight-init + yarn start f yarn --cwd /infrastructure/local-setup-preparation start + + # Return to the root directory + cd / +fi + +# start server +cd /infrastructure/zk && yarn start config compile && cd / +source /etc/env/dev.env +zksync_server diff --git a/docker/prover/Dockerfile b/docker/prover/Dockerfile new file mode 100644 index 000000000000..6d96eaa2494b --- /dev/null +++ b/docker/prover/Dockerfile @@ -0,0 +1,42 @@ +# For using private GitHub dependencies, CI downloads all crates and the bellman-cuda dependency outside of the container +# Not expected to work locally + +# syntax=docker/dockerfile:experimental +FROM nvidia/cuda:11.8.0-devel-ubuntu22.04 as builder + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && apt-get install -y curl clang openssl libssl-dev gcc g++ \ + pkg-config build-essential libclang-dev && \ + rm -rf /var/lib/apt/lists/* + +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH + +RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ + rustup install nightly-2022-08-23 && \ + rustup default nightly-2022-08-23 + +WORKDIR /usr/src/zksync +COPY . . 
+ +ENV BELLMAN_CUDA_DIR=/usr/src/zksync/bellman-cuda + +RUN CARGO_HOME=./cargo cargo build --release + +FROM nvidia/cuda:11.8.0-runtime-ubuntu22.04 as runner + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt-get update && apt-get install -y libpq5 ca-certificates openssl && rm -rf /var/lib/apt/lists/* + +COPY etc/system-contracts/bootloader/build/artifacts/ /etc/system-contracts/bootloader/build/artifacts/ +COPY etc/system-contracts/artifacts-zk /etc/system-contracts/artifacts-zk +COPY contracts/ethereum/artifacts/ /contracts/ethereum/artifacts/ +COPY contracts/zksync/artifacts-zk/ /contracts/zksync/artifacts-zk/ + +COPY core/bin/verification_key_generator_and_server/data/ /core/bin/verification_key_generator_and_server/data/ + +COPY --from=builder /usr/src/zksync/target/release/zksync_prover /usr/bin/ +ENTRYPOINT ["zksync_prover"] diff --git a/docker/runner/Dockerfile b/docker/runner/Dockerfile new file mode 100644 index 000000000000..bec7c1e015f1 --- /dev/null +++ b/docker/runner/Dockerfile @@ -0,0 +1,5 @@ +FROM tcardonne/github-runner +FROM docker:dind +RUN apk update +RUN apk add py-pip python3-dev libffi-dev openssl-dev gcc libc-dev make +RUN pip install docker-compose diff --git a/docker/server-v2/Dockerfile b/docker/server-v2/Dockerfile new file mode 100644 index 000000000000..13f219fbdb31 --- /dev/null +++ b/docker/server-v2/Dockerfile @@ -0,0 +1,29 @@ +# For using private GitHub dependencies, CI downloads all crates outside of the container +# Not expected to work locally + +# syntax=docker/dockerfile:experimental +FROM rust:1.65-buster as builder +RUN apt-get update && apt-get install -y clang && rm -rf /var/lib/apt/lists/* +WORKDIR /usr/src/zksync +COPY . . 
+ +RUN CARGO_HOME=./cargo cargo build --release + +FROM debian:buster-slim +RUN apt-get update && apt-get install -y curl libpq5 ca-certificates && rm -rf /var/lib/apt/lists/* +EXPOSE 3000 +EXPOSE 3031 +EXPOSE 3030 +COPY --from=builder /usr/src/zksync/target/release/zksync_server /usr/bin +COPY --from=builder /usr/src/zksync/target/release/block_reverter /usr/bin +COPY --from=builder /usr/src/zksync/target/release/merkle_tree_consistency_checker /usr/bin +COPY --from=builder /usr/src/zksync/target/release/rocksdb_util /usr/bin +COPY etc/system-contracts/bootloader/build/artifacts/ /etc/system-contracts/bootloader/build/artifacts/ +COPY etc/system-contracts/artifacts-zk /etc/system-contracts/artifacts-zk +COPY contracts/ethereum/artifacts/ /contracts/ethereum/artifacts/ +COPY contracts/zksync/artifacts-zk/ /contracts/zksync/artifacts-zk/ +COPY etc/tokens/ /etc/tokens/ +COPY etc/ERC20/ /etc/ERC20/ +COPY core/bin/verification_key_generator_and_server/data/ /core/bin/verification_key_generator_and_server/data/ + +ENTRYPOINT ["zksync_server"] diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile new file mode 100644 index 000000000000..5de4046b8ad7 --- /dev/null +++ b/docker/zk-environment/Dockerfile @@ -0,0 +1,178 @@ +FROM ubuntu:20.04 as base + +WORKDIR /usr/src/zksync +ENV DEBIAN_FRONTEND noninteractive + +# Install required dependencies +RUN apt-get update && apt-get install -y \ + cmake \ + make \ + bash \ + git \ + openssl \ + libssl-dev \ + gcc \ + g++ \ + curl \ + pkg-config \ + software-properties-common \ + jq \ + openssh-server \ + openssh-client \ + wget \ + vim \ + ca-certificates \ + gnupg2 + +# Install dependencies for RocksDB +RUN apt-get install -y \ + curl \ + gnutls-bin git \ + build-essential \ + clang-7 \ + lldb-7 \ + lld-7 + +# Install docker engine +RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - +RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release 
-cs) stable" +RUN apt update; apt install -y docker-ce-cli + +# Configure git to fetch submodules correctly (https://stackoverflow.com/questions/38378914/how-to-fix-git-error-rpc-failed-curl-56-gnutls) +RUN git config --global http.postBuffer 1048576000 + +# Install node and yarn +RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - +RUN apt-get install -y nodejs +RUN npm install -g yarn + +# Install Rust and required cargo packages +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH + +ENV GCLOUD_VERSION=403.0.0 +# Install gcloud for gcr login and gcsfuse for mounting buckets +RUN echo "deb http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/sources.list.d/google-cloud-sdk.list && \ + curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \ + apt-get update -y && apt-get install google-cloud-cli=${GCLOUD_VERSION}-0 --no-install-recommends -y && \ + gcloud config set core/disable_usage_reporting true && \ + gcloud config set component_manager/disable_update_check true && \ + gcloud config set metrics/environment github_docker_image + +RUN curl https://sh.rustup.rs -sSf | bash -s -- -y +RUN rustup install nightly-2022-08-23 +RUN rustup default stable +RUN cargo install --version=0.5.6 sqlx-cli +RUN cargo install cargo-tarpaulin + +# Copy compiler (both solc and zksolc) binaries +# Obtain `solc` 0.8.12. +RUN wget https://github.com/ethereum/solc-bin/raw/gh-pages/linux-amd64/solc-linux-amd64-v0.8.12%2Bcommit.f00d7308 \ + && mv solc-linux-amd64-v0.8.12+commit.f00d7308 /usr/bin/solc \ + && chmod +x /usr/bin/solc +# Obtain `zksolc` 1.1.6. 
+RUN wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.1.6 \ + && mv zksolc-linux-amd64-musl-v1.1.6 /usr/bin/zksolc \ + && chmod +x /usr/bin/zksolc + +# Setup the environment +ENV ZKSYNC_HOME=/usr/src/zksync +ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" +ENV CI=1 +RUN cargo install sccache +ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache + +FROM base as nvidia-tools + +# Install Rust and required cargo packages +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH + +# Setup the environment +ENV ZKSYNC_HOME=/usr/src/zksync +ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" +ENV CI=1 +ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache +ENV DEBIAN_FRONTEND noninteractive + +# Setup nvidia-cuda env +ENV NVARCH x86_64 + +ENV NVIDIA_REQUIRE_CUDA "cuda>=11.8 brand=tesla,driver>=450,driver<451 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=unknown,driver>=510,driver<511 brand=nvidia,driver>=510,driver<511 brand=nvidiartx,driver>=510,driver<511 brand=quadrortx,driver>=510,driver<511 brand=unknown,driver>=515,driver<516 brand=nvidia,driver>=515,driver<516 brand=nvidiartx,driver>=515,driver<516 brand=quadrortx,driver>=515,driver<516" +ENV NV_CUDA_CUDART_VERSION 11.8.89-1 +ENV NV_CUDA_COMPAT_PACKAGE cuda-compat-11-8 + +RUN curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH}/3bf863cc.pub | apt-key add - && \ + echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/${NVARCH} /" > /etc/apt/sources.list.d/cuda.list + +ENV CUDA_VERSION 11.8.0 + +# For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a +RUN apt-get update && apt-get install -y --no-install-recommends \ + cuda-cudart-11-8=${NV_CUDA_CUDART_VERSION} \ + ${NV_CUDA_COMPAT_PACKAGE} \ + && ln -s cuda-11.8 
/usr/local/cuda && \ + rm -rf /var/lib/apt/lists/* + +# Required for nvidia-docker v1 +RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf \ + && echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf + +ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH} +ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64 + +# nvidia-container-runtime +ENV NVIDIA_VISIBLE_DEVICES all +ENV NVIDIA_DRIVER_CAPABILITIES compute,utility + +ENV NV_CUDA_LIB_VERSION 11.8.0-1 + +ENV NV_NVTX_VERSION 11.8.86-1 +ENV NV_LIBNPP_VERSION 11.8.0.86-1 +ENV NV_LIBNPP_PACKAGE libnpp-11-8=${NV_LIBNPP_VERSION} +ENV NV_LIBCUSPARSE_VERSION 11.7.5.86-1 + +ENV NV_LIBCUBLAS_PACKAGE_NAME libcublas-11-8 +ENV NV_LIBCUBLAS_VERSION 11.11.3.6-1 +ENV NV_LIBCUBLAS_PACKAGE ${NV_LIBCUBLAS_PACKAGE_NAME}=${NV_LIBCUBLAS_VERSION} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + cuda-libraries-11-8=${NV_CUDA_LIB_VERSION} \ + ${NV_LIBNPP_PACKAGE} \ + cuda-nvtx-11-8=${NV_NVTX_VERSION} \ + libcusparse-11-8=${NV_LIBCUSPARSE_VERSION} \ + ${NV_LIBCUBLAS_PACKAGE} \ + && rm -rf /var/lib/apt/lists/* + +# Keep apt from auto upgrading the cublas and nccl packages. 
See https://gitlab.com/nvidia/container-images/cuda/-/issues/88 +RUN apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} + +ENV NV_CUDA_CUDART_DEV_VERSION 11.8.89-1 +ENV NV_NVML_DEV_VERSION 11.8.86-1 +ENV NV_LIBCUSPARSE_DEV_VERSION 11.7.5.86-1 +ENV NV_LIBNPP_DEV_VERSION 11.8.0.86-1 +ENV NV_LIBNPP_DEV_PACKAGE libnpp-dev-11-8=${NV_LIBNPP_DEV_VERSION} + +ENV NV_LIBCUBLAS_DEV_VERSION 11.11.3.6-1 +ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME libcublas-dev-11-8 +ENV NV_LIBCUBLAS_DEV_PACKAGE ${NV_LIBCUBLAS_DEV_PACKAGE_NAME}=${NV_LIBCUBLAS_DEV_VERSION} + +ENV NV_NVPROF_VERSION 11.8.87-1 +ENV NV_NVPROF_DEV_PACKAGE cuda-nvprof-11-8=${NV_NVPROF_VERSION} + +RUN apt-get update && apt-get install -y --no-install-recommends \ + libtinfo5 libncursesw5 \ + cuda-cudart-dev-11-8=${NV_CUDA_CUDART_DEV_VERSION} \ + cuda-command-line-tools-11-8=${NV_CUDA_LIB_VERSION} \ + cuda-minimal-build-11-8=${NV_CUDA_LIB_VERSION} \ + cuda-libraries-dev-11-8=${NV_CUDA_LIB_VERSION} \ + cuda-nvml-dev-11-8=${NV_NVML_DEV_VERSION} \ + ${NV_NVPROF_DEV_PACKAGE} \ + ${NV_LIBNPP_DEV_PACKAGE} \ + libcusparse-dev-11-8=${NV_LIBCUSPARSE_DEV_VERSION} \ + ${NV_LIBCUBLAS_DEV_PACKAGE} \ + && rm -rf /var/lib/apt/lists/* + +ENV LIBRARY_PATH /usr/local/cuda/lib64/stubs diff --git a/docker/zk-rust-nightly-environment/Dockerfile b/docker/zk-rust-nightly-environment/Dockerfile new file mode 100644 index 000000000000..902724a06466 --- /dev/null +++ b/docker/zk-rust-nightly-environment/Dockerfile @@ -0,0 +1,20 @@ +FROM debian:buster-slim + +ARG DEBIAN_FRONTEND=noninteractive + +RUN apt update && apt install git curl clang openssl libssl-dev gcc g++ pkg-config build-essential libclang-dev -y + +ENV RUSTUP_HOME=/usr/local/rustup \ + CARGO_HOME=/usr/local/cargo \ + PATH=/usr/local/cargo/bin:$PATH + +# Setup rust nightly +RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ + rustup install nightly-2022-08-23 && \ + rustup default nightly-2022-08-23 + + +# Setup cmake +RUN curl -Lo cmake-3.24.2-linux-x86_64.sh 
https://github.com/Kitware/CMake/releases/download/v3.24.2/cmake-3.24.2-linux-x86_64.sh && \ + chmod +x cmake-3.24.2-linux-x86_64.sh && \ + ./cmake-3.24.2-linux-x86_64.sh --skip-license --prefix=/usr/local diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 000000000000..fe4bd951a5af --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,110 @@ +# zkSync v2 Project Architecture + +This document will help you answer the question: _where can I find the logic for x?_ by giving a directory-tree style +structure of the physical architecture of the zkSync Era project. + +## High-Level Overview + +The zksync-2-dev repository has the following main units: + +**Smart Contracts:** All the smart contracts in charge of the protocols on the L1 & L2. Some main contracts: + +- L1 & L2 bridge contracts. +- The zkSync rollup contract on Ethereum. +- The L1 proof verifier contract. + +**Core App:** The execution layer. A node running the zkSync network in charge of the following components: + +- Monitoring the L1 smart contract for deposits or priority operations. +- Maintaining a mempool that receives transactions. +- Picking up transactions from the mempool, executing them in a VM, and changing the state accordingly. +- Generating zkSync chain blocks. +- Preparing circuits for executed blocks to be proved. +- Submitting blocks and proofs to the L1 smart contract. +- Exposing the Ethereum-compatible web3 API. + +**Prover App:** The prover app takes blocks and metadata generated by the server and constructs a validity zk +proof for them. + +**Storage Layer:** The different components and subcomponents don't communicate with each other directly via +APIs, rather via the single source of truth -- the db storage layer. + +## Low-Level Overview + +This section provides a physical map of folders & files in this repository. + +- `/contracts` + + - `/ethereum`: Smart contracts deployed on the Ethereum L1. 
+ - `/zksync`: Smart contracts deployed on the zkSync L2. + +- `/core` + + - `/bin`: Executables for the microservices components comprising zkSync Core Node. + + - `/admin-tools`: CLI tools for admin operations (e.g. restarting prover jobs). + - `/prover`: zkSync prover orchestrator application. + + - `zksync_core/src` + - `/api_server` Externally facing APIs. + - `/web3`: zkSync implementation of the Web3 API. + - `/tx_sender`: Helper module encapsulating the transaction processing logic. + - `/bin`: The executable main starting point for the zkSync server. + - `/circuit_breaker_checker`: zkSync watchdog. + - `/eth_sender`: Submits transactions to the zkSync smart contract. + - `/eth_watch`: Fetches data from the L1. for L2 censorship resistance. + - `/fee_monitor`: Monitors the ratio of fees collected by executing txs over the costs of interacting with + Ethereum. + - `/fee_ticker`: Module to define the price components of L2 transactions. + - `/gas_adjuster`: Module to determine the fees to pay in txs containing blocks submitted to the L1. + - `/gas_tracker`: Module for predicting L1 gas cost for the Commit/PublishProof/Execute operations. + - `/metadata_calculator`: Module to maintain the zkSync state tree. + - `/state_keeper`: The sequencer. In charge of collecting the pending txs from the mempool, executing them in the + VM, and sealing them in blocks. + - `/witness_generator`: Takes the sealed blocks and generates a _Witness_, the input for the prover containing the + circuits to be proved. + + - `/lib`: All the library crates used as dependencies of the binary crates above. + + - `/basic_types`: Crate with essential zkSync primitive types. + - `/config`: All the configured values used by the different zkSync apps. + - `/contracts`: Contains definitions of commonly used smart contracts. + - `/crypto`: Cryptographical primitives used by the different zkSync crates. 
+ - `/dal`: Data availability layer + - `/migrations`: All the db migrations applied to create the storage layer. + - `/src`: Functionality to interact with the different db tables. + - `/eth_client`: Module providing an interface to interact with an Ethereum node. + - `/eth_signer`: Module to sign messages and txs. + - `/mempool`: Implementation of the zkSync transaction pool. + - `/merkle_tree`: Implementation of a sparse Merkle tree. + - `/mini_merkle_tree`: In-memory implementation of a sparse Merkle tree. + - `/object_store`: Abstraction for storing blobs outside the main data store. + - `/prometheus_exporter`: Prometheus data exporter. + - `/prover_utils`: Utilities related to the proof generation. + - `/queued_job_processor`: An abstraction for async job processing. + - `/storage`: An encapsulated database interface. + - `/types`: zkSync network operations, transactions, and common types. + - `/utils`: Miscellaneous helpers for zkSync crates. + - `/vlog`: zkSync logging utility. + - `/vm`: Lightweight out-of-circuit VM interface. + - `/web3_decl`: Declaration of the Web3 API. + + - `/tests`: Testing infrastructure for zkSync network. + - `/loadnext`: An app for load testing the zkSync server. + - `/test_account`: A representation of zkSync account. + - `/testkit`: A relatively low-level testing library and test suite for zkSync. + - `/ts-integration`: Integration tests set implemented in TypeScript. + +- `/docker`: Project docker files. + +- `/bin` & `/infrastructure`: Infrastructure scripts that help to work with zkSync applications. + +- `/etc`: Configuration files. + + - `/env`:`.env` files that contain environment variables for different configurations of zkSync Server / Prover. + +- `/keys`: Verification keys for `circuit` module. + +- `/sdk`: Implementation of client libraries for the zkSync network in different programming languages. + - `/zksync-rs`: Rust client library for zkSync. 
+ - `/zksync-web3.js`: A JavaScript / TypeScript client library for zkSync. diff --git a/docs/development.md b/docs/development.md new file mode 100644 index 000000000000..b69ca40b1025 --- /dev/null +++ b/docs/development.md @@ -0,0 +1,148 @@ +# Development guide + +This document covers development-related actions in zkSync. + +## Initializing the project + +To setup the main toolkit, `zk`, simply run: + +``` +zk +``` + +You may also configure autocompletion for your shell via: + +``` +zk completion install +``` + +Once all the dependencies were installed, the project can be initialized: + +``` +zk init +``` + +This command will do the following: + +- Generate `$ZKSYNC_HOME/etc/env/dev.env` file with settings for the applications. +- Initialize docker containers with `geth` Ethereum node for local development. +- Download and unpack files for cryptographical backend. +- Generate required smart contracts. +- Compile all the smart contracts. +- Deploy smart contracts to the local Ethereum network. +- Create “genesis block” for server. + +Initializing may take pretty long, but many steps (such as downloading & unpacking keys and initializing containers) are +required to be done only once. + +Usually, it is a good idea to do `zk init` once after each merge to the `main` branch (as application setup may change). + +Additionally, there is a subcommand `zk clean` to remove previously generated data. Examples: + +``` +zk clean --all # Remove generated configs, database and backups. +zk clean --config # Remove configs only. +zk clean --database # Remove database. +zk clean --backups # Remove backups. +zk clean --database --backups # Remove database *and* backups, but not configs. +``` + +**When do you need it?** + +1. If you have an initialized database and want to run `zk init`, you have to remove the database first. +2. 
If after getting new functionality from the `main` branch your code stopped working and `zk init` doesn't help, you + may try removing `$ZKSYNC_HOME/etc/env/dev.env` and running `zk init` once again. This may help if the application + configuration has changed. + +If you don’t need all of the `zk init` functionality, but just need to start/stop containers, use the following +commands: + +``` +zk up # Set up `geth` container +zk down # Shut down `geth` container +``` + +## Reinitializing + +When actively changing something that affects infrastructure (for example, contracts code), you normally don't need the +whole `init` functionality, as it contains many external steps (e.g. deploying ERC20 tokens) which don't have to be +redone. + +For this case, there is an additional command: + +``` +zk reinit +``` + +This command does the minimal subset of `zk init` actions required to "reinitialize" the network. It assumes that +`zk init` was called in the current environment before. If `zk reinit` doesn't work for you, you may want to run +`zk init` instead. + +## Committing changes + +`zksync` uses pre-commit and pre-push git hooks for basic code integrity checks. Hooks are set up automatically within +the workspace initialization process. These hooks will not allow to commit the code which does not pass several checks. + +Currently the following criteria are checked: + +- Rust code should always be formatted via `cargo fmt`. +- Other code should always be formatted via `zk fmt`. +- Dummy Prover should not be staged for commit (see below for the explanation). + +## Using Dummy Prover + +By default, the chosen prover is a "dummy" one, meaning that it doesn't actually compute proofs but rather uses mocks to +avoid expensive computations in the development environment. + +To switch dummy prover to real prover, one must change `dummy_verifier` to `false` in `contracts.toml` for your env +(most likely, `etc/env/dev/contracts.toml`) and run `zk init` to redeploy smart contracts. 
+ +## Testing + +- Running the `rust` unit-tests: + + ``` + zk test rust + ``` + +- Running a specific `rust` unit-test: + + ``` + zk test rust --package --lib ::tests:: -- --exact + # e.g. zk test rust --package zksync_core --lib eth_sender::tests::resend_each_block -- --exact + ``` + +- Running the integration test: + + ``` + zk server # Has to be run in the 1st terminal + zk test i server # Has to be run in the 2nd terminal + ``` + +- Running the benchmarks: + + ``` + zk f cargo bench + ``` + +- Running the loadtest: + + ``` + zk server # Has to be run in the 1st terminal + zk prover # Has to be run in the 2nd terminal if you want to use real prover, otherwise it's not required. + zk run loadtest # Has to be run in the 3rd terminal + ``` + +## Contracts + +### Re-build contracts + +``` +zk contract build +``` + +### Publish source code on etherscan + +``` +zk contract publish +``` diff --git a/docs/launch.md b/docs/launch.md new file mode 100644 index 000000000000..7f68f927e4c8 --- /dev/null +++ b/docs/launch.md @@ -0,0 +1,183 @@ +# Running the application + +This document covers common scenarios of launching zkSync applications set locally. + +## Prerequisites + +Prepare dev environment prerequisites: see + +[Installing dependencies](./setup-dev.md) + +## Setup local dev environment + +Setup: + +``` +zk # installs and builds zk itself +zk init +``` + +During the first initialization you have to download around 8 GB of setup files, this should be done once. If you have a +problem on this step of the initialization, see help for the `zk run plonk-setup` command. + +If you face any other problems with the `zk init` command, go to the [Troubleshooting](##Troubleshooting) section at the +end of this file. There are solutions for some common error cases. 
+ +To completely reset the dev environment: + +- Stop services: + + ``` + zk down + ``` + +- Repeat the setup procedure above + +If `zk init` has already been executed, and now you only need to start docker containers (e.g. after reboot), simply +launch: + +``` +zk up +``` + +## (Re)deploy db and contracts + +``` +zk contract redeploy +``` + +## Environment configurations + +Env config files are held in `etc/env/` + +List configurations: + +``` +zk env +``` + +Switch between configurations: + +``` +zk env +``` + +Default configuration is `dev.env`, which is generated automatically from `dev.env.example` during `zk init` command +execution. + +## Build and run server + +Run server: + +``` +zk server +``` + +Server is configured using env files in `./etc/env` directory. After the first initialization, file +`./etc/env/dev.env`will be created. By default, this file is copied from the `./etc/env/dev.env.example` template. + +Make sure you have environment variables set right, you can check it by running: `zk env`. You should see `* dev` in +output. 
+ +## Running server using Google cloud storage object store instead of default In memory store + +Get the service_account.json file containing the GCP credentials from kubernetes secret for relevant environment(stage2/ +testnet2) add that file to the default location ~/gcloud/service_account.json or update object_store.toml with the file +location + +``` +zk server +``` + +## Running prover server + +Running on machine without GPU + +```shell +zk f cargo +nightly run --release --bin zksync_prover +``` + +Running on machine with GPU + +```shell +zk f cargo +nightly run --features gpu --release --bin zksync_prover +``` + +## Running the verification key generator + +```shell +# ensure that the setup_2^26.key in the current directory, the file can be download from https://storage.googleapis.com/universal-setup/setup_2\^26.key + +# To generate all verification keys +cargo run --release --bin zksync_verification_key_generator + + +``` + +## Running the setup key generator on machine with GPU + +- uncomment `"core/bin/setup_key_generator_and_server",` from root `Cargo.toml` file. +- ensure that the setup_2^26.key in the current directory, the file can be downloaded from + + +```shell +export BELLMAN_CUDA_DIR=$PWD +# To generate setup key for specific circuit type[0 - 17], 2 below corresponds to circuit type 2. +cargo +nightly run --features gpu --release --bin zksync_setup_key_generator -- --numeric-circuit 2 +``` + +## Running the setup key generator on machine without GPU + +- uncomment `"core/bin/setup_key_generator_and_server",` from root `Cargo.toml` file. +- ensure that the setup_2^26.key in the current directory, the file can be downloaded from + + +```shell +# To generate setup key for specific circuit type[0 - 17], 2 below corresponds to circuit type 2. 
+cargo +nightly run --release --bin zksync_setup_key_generator -- --numeric-circuit 2 +``` + +## Generating binary verification keys for existing json verification keys + +```shell +cargo run --release --bin zksync_json_to_binary_vk_converter -- -o /path/to/output-binary-vk +``` + +## Generating commitment for existing verification keys + +```shell +cargo run --release --bin zksync_commitment_generator +``` + +## Running the contract verifier + +```shell +# To process fixed number of jobs +cargo run --release --bin zksync_contract_verifier -- --jobs-number X + +# To run until manual exit +zk contract_verifier +``` + +## Troubleshooting + +### SSL error: certificate verify failed + +**Problem**. `zk init` fails with the following error: + +``` +Initializing download: https://storage.googleapis.com/universal-setup/setup_2%5E20.key +SSL error: certificate verify failed +``` + +**Solution**. Make sure that the version of `axel` on your computer is `2.17.10` or higher. + +### rmSync is not a function + +**Problem**. `zk init` fails with the following error: + +``` +fs_1.default.rmSync is not a function +``` + +**Solution**. Make sure that the version of `node.js` installed on your computer is `14.14.0` or higher. diff --git a/docs/setup-dev.md b/docs/setup-dev.md new file mode 100644 index 000000000000..7070d77536f4 --- /dev/null +++ b/docs/setup-dev.md @@ -0,0 +1,184 @@ +# Installing dependencies + +## Supported operating systems + +zkSync currently can be launched on any \*nix operating system (e.g. any linux distribution or MacOS). + +If you're using Windows, then make sure to use WSL 2, since WSL 1 is known to cause troubles. + +Additionally, if you are going to use WSL 2, make sure that your project is located in the _linux filesystem_, since +accessing NTFS partitions from inside of WSL is very slow. + +## `Docker` + +Install `docker`. It is recommended to follow the instructions from the +[official site](https://docs.docker.com/install/). 
+ +Note: currently official site proposes using Docker Desktop for linux, which is a GUI tool with plenty of quirks. If you +want to only have CLI tool, you need the `docker-ce` package and you can follow +[this guide](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04) for Ubuntu. + +Installing `docker` via `snap` or from the default repository can cause troubles. + +You need to install both `docker` and `docker-compose`. + +**Note:** `docker-compose` is installed automatically with `Docker Desktop`. + +**Note:** On linux you may encounter the following error when you’ll try to work with `zksync`: + +``` +ERROR: Couldn't connect to Docker daemon - you might need to run `docker-machine start default`. +``` + +If so, you **do not need** to install `docker-machine`. Most probably, it means that your user is not added to +the`docker` group. You can check it as follows: + +```bash +docker-compose up # Should raise the same error. +sudo docker-compose up # Should start doing things. +``` + +If the first command fails, but the second succeeds, then you need to add your user to the `docker` group: + +```bash +sudo usermod -a -G docker your_user_name +``` + +After that, you should logout and login again (user groups are refreshed after the login). The problem should be solved +at this step. + +If logging out does not help, restarting the computer should. + +## `Node` & `Yarn` + +1. Install `Node` (requires version 14.14.0). Since our team attempts to always use the latest LTS version of`Node.js`, + we suggest you to install [nvm](https://github.com/nvm-sh/nvm). It will allow you to update `Node.js`version easily + in the future. +2. Install `yarn`. Instructions can be found on the [official site](https://classic.yarnpkg.com/en/docs/install/). Check + if `yarn` is installed by running `yarn -v`. 
If you face any problems when installing `yarn`, it might be the case + that your package manager installed the wrong package.Make sure to thoroughly follow the instructions above on the + official website. It contains a lot of troubleshooting guides in it. + +## `Axel` + +Install `axel` for downloading keys: + +On mac: + +```bash +brew install axel +``` + +On debian-based linux: + +```bash +sudo apt-get install axel +``` + +Check the version of `axel` with the following command: + +``` +axel --version +``` + +Make sure the version is higher than `2.17.10`. + +## `clang` + +In order to compile RocksDB, you must have LLVM available. On debian-based linux it can be installed as follows: + +On linux: + +```bash +sudo apt-get install build-essential pkg-config cmake clang lldb lld +``` + +On mac: + +You need to have an up-to-date `Xcode`. You can install it directly from `App Store`. With Xcode command line tools, you +get the Clang compiler installed by default. Thus, having XCode you don't need to install `clang`. + +## `OpenSSL` + +Install OpenSSL: + +On mac: + +```bash +brew install openssl +``` + +On linux: + +```bash +sudo apt-get install libssl-dev +``` + +## `Rust` + +Install the latest `rust` version. + +Instructions can be found on the [official site](https://www.rust-lang.org/tools/install). + +Verify the `rust` installation: + +```bash +rustc --version +rustc 1.xx.y (xxxxxx 20xx-yy-zz) # Output may vary depending on actual version of rust +``` + +### Postgres + +Install the latest postgres. + +```bash +brew install postgresql +``` + +### SQLx CLI + +SQLx is a Rust library we use to interact with Postgres, and its CLI is used to manage DB migrations and support several +features of the library. + +```bash +cargo install sqlx-cli --version 0.5.13 +``` + +## Environment + +Edit the lines below and add them to your shell profile file (e.g. 
`~/.bash_profile`, `~/.zshrc`): + +```bash +# Add path here: +export ZKSYNC_HOME=/path/to/zksync + +export PATH=$ZKSYNC_HOME/bin:$PATH + +# If you're like me, uncomment: +# cd $ZKSYNC_HOME +``` + +### Tip: `mold` + +Optionally, you may want to optimize the build time with the modern linker, [`mold`](https://github.com/rui314/mold). + +This linker will speed up the build times, which can be pretty big for Rust binaries. + +Follow the instructions in the repo in order to install it and enable for Rust. + +## Tip: Speeding up building `RocksDB` + +By default, each time you compile `rocksdb` crate, it will compile required C++ sources from scratch. It can be avoided +by using precompiled versions of library, and it will significantly improve your build times. + +In order to do so, you can put compiled libraries to some persistent location, and add the following to your shell +configuration file (e.g. `.zshrc` or `.bashrc`): + +``` +export ROCKSDB_LIB_DIR= +export SNAPPY_LIB_DIR= +``` + +Make sure that compiled libraries match the current version of RocksDB. One way to obtain them, is to compile the +project in the usual way once, and then take built libraries from +`target/{debug,release}/build/librocksdb-sys-{some random value}/out`. 
diff --git a/eraLogo.svg b/eraLogo.svg new file mode 100644 index 000000000000..5af0f3a0c384 --- /dev/null +++ b/eraLogo.svg @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/etc/ERC20/contracts/ZkSyncERC20.sol b/etc/ERC20/contracts/ZkSyncERC20.sol new file mode 100644 index 000000000000..4604a5a97493 --- /dev/null +++ b/etc/ERC20/contracts/ZkSyncERC20.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +import "./interfaces/ERC20.sol"; + +contract ZkSyncERC20 is ERC20 { + constructor( + string memory name, + string memory symbol, + uint8 decimals + ) ERC20(name, symbol) { + _setupDecimals(decimals); + } +} diff --git a/etc/ERC20/contracts/interfaces/Context.sol b/etc/ERC20/contracts/interfaces/Context.sol new file mode 100644 index 000000000000..df4427b9ebda --- /dev/null +++ b/etc/ERC20/contracts/interfaces/Context.sol @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.0; + +/** + * @dev Provides information about the current execution context, including the + * sender of the transaction and its data. While these are generally available + * via msg.sender and msg.data, they should not be accessed in such a direct + * manner, since when dealing with meta-transactions the account sending and + * paying for execution may not be the actual sender (as far as an application + * is concerned). + * + * This contract is only required for intermediate, library-like contracts. 
+ */ +abstract contract Context { + function _msgSender() internal view virtual returns (address) { + return msg.sender; + } + + function _msgData() internal view virtual returns (bytes calldata) { + return msg.data; + } +} diff --git a/etc/ERC20/contracts/interfaces/ERC20.sol b/etc/ERC20/contracts/interfaces/ERC20.sol new file mode 100644 index 000000000000..f3da1db61a4b --- /dev/null +++ b/etc/ERC20/contracts/interfaces/ERC20.sol @@ -0,0 +1,368 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.0; + +import "./IERC20.sol"; +import "./IERC20Metadata.sol"; +import "./Context.sol"; + +/** + * @dev Implementation of the {IERC20} interface. + * + * This implementation is agnostic to the way tokens are created. This means + * that a supply mechanism has to be added in a derived contract using {_mint}. + * For a generic mechanism see {ERC20PresetMinterPauser}. + * + * TIP: For a detailed writeup see our guide + * https://forum.zeppelin.solutions/t/how-to-implement-erc20-supply-mechanisms/226[How + * to implement supply mechanisms]. + * + * We have followed general OpenZeppelin Contracts guidelines: functions revert + * instead returning `false` on failure. This behavior is nonetheless + * conventional and does not conflict with the expectations of ERC20 + * applications. + * + * Additionally, an {Approval} event is emitted on calls to {transferFrom}. + * This allows applications to reconstruct the allowance for all accounts just + * by listening to said events. Other implementations of the EIP may not emit + * these events, as it isn't required by the specification. + * + * Finally, the non-standard {decreaseAllowance} and {increaseAllowance} + * functions have been added to mitigate the well-known issues around setting + * allowances. See {IERC20-approve}. 
+ */ +contract ERC20 is Context, IERC20, IERC20Metadata { + mapping(address => uint256) private _balances; + + mapping(address => mapping(address => uint256)) private _allowances; + + uint256 private _totalSupply; + + string private _name; + string private _symbol; + + uint8 private _decimals; + + /** + * @dev Sets the values for {name} and {symbol}. + * + * The default value of {decimals} is 18. To select a different value for + * {decimals} you should overload it. + * + * All two of these values are immutable: they can only be set once during + * construction. + */ + constructor(string memory name_, string memory symbol_) { + _name = name_; + _symbol = symbol_; + } + + /** + * @dev Returns the name of the token. + */ + function name() public view virtual override returns (string memory) { + return _name; + } + + /** + * @dev Returns the symbol of the token, usually a shorter version of the + * name. + */ + function symbol() public view virtual override returns (string memory) { + return _symbol; + } + + /** + * @dev Returns the number of decimals used to get its user representation. + * For example, if `decimals` equals `2`, a balance of `505` tokens should + * be displayed to a user as `5.05` (`505 / 10 ** 2`). + * + * Tokens usually opt for a value of 18, imitating the relationship between + * Ether and Wei. This is the value {ERC20} uses, unless this function is + * overridden; + * + * NOTE: This information is only used for _display_ purposes: it in + * no way affects any of the arithmetic of the contract, including + * {IERC20-balanceOf} and {IERC20-transfer}. + */ + function decimals() public view virtual override returns (uint8) { + return _decimals; + } + + /** + * @dev See {IERC20-totalSupply}. + */ + function totalSupply() public view virtual override returns (uint256) { + return _totalSupply; + } + + /** + * @dev See {IERC20-balanceOf}. 
+ */ + function balanceOf(address account) public view virtual override returns (uint256) { + return _balances[account]; + } + + /** + * @dev See {IERC20-transfer}. + * + * Requirements: + * + * - `recipient` cannot be the zero address. + * - the caller must have a balance of at least `amount`. + */ + function transfer(address recipient, uint256 amount) public virtual override returns (bool) { + _transfer(_msgSender(), recipient, amount); + return true; + } + + /** + * @dev See {IERC20-allowance}. + */ + function allowance(address owner, address spender) public view virtual override returns (uint256) { + return _allowances[owner][spender]; + } + + /** + * @dev See {IERC20-approve}. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function approve(address spender, uint256 amount) public virtual override returns (bool) { + _approve(_msgSender(), spender, amount); + return true; + } + + /** + * @dev See {IERC20-transferFrom}. + * + * Emits an {Approval} event indicating the updated allowance. This is not + * required by the EIP. See the note at the beginning of {ERC20}. + * + * Requirements: + * + * - `sender` and `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. + * - the caller must have allowance for ``sender``'s tokens of at least + * `amount`. + */ + function transferFrom( + address sender, + address recipient, + uint256 amount + ) public virtual override returns (bool) { + _transfer(sender, recipient, amount); + + uint256 currentAllowance = _allowances[sender][_msgSender()]; + require(currentAllowance >= amount, "ERC20: transfer amount exceeds allowance"); + unchecked { + _approve(sender, _msgSender(), currentAllowance - amount); + } + + return true; + } + + /** + * @dev Atomically increases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. 
+ * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + */ + function increaseAllowance(address spender, uint256 addedValue) public virtual returns (bool) { + _approve(_msgSender(), spender, _allowances[_msgSender()][spender] + addedValue); + return true; + } + + /** + * @dev Atomically decreases the allowance granted to `spender` by the caller. + * + * This is an alternative to {approve} that can be used as a mitigation for + * problems described in {IERC20-approve}. + * + * Emits an {Approval} event indicating the updated allowance. + * + * Requirements: + * + * - `spender` cannot be the zero address. + * - `spender` must have allowance for the caller of at least + * `subtractedValue`. + */ + function decreaseAllowance(address spender, uint256 subtractedValue) public virtual returns (bool) { + uint256 currentAllowance = _allowances[_msgSender()][spender]; + require(currentAllowance >= subtractedValue, "ERC20: decreased allowance below zero"); + unchecked { + _approve(_msgSender(), spender, currentAllowance - subtractedValue); + } + + return true; + } + + /** + * @dev Moves `amount` of tokens from `sender` to `recipient`. + * + * This internal function is equivalent to {transfer}, and can be used to + * e.g. implement automatic token fees, slashing mechanisms, etc. + * + * Emits a {Transfer} event. + * + * Requirements: + * + * - `sender` cannot be the zero address. + * - `recipient` cannot be the zero address. + * - `sender` must have a balance of at least `amount`. 
+ */ + function _transfer( + address sender, + address recipient, + uint256 amount + ) public /*internal*/ virtual { + require(sender != address(0), "ERC20: transfer from the zero address"); + require(recipient != address(0), "ERC20: transfer to the zero address"); + + _beforeTokenTransfer(sender, recipient, amount); + + uint256 senderBalance = _balances[sender]; + require(senderBalance >= amount, "ERC20: transfer amount exceeds balance"); + unchecked { + _balances[sender] = senderBalance - amount; + } + _balances[recipient] += amount; + + emit Transfer(sender, recipient, amount); + + _afterTokenTransfer(sender, recipient, amount); + } + + /** @dev Creates `amount` tokens and assigns them to `account`, increasing + * the total supply. + * + * Emits a {Transfer} event with `from` set to the zero address. + * + * Requirements: + * + * - `account` cannot be the zero address. + */ + function _mint(address account, uint256 amount) public /*internal*/ virtual { + require(account != address(0), "ERC20: mint to the zero address"); + + _beforeTokenTransfer(address(0), account, amount); + + _totalSupply += amount; + _balances[account] += amount; + emit Transfer(address(0), account, amount); + + _afterTokenTransfer(address(0), account, amount); + } + + /** + * @dev Destroys `amount` tokens from `account`, reducing the + * total supply. + * + * Emits a {Transfer} event with `to` set to the zero address. + * + * Requirements: + * + * - `account` cannot be the zero address. + * - `account` must have at least `amount` tokens. 
+ */ + function _burn(address account, uint256 amount) public /*internal*/ virtual { + require(account != address(0), "ERC20: burn from the zero address"); + + _beforeTokenTransfer(account, address(0), amount); + + uint256 accountBalance = _balances[account]; + require(accountBalance >= amount, "ERC20: burn amount exceeds balance"); + unchecked { + _balances[account] = accountBalance - amount; + } + _totalSupply -= amount; + + emit Transfer(account, address(0), amount); + + _afterTokenTransfer(account, address(0), amount); + } + + /** + * @dev Sets `amount` as the allowance of `spender` over the `owner` s tokens. + * + * This internal function is equivalent to `approve`, and can be used to + * e.g. set automatic allowances for certain subsystems, etc. + * + * Emits an {Approval} event. + * + * Requirements: + * + * - `owner` cannot be the zero address. + * - `spender` cannot be the zero address. + */ + function _approve( + address owner, + address spender, + uint256 amount + ) public /*internal*/ virtual { + require(owner != address(0), "ERC20: approve from the zero address"); + require(spender != address(0), "ERC20: approve to the zero address"); + + _allowances[owner][spender] = amount; + emit Approval(owner, spender, amount); + } + + /** + * @dev Hook that is called before any transfer of tokens. This includes + * minting and burning. + * + * Calling conditions: + * + * - when `from` and `to` are both non-zero, `amount` of ``from``'s tokens + * will be transferred to `to`. + * - when `from` is zero, `amount` tokens will be minted for `to`. + * - when `to` is zero, `amount` of ``from``'s tokens will be burned. + * - `from` and `to` are never both zero. + * + * To learn more about hooks, head to xref:ROOT:extending-contracts.adoc#using-hooks[Using Hooks]. + */ + function _beforeTokenTransfer( + address from, + address to, + uint256 amount + ) public /*internal*/ virtual {} + + /** + * @dev Hook that is called after any transfer of tokens. 
This includes + * minting and burning. + * + * Calling conditions: + * + * - when `from` and `to` are both non-zero, `amount` of ``from``'s tokens + * has been transferred to `to`. + * - when `from` is zero, `amount` tokens have been minted for `to`. + * - when `to` is zero, `amount` of ``from``'s tokens have been burned. + * - `from` and `to` are never both zero. + * + * To learn more about hooks, head to xref:ROOT:extending-contracts.adoc#using-hooks[Using Hooks]. + */ + function _afterTokenTransfer( + address from, + address to, + uint256 amount + ) public /*internal*/ virtual {} + + /** + * @dev Sets {decimals} to a value other than the default one of 18. + * + * WARNING: This function should only be called from the constructor. Most + * applications that interact with token contracts will not expect + * {decimals} to ever change, and may work incorrectly if it does. + */ + function _setupDecimals(uint8 decimals_) internal { + _decimals = decimals_; + } +} diff --git a/etc/ERC20/contracts/interfaces/IERC20.sol b/etc/ERC20/contracts/interfaces/IERC20.sol new file mode 100644 index 000000000000..2693a9f9ba15 --- /dev/null +++ b/etc/ERC20/contracts/interfaces/IERC20.sol @@ -0,0 +1,81 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.0; + +/** + * @dev Interface of the ERC20 standard as defined in the EIP. + */ +interface IERC20 { + /** + * @dev Returns the amount of tokens in existence. + */ + function totalSupply() external view returns (uint256); + + /** + * @dev Returns the amount of tokens owned by `account`. + */ + function balanceOf(address account) external view returns (uint256); + + /** + * @dev Moves `amount` tokens from the caller's account to `recipient`. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. 
+ */ + function transfer(address recipient, uint256 amount) external returns (bool); + + /** + * @dev Returns the remaining number of tokens that `spender` will be + * allowed to spend on behalf of `owner` through {transferFrom}. This is + * zero by default. + * + * This value changes when {approve} or {transferFrom} are called. + */ + function allowance(address owner, address spender) external view returns (uint256); + + /** + * @dev Sets `amount` as the allowance of `spender` over the caller's tokens. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * IMPORTANT: Beware that changing an allowance with this method brings the risk + * that someone may use both the old and the new allowance by unfortunate + * transaction ordering. One possible solution to mitigate this race + * condition is to first reduce the spender's allowance to 0 and set the + * desired value afterwards: + * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + * + * Emits an {Approval} event. + */ + function approve(address spender, uint256 amount) external returns (bool); + + /** + * @dev Moves `amount` tokens from `sender` to `recipient` using the + * allowance mechanism. `amount` is then deducted from the caller's + * allowance. + * + * Returns a boolean value indicating whether the operation succeeded. + * + * Emits a {Transfer} event. + */ + function transferFrom( + address sender, + address recipient, + uint256 amount + ) external returns (bool); + + /** + * @dev Emitted when `value` tokens are moved from one account (`from`) to + * another (`to`). + * + * Note that `value` may be zero. + */ + event Transfer(address indexed from, address indexed to, uint256 value); + + /** + * @dev Emitted when the allowance of a `spender` for an `owner` is set by + * a call to {approve}. `value` is the new allowance. 
+ */ + event Approval(address indexed owner, address indexed spender, uint256 value); +} diff --git a/etc/ERC20/contracts/interfaces/IERC20Metadata.sol b/etc/ERC20/contracts/interfaces/IERC20Metadata.sol new file mode 100644 index 000000000000..1846275af4b3 --- /dev/null +++ b/etc/ERC20/contracts/interfaces/IERC20Metadata.sol @@ -0,0 +1,27 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.0; + +import "./IERC20.sol"; + +/** + * @dev Interface for the optional metadata functions from the ERC20 standard. + * + * _Available since v4.1._ + */ +interface IERC20Metadata is IERC20 { + /** + * @dev Returns the name of the token. + */ + function name() external view returns (string memory); + + /** + * @dev Returns the symbol of the token. + */ + function symbol() external view returns (string memory); + + /** + * @dev Returns the decimals places of the token. + */ + function decimals() external view returns (uint8); +} diff --git a/etc/ERC20/hardhat.config.ts b/etc/ERC20/hardhat.config.ts new file mode 100644 index 000000000000..59080306c84e --- /dev/null +++ b/etc/ERC20/hardhat.config.ts @@ -0,0 +1,19 @@ +import '@matterlabs/hardhat-zksync-solc'; + +export default { + zksolc: { + version: '1.3.1', + compilerSource: 'binary', + settings: { + isSystem: true + } + }, + networks: { + hardhat: { + zksync: true + } + }, + solidity: { + version: '0.8.16' + } +}; diff --git a/etc/ERC20/package.json b/etc/ERC20/package.json new file mode 100644 index 000000000000..5c3adca0a349 --- /dev/null +++ b/etc/ERC20/package.json @@ -0,0 +1,11 @@ +{ + "name": "zksync-erc20", + "version": "0.1.0", + "main": "index.js", + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3", + "hardhat": "=2.12.4" + } +} diff --git a/etc/commitment_tests/zksync_testharness_test.json b/etc/commitment_tests/zksync_testharness_test.json new file mode 100644 index 000000000000..3240c3b4d9e7 --- /dev/null +++ 
b/etc/commitment_tests/zksync_testharness_test.json @@ -0,0 +1,74 @@ +{ + "auxiliary_input": { + "l2_l1_logs": [ + { + "shard_id": 0, + "is_service": false, + "tx_number_in_block": 0, + "sender": "0x000000000000000000000000000000000000800b", + "key": "0x000000000000000000000000000000000000000000000000000000006349f8b5", + "value": "0x57a1099a71e51ea9cce8d89b5a9f1741d3a704e5258077c811223a8b604cdc8a" + }, + { + "shard_id": 0, + "is_service": true, + "tx_number_in_block": 0, + "sender": "0x0000000000000000000000000000000000008001", + "key": "0xcb99d29a1b4ffeaefdbf74b8b8b07c78e5e02b3100946f8d0463b79789086aff", + "value": "0x0000000000000000000000000000000000000000000000000000000000000001" + } + + ], + "initial_writes": [ + { + "key": "67072178531597269656886364866735921437190043920019428573160283992739950957275", + "value": "0x57a1099a71e51ea9cce8d89b5a9f1741d3a704e5258077c811223a8b604cdc8a" + }, + { + "key": "79166359714126316814230739348523312986818270679454369433096599654700971101193", + "value": "0x0000000000000000000000000000000000000000000000000000000000000064" + } + ], + "repeated_writes": [ + { "index": 658, "value": "0x0000000000000000000000000000000c0000000000000000000000000000001f" }, + { "index": 61, "value": "0x000000000000000000000000000000000000000000000003aec912ce8057d164" }, + { "index": 62, "value": "0x0000000000000000000000000000000000000000000000000003acf87e3e2200" }, + { "index": 660, "value": "0x00000000000000000000000000000000000000000000000001962afc49a1eb2e" }, + { "index": 40, "value": "0x000000000000000000000000000000690000000000000000000000006349f8b5" }, + { "index": 664, "value": "0x00000000000000000000000000000000000000000000000000d3abb1bfb7be70" } + ] + }, + "meta_parameters": { + "zkporter_is_available": false, + "bootloader_code_hash": "0x0100037723960c07cda7251089daffbdd567476a7e31971ff801568a3856e8e8", + "default_aa_code_hash": "0x010006699c833b654b365f0e3ce866c394626d5e40461a6868809d452738606f" + }, + "pass_through_data": { + 
"shared_states": [ + { + "last_leaf_index": 299, + "root_hash": "0xbf08d89aaedde3696967d5ac74d2733f10ace64c3a492f503f23b7566b37ab17" + }, + { + "last_leaf_index": 0, + "root_hash": "0x0000000000000000000000000000000000000000000000000000000000000000" + } + ] + }, + "expected_outputs": { + "l2_l1_bytes": "0000000200000000000000000000000000000000000000000000800b000000000000000000000000000000000000000000000000000000006349f8b557a1099a71e51ea9cce8d89b5a9f1741d3a704e5258077c811223a8b604cdc8a000100000000000000000000000000000000000000008001cb99d29a1b4ffeaefdbf74b8b8b07c78e5e02b3100946f8d0463b79789086aff0000000000000000000000000000000000000000000000000000000000000001", + "l2_l1_linear_hash": "0x680f578a7b39e9f74385a3aabfb4cf054917f23aea9ae165d2afaac02fc9f3b8", + "l2_l1_root_hash": "0xcb5f7b72ab30095b81e2cd35c308a7a752fe59213475339b8a833e91bf731837", + "initial_writes_bytes": "00000002db1231bec2de6342908165662c0d968bb89db1e63211d92fa9547a7efb81499457a1099a71e51ea9cce8d89b5a9f1741d3a704e5258077c811223a8b604cdc8a098c669256db6fe36d87834ceae9c8161af6b72f8b1543b8a3ffacca5b9206af0000000000000000000000000000000000000000000000000000000000000064", + "repeated_writes_bytes": "0000000600000000000002920000000000000000000000000000000c0000000000000000000000000000001f000000000000003d000000000000000000000000000000000000000000000003aec912ce8057d164000000000000003e0000000000000000000000000000000000000000000000000003acf87e3e2200000000000000029400000000000000000000000000000000000000000000000001962afc49a1eb2e0000000000000028000000000000000000000000000000690000000000000000000000006349f8b5000000000000029800000000000000000000000000000000000000000000000000d3abb1bfb7be70", + "repeated_writes_hash": "0xdc5a883793479c779f5c99b0fca910deb20195d8ccf430afad05a9c2bd9f81bd", + "initial_writes_hash": "0xdcc4877ab0c07a79a16ae34de6fb7971a54128db0d11791fd5064bd6d03076c1", + "pass_through_bytes": 
"000000000000012bbf08d89aaedde3696967d5ac74d2733f10ace64c3a492f503f23b7566b37ab1700000000000000000000000000000000000000000000000000000000000000000000000000000000", + "pass_through_hash": "0x1c695ec7d7944f720a2c0fc6b5651cbd3178967407bc4df579a15985652350e9", + "meta_params_bytes": "000100037723960c07cda7251089daffbdd567476a7e31971ff801568a3856e8e8010006699c833b654b365f0e3ce866c394626d5e40461a6868809d452738606f", + "meta_params_hash": "0x57404e50342edcd09180fb27fa49634676f71a3ce1a76e9b3edf6185bf164082", + "auxiliary_bytes": "cb5f7b72ab30095b81e2cd35c308a7a752fe59213475339b8a833e91bf731837680f578a7b39e9f74385a3aabfb4cf054917f23aea9ae165d2afaac02fc9f3b8dcc4877ab0c07a79a16ae34de6fb7971a54128db0d11791fd5064bd6d03076c1dc5a883793479c779f5c99b0fca910deb20195d8ccf430afad05a9c2bd9f81bd", + "auxiliary_hash": "0x31fff4dc27ee5cbba99aac88a1fd05be00133398b9b7679774663f56b3775dd1", + "commitment_hash": "0x3af4672cd1362badfc0cbc47a7e8b3fbcd3c947055af041b4481bb15009c41a8" + } +} diff --git a/etc/contracts-test-data/README.md b/etc/contracts-test-data/README.md new file mode 100644 index 000000000000..d08f934e8456 --- /dev/null +++ b/etc/contracts-test-data/README.md @@ -0,0 +1,4 @@ +# Contracts test data + +This folder contains data for contracts that are being used for testing to check the correctness of the smart contract +flow in zkSync. 
diff --git a/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol b/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol new file mode 100644 index 000000000000..d2fe2d0eefb9 --- /dev/null +++ b/etc/contracts-test-data/contracts/basic-constructor/basic-constructor.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +contract SimpleConstructor { + uint256 c; + + constructor(uint256 a, uint256 b, bool shouldRevert) { + c = a * b; + require(!shouldRevert, "reverted deploy"); + } + + function get() public view returns (uint256) { + return c; + } +} diff --git a/etc/contracts-test-data/contracts/context/context.sol b/etc/contracts-test-data/contracts/context/context.sol new file mode 100644 index 000000000000..94969ac66f9e --- /dev/null +++ b/etc/contracts-test-data/contracts/context/context.sol @@ -0,0 +1,47 @@ +// SPDX-License-Identifier: UNLICENSED + +pragma solidity ^0.8.0; + +contract Context { + function getBlockNumber() public view returns (uint256) { + return block.number; + } + + function getBlockTimestamp() public view returns (uint256) { + return block.timestamp; + } + + function getBlockGasLimit() public view returns (uint256) { + return block.gaslimit; + } + + function getTxGasPrice() public view returns (uint256) { + return tx.gasprice; + } + + function checkBlockNumber(uint256 fromBlockNumber, uint256 toBlockNumber) public { + require(fromBlockNumber <= block.number && block.number <= toBlockNumber, "block number is out of range"); + } + + function checkBlockTimestamp(uint256 fromTimestamp, uint256 toTimestamp) public { + require(fromTimestamp <= block.timestamp && block.timestamp <= toTimestamp, "block timestamp is out of range"); + } + + function checkTxOrigin(address expectedOrigin) public { + require(tx.origin == expectedOrigin, "tx.origin is invalid"); + } + + function getBaseFee() public view returns (uint256) { + return block.basefee; + } + + function 
requireMsgValue(uint256 _requiredValue) external payable { + require(msg.value == _requiredValue); + } + + uint256 public valueOnCreate; + + constructor() payable { + valueOnCreate = msg.value; + } +} diff --git a/etc/contracts-test-data/contracts/counter/counter.sol b/etc/contracts-test-data/contracts/counter/counter.sol new file mode 100644 index 000000000000..acf613538a22 --- /dev/null +++ b/etc/contracts-test-data/contracts/counter/counter.sol @@ -0,0 +1,22 @@ +// SPDX-License-Identifier: UNLICENSED + +pragma solidity ^0.8.0; + +contract Counter { + uint256 value; + + function increment(uint256 x) public { + value += x; + } + + function incrementWithRevert(uint256 x, bool shouldRevert) public { + value += x; + if(shouldRevert) { + revert("This method always reverts"); + } + } + + function get() public view returns (uint256) { + return value; + } +} diff --git a/etc/contracts-test-data/contracts/create/Foo.sol b/etc/contracts-test-data/contracts/create/Foo.sol new file mode 100644 index 000000000000..1ae4868e5bf6 --- /dev/null +++ b/etc/contracts-test-data/contracts/create/Foo.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.1; +pragma abicoder v2; + +contract Foo { + string public name = "Foo"; +} diff --git a/etc/contracts-test-data/contracts/create/create.sol b/etc/contracts-test-data/contracts/create/create.sol new file mode 100644 index 000000000000..ef03e7c457ce --- /dev/null +++ b/etc/contracts-test-data/contracts/create/create.sol @@ -0,0 +1,17 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.8.1; +pragma abicoder v2; + +// import Foo.sol from current directory +import "./Foo.sol"; + +contract Import { + // Initialize Foo.sol + Foo public foo = new Foo(); + + // Test Foo.sol by getting it's name. 
+ function getFooName() public view returns (string memory) { + return foo.name(); + } +} \ No newline at end of file diff --git a/etc/contracts-test-data/contracts/custom-account/Constants.sol b/etc/contracts-test-data/contracts/custom-account/Constants.sol new file mode 100644 index 000000000000..76d5b000d6f2 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/Constants.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "./interfaces/INonceHolder.sol"; +import "./interfaces/IContractDeployer.sol"; +import "./SystemContext.sol"; + +uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15 + +address constant ECRECOVER_SYSTEM_CONTRACT = address(0x01); +address constant SHA256_SYSTEM_CONTRACT = address(0x02); + +address payable constant BOOTLOADER_FORMAL_ADDRESS = payable(address(SYSTEM_CONTRACTS_OFFSET + 0x01)); +INonceHolder constant NONCE_HOLDER_SYSTEM_CONTRACT = INonceHolder(address(SYSTEM_CONTRACTS_OFFSET + 0x03)); + +// A contract that is allowed to deploy any codehash +// on any address. To be used only during an upgrade. +address constant FORCE_DEPLOYER = address(SYSTEM_CONTRACTS_OFFSET + 0x07); +address constant MSG_VALUE_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x09); +IContractDeployer constant DEPLOYER_SYSTEM_CONTRACT = IContractDeployer(address(SYSTEM_CONTRACTS_OFFSET + 0x06)); + + +address constant KECCAK256_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x10); + +address constant ETH_TOKEN_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x0a); +SystemContext constant SYSTEM_CONTEXT_CONTRACT = SystemContext(address(SYSTEM_CONTRACTS_OFFSET + 0x0b)); + +uint256 constant MAX_SYSTEM_CONTRACT_ADDRESS = 0xffff; + +bytes32 constant DEFAULT_ACCOUNT_CODE_HASH = 0x00; + +// The number of bytes that are published during the contract deployment +// in addition to the bytecode itself. 
+uint256 constant BYTECODE_PUBLISHING_OVERHEAD = 100; + +uint256 constant MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT = 2**128; diff --git a/etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol b/etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol new file mode 100644 index 000000000000..13884b71727b --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/RLPEncoder.sol @@ -0,0 +1,99 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +library RLPEncoder { + function encodeAddress(address _val) internal pure returns (bytes memory encoded) { + // The size is equal to 14 bytes of the address itself + 1 for encoding bytes length in RLP. + encoded = new bytes(0x15); + + bytes20 shiftedVal = bytes20(_val); + assembly { + // In the first byte we write the encoded length as 0x80 + 0x14 == 0x94. + mstore(add(encoded, 0x20), 0x9400000000000000000000000000000000000000000000000000000000000000) + // Write address data without stripping zeros. + mstore(add(encoded, 0x21), shiftedVal) + } + } + + function encodeUint256(uint256 _val) internal pure returns (bytes memory encoded) { + unchecked { + if (_val < 128) { + encoded = new bytes(1); + // Handle zero as a non-value, since stripping zeroes results in an empty byte array + encoded[0] = (_val == 0) ? bytes1(uint8(128)) : bytes1(uint8(_val)); + } else { + uint256 hbs = _highestByteSet(_val); + + encoded = new bytes(hbs + 2); + encoded[0] = bytes1(uint8(hbs + 0x81)); + + uint256 lbs = 31 - hbs; + uint256 shiftedVal = _val << (lbs * 8); + + assembly { + mstore(add(encoded, 0x21), shiftedVal) + } + } + } + } + + /// @notice Encodes the size of bytes in RLP format. + /// NOTE: panics if the length is 1, since the length encoding is ambiguous in this case. + function encodeNonSingleBytesLen(uint256 _len) internal pure returns (bytes memory) { + assert(_len != 1); + return _encodeLength(_len, 0x80); + } + + /// @notice Encodes the size of list items in RLP format. 
+ function encodeListLen(uint256 _len) internal pure returns (bytes memory) { + return _encodeLength(_len, 0xc0); + } + + function _encodeLength(uint256 _len, uint256 _offset) private pure returns (bytes memory encoded) { + unchecked { + if (_len < 56) { + encoded = new bytes(1); + encoded[0] = bytes1(uint8(_len + _offset)); + } else { + uint256 hbs = _highestByteSet(_len); + + encoded = new bytes(hbs + 2); + encoded[0] = bytes1(uint8(_offset + hbs + 56)); + + uint256 lbs = 31 - hbs; + uint256 shiftedVal = _len << (lbs * 8); + + assembly { + mstore(add(encoded, 0x21), shiftedVal) + } + } + } + } + + /// @notice Computes the index of the highest byte set in number. + /// @notice Uses little endian ordering (The least significant byte has index `0`). + /// NOTE: returns `0` for `0` + function _highestByteSet(uint256 _number) private pure returns (uint256 hbs) { + // should be resolver after evaluating the cost of opcodes. + if (_number >= 2**128) { + _number >>= 128; + hbs += 16; + } + if (_number >= 2**64) { + _number >>= 64; + hbs += 8; + } + if (_number >= 2**32) { + _number >>= 32; + hbs += 4; + } + if (_number >= 2**16) { + _number >>= 16; + hbs += 2; + } + if (_number >= 2**8) { + hbs += 1; + } + } +} diff --git a/etc/contracts-test-data/contracts/custom-account/SystemContext.sol b/etc/contracts-test-data/contracts/custom-account/SystemContext.sol new file mode 100644 index 000000000000..dbf81002d516 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/SystemContext.sol @@ -0,0 +1,67 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "./Constants.sol"; + +/** + * @author Matter Labs + * @notice Contract that stores some of the context variables, that may be either + * block-scoped, tx-scoped or system-wide. 
+ */ +contract SystemContext { + modifier onlyBootloader { + require(msg.sender == BOOTLOADER_FORMAL_ADDRESS); + _; + } + + uint256 public chainId = 270; + address public origin; + uint256 public gasPrice; + // Some dummy value, maybe will be possible to change it in the future. + uint256 public blockGasLimit = (1 << 30); + // For the support of coinbase, we will the bootloader formal address for now + address public coinbase = BOOTLOADER_FORMAL_ADDRESS; + // For consistency with other L2s + uint256 public difficulty = 2500000000000000; + uint256 public msize = (1 << 24); + uint256 public baseFee; + + uint256 constant BLOCK_INFO_BLOCK_NUMBER_PART = (1<<128); + // 2^128 * block_number + block_timestamp + uint256 public currentBlockInfo; + + mapping(uint256 => bytes32) public blockHash; + + function setTxOrigin(address _newOrigin) external { + origin = _newOrigin; + } + + function setGasPrice(uint256 _gasPrice) external onlyBootloader { + gasPrice = _gasPrice; + } + + function getBlockHashEVM(uint256 _block) external view returns (bytes32 hash) { + if(block.number < _block || block.number - _block > 256) { + hash = bytes32(0); + } else { + hash = blockHash[_block]; + } + } + + function getBlockNumberAndTimestamp() public view returns (uint256 blockNumber, uint256 blockTimestamp) { + uint256 blockInfo = currentBlockInfo; + blockNumber = blockInfo / BLOCK_INFO_BLOCK_NUMBER_PART; + blockTimestamp = blockInfo % BLOCK_INFO_BLOCK_NUMBER_PART; + } + + // Note, that for now, the implementation of the bootloader allows this variables to + // be incremented multiple times inside a block, so it should not relied upon right now. 
+ function getBlockNumber() public view returns (uint256 blockNumber) { + (blockNumber, ) = getBlockNumberAndTimestamp(); + } + + function getBlockTimestamp() public view returns (uint256 timestamp) { + (, timestamp) = getBlockNumberAndTimestamp(); + } +} diff --git a/etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol b/etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol new file mode 100644 index 000000000000..01b7b5198add --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/SystemContractsCaller.sol @@ -0,0 +1,249 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8; + +import {MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, MSG_VALUE_SYSTEM_CONTRACT} from "./Constants.sol"; +import "./Utils.sol"; + +// Addresses used for the compiler to be replaced with the +// zkSync-specific opcodes during the compilation. +// IMPORTANT: these are just compile-time constants and are used +// only if used in-place by Yul optimizer. 
+address constant TO_L1_CALL_ADDRESS = address((1 << 16) - 1); +address constant CODE_ADDRESS_CALL_ADDRESS = address((1 << 16) - 2); +address constant PRECOMPILE_CALL_ADDRESS = address((1 << 16) - 3); +address constant META_CALL_ADDRESS = address((1 << 16) - 4); +address constant MIMIC_CALL_CALL_ADDRESS = address((1 << 16) - 5); +address constant SYSTEM_MIMIC_CALL_CALL_ADDRESS = address((1 << 16) - 6); +address constant MIMIC_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 7); +address constant SYSTEM_MIMIC_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 8); +address constant RAW_FAR_CALL_CALL_ADDRESS = address((1 << 16) - 9); +address constant RAW_FAR_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 10); +address constant SYSTEM_CALL_CALL_ADDRESS = address((1 << 16) - 11); +address constant SYSTEM_CALL_BY_REF_CALL_ADDRESS = address((1 << 16) - 12); +address constant SET_CONTEXT_VALUE_CALL_ADDRESS = address((1 << 16) - 13); +address constant SET_PUBDATA_PRICE_CALL_ADDRESS = address((1 << 16) - 14); +address constant INCREMENT_TX_COUNTER_CALL_ADDRESS = address((1 << 16) - 15); +address constant PTR_CALLDATA_CALL_ADDRESS = address((1 << 16) - 16); +address constant CALLFLAGS_CALL_ADDRESS = address((1 << 16) - 17); +address constant PTR_RETURNDATA_CALL_ADDRESS = address((1 << 16) - 18); +address constant EVENT_INITIALIZE_ADDRESS = address((1 << 16) - 19); +address constant EVENT_WRITE_ADDRESS = address((1 << 16) - 20); +address constant LOAD_CALLDATA_INTO_ACTIVE_PTR_CALL_ADDRESS = address((1 << 16) - 21); +address constant LOAD_LATEST_RETURNDATA_INTO_ACTIVE_PTR_CALL_ADDRESS = address((1 << 16) - 22); +address constant PTR_ADD_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 23); +address constant PTR_SHRINK_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 24); +address constant PTR_PACK_INTO_ACTIVE_CALL_ADDRESS = address((1 << 16) - 25); +address constant MULTIPLICATION_HIGH_ADDRESS = address((1 << 16) - 26); +address constant GET_EXTRA_ABI_DATA_ADDRESS = address((1 << 16) - 
27); + +// All the offsets are in bits +uint256 constant META_GAS_PER_PUBDATA_BYTE_OFFSET = 0 * 8; +uint256 constant META_HEAP_SIZE_OFFSET = 8 * 8; +uint256 constant META_AUX_HEAP_SIZE_OFFSET = 12 * 8; +uint256 constant META_SHARD_ID_OFFSET = 28 * 8; +uint256 constant META_CALLER_SHARD_ID_OFFSET = 29 * 8; +uint256 constant META_CODE_SHARD_ID_OFFSET = 30 * 8; + +/// @notice The way to forward the calldata: +/// - Use the current heap (i.e. the same as on EVM). +/// - Use the auxiliary heap. +/// - Forward via a pointer +/// @dev Note, that currently, users do not have access to the auxiliary +/// heap and so the only type of forwarding that will be used by the users +/// are UseHeap and ForwardFatPointer for forwarding a slice of the current calldata +/// to the next call. +enum CalldataForwardingMode { + UseHeap, + ForwardFatPointer, + UseAuxHeap +} + +/** + * @author Matter Labs + * @notice A library that allows calling contracts with the `isSystem` flag. + * @dev It is needed to call ContractDeployer and NonceHolder. + */ +library SystemContractsCaller { + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. + /// @return success Whether the transaction has been successful. + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. 
+ function systemCall( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bool success) { + address callAddr = SYSTEM_CALL_CALL_ADDRESS; + + uint32 dataStart; + assembly { + dataStart := add(data, 0x20) + } + uint32 dataLength = uint32(Utils.safeCastToU32(data.length)); + + uint256 farCallAbi = SystemContractsCaller.getFarCallABI( + 0, + 0, + dataStart, + dataLength, + gasLimit, + // Only rollup is supported for now + 0, + CalldataForwardingMode.UseHeap, + false, + true + ); + + if (value == 0) { + // Doing the system call directly + assembly { + success := call(to, callAddr, 0, 0, farCallAbi, 0, 0) + } + } else { + require(value <= MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, "Value can not be greater than 2**128"); + // We must direct the call through the MSG_VALUE_SIMULATOR + // The first abi param for the MSG_VALUE_SIMULATOR carries + // the value of the call and whether the call should be a system one + // (in our case, it should be) + uint256 abiParam1 = (MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT | value); + + // The second abi param carries the address to call. + uint256 abiParam2 = uint256(uint160(to)); + + address msgValueSimulator = MSG_VALUE_SYSTEM_CONTRACT; + assembly { + success := call(msgValueSimulator, callAddr, abiParam1, abiParam2, farCallAbi, 0, 0) + } + } + } + + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. + /// @return success Whether the transaction has been successful. + /// @return returnData The returndata of the transaction (revert reason in case the transaction has failed). + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. 
+ function systemCallWithReturndata( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bool success, bytes memory returnData) { + success = systemCall(gasLimit, to, value, data); + + uint256 size; + assembly { + size := returndatasize() + } + + returnData = new bytes(size); + assembly { + returndatacopy(add(returnData, 0x20), 0, size) + } + } + + /// @notice Makes a call with the `isSystem` flag. + /// @param gasLimit The gas limit for the call. + /// @param to The address to call. + /// @param value The value to pass with the transaction. + /// @param data The calldata. + /// @return returnData The returndata of the transaction. In case the transaction reverts, the error + /// bubbles up to the parent frame. + /// @dev Note, that the `isSystem` flag can only be set when calling system contracts. + function systemCallWithPropagatedRevert( + uint32 gasLimit, + address to, + uint128 value, + bytes memory data + ) internal returns (bytes memory returnData) { + bool success; + (success, returnData) = systemCallWithReturndata(gasLimit, to, value, data); + + if(!success) { + assembly { + let size := mload(returnData) + revert(add(returnData, 0x20), size) + } + } + } + + /// @notice Calculates the packed representation of the FarCallABI. + /// @param dataOffset Calldata offset in memory. Provide 0 unless using custom pointer. + /// @param memoryPage Memory page to use. Provide 0 unless using custom pointer. + /// @param dataStart The start of the calldata slice. Provide the offset in memory + /// if not using custom pointer. + /// @param dataLength The calldata length. Provide the length of the calldata in bytes + /// unless using custom pointer. + /// @param gasPassed The gas to pass with the call. + /// @param shardId Of the account to call. Currently only 0 is supported. 
+ /// @param forwardingMode The forwarding mode to use: + /// - provide CalldataForwardingMode.UseHeap when using your current memory + /// - provide CalldataForwardingMode.ForwardFatPointer when using custom pointer. + /// @param isConstructorCall Whether the call will be a call to the constructor + /// (ignored when the caller is not a system contract). + /// @param isSystemCall Whether the call will have the `isSystem` flag. + /// @return farCallAbi The far call ABI. + /// @dev The `FarCallABI` has the following structure: + /// pub struct FarCallABI { + /// pub memory_quasi_fat_pointer: FatPointer, + /// pub gas_passed: u32, + /// pub shard_id: u8, + /// pub forwarding_mode: FarCallForwardPageType, + /// pub constructor_call: bool, + /// pub to_system: bool, + /// } + /// + /// The FatPointer struct: + /// + /// pub struct FatPointer { + /// pub offset: u32, // offset relative to `start` + /// pub memory_page: u32, // memory page where slice is located + /// pub start: u32, // absolute start of the slice + /// pub length: u32, // length of the slice + /// } + /// + /// @dev Note, that the actual layout is the following: + /// + /// [0..32) bits -- the calldata offset + /// [32..64) bits -- the memory page to use. Can be left blank in most of the cases. + /// [64..96) bits -- the absolute start of the slice + /// [96..128) bits -- the length of the slice. + /// [128..192) bits -- empty bits. + /// [192..224) bits -- gasPassed. + /// [224..232) bits -- shard id. 
+ /// [232..240) bits -- forwarding_mode + /// [240..248) bits -- constructor call flag + /// [248..256] bits -- system call flag + function getFarCallABI( + uint32 dataOffset, + uint32 memoryPage, + uint32 dataStart, + uint32 dataLength, + uint32 gasPassed, + uint8 shardId, + CalldataForwardingMode forwardingMode, + bool isConstructorCall, + bool isSystemCall + ) internal pure returns (uint256 farCallAbi) { + farCallAbi |= dataOffset; + farCallAbi |= (uint256(memoryPage) << 32); + farCallAbi |= (uint256(dataStart) << 64); + farCallAbi |= (uint256(dataLength) << 96); + farCallAbi |= (uint256(gasPassed) << 192); + farCallAbi |= (uint256(shardId) << 224); + farCallAbi |= (uint256(forwardingMode) << 232); + if (isConstructorCall) { + farCallAbi |= (1 << 240); + } + if (isSystemCall) { + farCallAbi |= (1 << 248); + } + } +} diff --git a/etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol b/etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol new file mode 100644 index 000000000000..7097097437c5 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/TransactionHelper.sol @@ -0,0 +1,467 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; +import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; + +import "./interfaces/IPaymasterFlow.sol"; +import "./interfaces/IContractDeployer.sol"; +import {ETH_TOKEN_SYSTEM_CONTRACT, BOOTLOADER_FORMAL_ADDRESS} from "./Constants.sol"; +import "./RLPEncoder.sol"; + +/// @dev The type id of zkSync's EIP-712-signed transaction. +uint8 constant EIP_712_TX_TYPE = 0x71; + +/// @dev The type id of legacy transactions. +uint8 constant LEGACY_TX_TYPE = 0x0; +/// @dev The type id of legacy transactions. +uint8 constant EIP_2930_TX_TYPE = 0x01; +/// @dev The type id of EIP1559 transactions. +uint8 constant EIP_1559_TX_TYPE = 0x02; + +/// @notice Structure used to represent zkSync transaction. 
struct Transaction {
    // The type of the transaction.
    uint256 txType;
    // The caller.
    uint256 from;
    // The callee.
    uint256 to;
    // The gasLimit to pass with the transaction.
    // It has the same meaning as Ethereum's gasLimit.
    uint256 gasLimit;
    // The maximum amount of gas the user is willing to pay for a byte of pubdata.
    uint256 gasPerPubdataByteLimit;
    // The maximum fee per gas that the user is willing to pay.
    // It is akin to EIP1559's maxFeePerGas.
    uint256 maxFeePerGas;
    // The maximum priority fee per gas that the user is willing to pay.
    // It is akin to EIP1559's maxPriorityFeePerGas.
    uint256 maxPriorityFeePerGas;
    // The transaction's paymaster. If there is no paymaster, it is equal to 0.
    uint256 paymaster;
    // The nonce of the transaction.
    uint256 nonce;
    // The value to pass with the transaction.
    uint256 value;
    // In the future, we might want to add some
    // new fields to the struct. The `txData` struct
    // is to be passed to the account and any changes to its structure
    // would mean a breaking change to these accounts. In order to prevent this,
    // we should keep some fields as "reserved".
    // It is also recommended that their length is fixed, since
    // it would allow easier proof integration (in case we will need
    // some special circuit for preprocessing transactions).
    uint256[4] reserved;
    // The transaction's calldata.
    bytes data;
    // The signature of the transaction.
    bytes signature;
    // The properly formatted hashes of bytecodes that must be published on L1
    // with the inclusion of this transaction. Note that if a bytecode has been published
    // before, the user won't pay fees for its republishing.
    bytes32[] factoryDeps;
    // The input to the paymaster.
    bytes paymasterInput;
    // Reserved dynamic type for future use-cases. Using it should be avoided,
    // but it is still here, just in case we want to enable some additional functionality.
    bytes reservedDynamic;
}

/**
 * @author Matter Labs
 * @notice Library is used to help custom accounts to work with common methods for the Transaction type.
 */
library TransactionHelper {
    using SafeERC20 for IERC20;

    /// @notice The EIP-712 typehash for the contract's domain
    bytes32 constant EIP712_DOMAIN_TYPEHASH =
        keccak256("EIP712Domain(string name,string version,uint256 chainId)");

    /// @notice The EIP-712 typehash for the zkSync-native transaction type.
    bytes32 constant EIP712_TRANSACTION_TYPE_HASH =
        keccak256(
            "Transaction(uint256 txType,uint256 from,uint256 to,uint256 gasLimit,uint256 gasPerPubdataByteLimit,uint256 maxFeePerGas,uint256 maxPriorityFeePerGas,uint256 paymaster,uint256 nonce,uint256 value,bytes data,bytes32[] factoryDeps,bytes paymasterInput)"
        );

    /// @notice Whether the token is Ether.
    /// @param _addr The address of the token
    /// @return `true` or `false` based on whether the token is Ether.
    /// @dev This method treats an address as Ether either if the address is 0 (for convenience)
    /// or if the address is the address of the L2EthToken system contract.
    function isEthToken(uint256 _addr) internal pure returns (bool) {
        return
            _addr == uint256(uint160(address(ETH_TOKEN_SYSTEM_CONTRACT))) ||
            _addr == 0;
    }

    /// @notice Calculate the suggested signed hash of the transaction,
    /// i.e. the hash that is signed by EOAs and is recommended to be signed by other accounts.
    function encodeHash(Transaction calldata _transaction)
        internal
        view
        returns (bytes32 resultHash)
    {
        if (_transaction.txType == LEGACY_TX_TYPE) {
            resultHash = _encodeHashLegacyTransaction(_transaction);
        } else if (_transaction.txType == EIP_712_TX_TYPE) {
            resultHash = _encodeHashEIP712Transaction(_transaction);
        } else if (_transaction.txType == EIP_1559_TX_TYPE) {
            resultHash = _encodeHashEIP1559Transaction(_transaction);
        } else if (_transaction.txType == EIP_2930_TX_TYPE) {
            resultHash = _encodeHashEIP2930Transaction(_transaction);
        } else {
            // Currently no other transaction types are supported.
            // Any new transaction types will be processed in a similar manner.
            revert("Encoding unsupported tx");
        }
    }

    /// @notice Encode hash of the zkSync native transaction type.
    /// @return keccak256 hash of the EIP-712 encoded representation of transaction
    function _encodeHashEIP712Transaction(Transaction calldata _transaction)
        private
        view
        returns (bytes32)
    {
        bytes32 structHash = keccak256(
            abi.encode(
                EIP712_TRANSACTION_TYPE_HASH,
                _transaction.txType,
                _transaction.from,
                _transaction.to,
                _transaction.gasLimit,
                _transaction.gasPerPubdataByteLimit,
                _transaction.maxFeePerGas,
                _transaction.maxPriorityFeePerGas,
                _transaction.paymaster,
                _transaction.nonce,
                _transaction.value,
                keccak256(_transaction.data),
                keccak256(abi.encodePacked(_transaction.factoryDeps)),
                keccak256(_transaction.paymasterInput)
            )
        );

        // Domain separator uses name "zkSync", version "2" and the current chain id (no verifyingContract).
        bytes32 domainSeparator = keccak256(
            abi.encode(
                EIP712_DOMAIN_TYPEHASH,
                keccak256("zkSync"),
                keccak256("2"),
                block.chainid
            )
        );

        return
            keccak256(
                abi.encodePacked("\x19\x01", domainSeparator, structHash)
            );
    }

    /// @notice Encode hash of the legacy transaction type.
    /// @return keccak256 of the serialized RLP encoded representation of transaction
    function _encodeHashLegacyTransaction(Transaction calldata _transaction)
        private
        view
        returns (bytes32)
    {
        // Hash of legacy transactions are encoded as one of the:
        // - RLP(nonce, gasPrice, gasLimit, to, value, data, chainId, 0, 0)
        // - RLP(nonce, gasPrice, gasLimit, to, value, data)
        //
        // Only the first (EIP-155) form of the list above is produced here (when `reserved[0]`
        // carries a chain id); we encode each element of the list and then prepend the total
        // length of all elements.

        bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce);
        // Encode `gasPrice` and `gasLimit` together to prevent "stack too deep error".
        bytes memory encodedGasParam;
        {
            bytes memory encodedGasPrice = RLPEncoder.encodeUint256(
                _transaction.maxFeePerGas
            );
            bytes memory encodedGasLimit = RLPEncoder.encodeUint256(
                _transaction.gasLimit
            );
            encodedGasParam = bytes.concat(encodedGasPrice, encodedGasLimit);
        }

        bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to)));
        bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value);
        // Encode only the length of the transaction data, and not the data itself,
        // so as not to copy to memory a potentially huge transaction data twice.
        bytes memory encodedDataLength;
        {
            // Safe cast, because the length of the transaction data can't be so large.
            uint64 txDataLen = uint64(_transaction.data.length);
            if (txDataLen != 1) {
                // If the length is not equal to one, the data can always be encoded
                // unambiguously using just its length prefix.
                encodedDataLength = RLPEncoder.encodeNonSingleBytesLen(
                    txDataLen
                );
            } else if (_transaction.data[0] >= 0x80) {
                // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenate 0x81 with the byte.
                encodedDataLength = hex"81";
            }
            // Otherwise the length is not encoded at all.
        }

        // Encode `chainId` according to EIP-155, but only if the `chainId` is specified in the transaction.
        bytes memory encodedChainId;
        if (_transaction.reserved[0] != 0) {
            // hex"80_80" is the RLP encoding of the two trailing zero elements (r = 0, s = 0).
            encodedChainId = bytes.concat(RLPEncoder.encodeUint256(block.chainid), hex"80_80");
        }

        bytes memory encodedListLength;
        unchecked {
            uint256 listLength = encodedNonce.length +
                encodedGasParam.length +
                encodedTo.length +
                encodedValue.length +
                encodedDataLength.length +
                _transaction.data.length +
                encodedChainId.length;

            // Safe cast, because the length of the list can't be so large.
            encodedListLength = RLPEncoder.encodeListLen(uint64(listLength));
        }

        return
            keccak256(
                bytes.concat(
                    encodedListLength,
                    encodedNonce,
                    encodedGasParam,
                    encodedTo,
                    encodedValue,
                    encodedDataLength,
                    _transaction.data,
                    encodedChainId
                )
            );
    }

    /// @notice Encode hash of the EIP2930 transaction type.
    /// @return keccak256 of the serialized RLP encoded representation of transaction
    function _encodeHashEIP2930Transaction(Transaction calldata _transaction)
        private
        view
        returns (bytes32)
    {
        // Hash of EIP2930 transactions is encoded the following way:
        // H(0x01 || RLP(chain_id, nonce, gas_price, gas_limit, destination, amount, data, access_list))
        //
        // Note that on zkSync access lists are not supported and should always be empty.

        // Encode all fixed-length params to avoid "stack too deep error"
        bytes memory encodedFixedLengthParams;
        {
            bytes memory encodedChainId = RLPEncoder.encodeUint256(block.chainid);
            bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce);
            bytes memory encodedGasPrice = RLPEncoder.encodeUint256(_transaction.maxFeePerGas);
            bytes memory encodedGasLimit = RLPEncoder.encodeUint256(_transaction.gasLimit);
            bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to)));
            bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value);
            encodedFixedLengthParams = bytes.concat(
                encodedChainId,
                encodedNonce,
                encodedGasPrice,
                encodedGasLimit,
                encodedTo,
                encodedValue
            );
        }

        // Encode only the length of the transaction data, and not the data itself,
        // so as not to copy to memory a potentially huge transaction data twice.
        bytes memory encodedDataLength;
        {
            // Safe cast, because the length of the transaction data can't be so large.
            uint64 txDataLen = uint64(_transaction.data.length);
            if (txDataLen != 1) {
                // If the length is not equal to one, the data can always be encoded
                // unambiguously using just its length prefix.
                encodedDataLength = RLPEncoder.encodeNonSingleBytesLen(
                    txDataLen
                );
            } else if (_transaction.data[0] >= 0x80) {
                // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenate 0x81 with the byte.
                encodedDataLength = hex"81";
            }
            // Otherwise the length is not encoded at all.
        }

        // On zkSync, access lists are always zero length (at least for now).
        bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0);

        bytes memory encodedListLength;
        unchecked {
            uint256 listLength = encodedFixedLengthParams.length +
                encodedDataLength.length +
                _transaction.data.length +
                encodedAccessListLength.length;

            // Safe cast, because the length of the list can't be so large.
            encodedListLength = RLPEncoder.encodeListLen(uint64(listLength));
        }

        return
            keccak256(
                bytes.concat(
                    "\x01",
                    encodedListLength,
                    encodedFixedLengthParams,
                    encodedDataLength,
                    _transaction.data,
                    encodedAccessListLength
                )
            );
    }

    /// @notice Encode hash of the EIP1559 transaction type.
    /// @return keccak256 of the serialized RLP encoded representation of transaction
    function _encodeHashEIP1559Transaction(Transaction calldata _transaction)
        private
        view
        returns (bytes32)
    {
        // Hash of EIP1559 transactions is encoded the following way:
        // H(0x02 || RLP(chain_id, nonce, max_priority_fee_per_gas, max_fee_per_gas, gas_limit, destination, amount, data, access_list))
        //
        // Note that on zkSync access lists are not supported and should always be empty.

        // Encode all fixed-length params to avoid "stack too deep error"
        bytes memory encodedFixedLengthParams;
        {
            bytes memory encodedChainId = RLPEncoder.encodeUint256(block.chainid);
            bytes memory encodedNonce = RLPEncoder.encodeUint256(_transaction.nonce);
            bytes memory encodedMaxPriorityFeePerGas = RLPEncoder.encodeUint256(_transaction.maxPriorityFeePerGas);
            bytes memory encodedMaxFeePerGas = RLPEncoder.encodeUint256(_transaction.maxFeePerGas);
            bytes memory encodedGasLimit = RLPEncoder.encodeUint256(_transaction.gasLimit);
            bytes memory encodedTo = RLPEncoder.encodeAddress(address(uint160(_transaction.to)));
            bytes memory encodedValue = RLPEncoder.encodeUint256(_transaction.value);
            encodedFixedLengthParams = bytes.concat(
                encodedChainId,
                encodedNonce,
                encodedMaxPriorityFeePerGas,
                encodedMaxFeePerGas,
                encodedGasLimit,
                encodedTo,
                encodedValue
            );
        }

        // Encode only the length of the transaction data, and not the data itself,
        // so as not to copy to memory a potentially huge transaction data twice.
        bytes memory encodedDataLength;
        {
            // Safe cast, because the length of the transaction data can't be so large.
            uint64 txDataLen = uint64(_transaction.data.length);
            if (txDataLen != 1) {
                // If the length is not equal to one, the data can always be encoded
                // unambiguously using just its length prefix.
                encodedDataLength = RLPEncoder.encodeNonSingleBytesLen(
                    txDataLen
                );
            } else if (_transaction.data[0] >= 0x80) {
                // If input is a byte in [0x80, 0xff] range, RLP encoding will concatenate 0x81 with the byte.
                encodedDataLength = hex"81";
            }
            // Otherwise the length is not encoded at all.
        }

        // On zkSync, access lists are always zero length (at least for now).
        bytes memory encodedAccessListLength = RLPEncoder.encodeListLen(0);

        bytes memory encodedListLength;
        unchecked {
            uint256 listLength = encodedFixedLengthParams.length +
                encodedDataLength.length +
                _transaction.data.length +
                encodedAccessListLength.length;

            // Safe cast, because the length of the list can't be so large.
            encodedListLength = RLPEncoder.encodeListLen(uint64(listLength));
        }

        return
            keccak256(
                bytes.concat(
                    "\x02",
                    encodedListLength,
                    encodedFixedLengthParams,
                    encodedDataLength,
                    _transaction.data,
                    encodedAccessListLength
                )
            );
    }

    /// @notice Processes the common paymaster flows, e.g. setting proper allowance
    /// for tokens, etc. For more information on the expected behavior, check out
    /// the "Paymaster flows" section in the documentation.
    function processPaymasterInput(Transaction calldata _transaction) internal {
        require(
            _transaction.paymasterInput.length >= 4,
            "The standard paymaster input must be at least 4 bytes long"
        );

        bytes4 paymasterInputSelector = bytes4(
            _transaction.paymasterInput[0:4]
        );
        if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) {
            require(
                _transaction.paymasterInput.length >= 68,
                "The approvalBased paymaster input must be at least 68 bytes long"
            );

            // While the actual data consists of address, uint256 and bytes data,
            // the data is needed only for the paymaster, so we ignore it here for the sake of optimization
            (address token, uint256 minAllowance) = abi.decode(
                _transaction.paymasterInput[4:68],
                (address, uint256)
            );
            address paymaster = address(uint160(_transaction.paymaster));

            uint256 currentAllowance = IERC20(token).allowance(
                address(this),
                paymaster
            );
            if (currentAllowance < minAllowance) {
                // Some tokens, e.g. USDT require that the allowance is first set to zero
                // and only then updated to the new value.

                IERC20(token).safeApprove(paymaster, 0);
                IERC20(token).safeApprove(paymaster, minAllowance);
            }
        } else if (paymasterInputSelector == IPaymasterFlow.general.selector) {
            // Do nothing. general(bytes) paymaster flow means that the paymaster must interpret these bytes on its own.
        } else {
            revert("Unsupported paymaster flow");
        }
    }

    /// @notice Pays the required fee for the transaction to the bootloader.
    /// @dev Currently it pays the maximum amount "_transaction.maxFeePerGas * _transaction.gasLimit",
    /// it will change in the future.
    function payToTheBootloader(Transaction calldata _transaction)
        internal
        returns (bool success)
    {
        address bootloaderAddr = BOOTLOADER_FORMAL_ADDRESS;
        uint256 amount = _transaction.maxFeePerGas * _transaction.gasLimit;

        // Low-level call with empty calldata; `success` reflects whether the transfer succeeded.
        assembly {
            success := call(gas(), bootloaderAddr, amount, 0, 0, 0, 0)
        }
    }

    /// @notice Returns the balance required to process the transaction.
    function totalRequiredBalance(Transaction calldata _transaction) internal pure returns (uint256 requiredBalance) {
        if(address(uint160(_transaction.paymaster)) != address(0)) {
            // Paymaster pays for the fee
            requiredBalance = _transaction.value;
        } else {
            // The user should have enough balance for both the fee and the value of the transaction
            requiredBalance = _transaction.maxFeePerGas * _transaction.gasLimit + _transaction.value;
        }
    }
}
diff --git a/etc/contracts-test-data/contracts/custom-account/Utils.sol b/etc/contracts-test-data/contracts/custom-account/Utils.sol
new file mode 100644
index 000000000000..da3d4eb60878
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/Utils.sol
@@ -0,0 +1,38 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity >=0.8.0;

/**
 * @author Matter Labs
 * @dev Common utilities used in zkSync system contracts
 */
library Utils {
    /// @dev Reverts with "Overflow" if `_x` does not fit into uint128.
    function safeCastToU128(uint256 _x) internal pure returns (uint128) {
        require(_x <= type(uint128).max, "Overflow");

        return uint128(_x);
    }

    /// @dev Reverts with "Overflow" if `_x` does not fit into uint32.
    function safeCastToU32(uint256 _x) internal pure returns (uint32) {
        require(_x <= type(uint32).max, "Overflow");

        return uint32(_x);
    }

    /// @dev Reverts with "Overflow" if `_x` does not fit into uint24.
    function safeCastToU24(uint256 _x) internal pure returns (uint24) {
        require(_x <= type(uint24).max, "Overflow");

        return uint24(_x);
    }

    /// @return codeLength The bytecode length in bytes
    function bytecodeLenInBytes(bytes32 _bytecodeHash) internal pure returns (uint256 codeLength) {
        codeLength = bytecodeLenInWords(_bytecodeHash) << 5; // words * 32 bytes per word
    }

    /// @return codeLengthInWords The bytecode length in machine words
    function bytecodeLenInWords(bytes32 _bytecodeHash) internal pure returns (uint256 codeLengthInWords) {
        unchecked {
            // The word count is stored big-endian in bytes 2 and 3 of the versioned bytecode hash.
            codeLengthInWords = uint256(uint8(_bytecodeHash[2])) * 256 + uint256(uint8(_bytecodeHash[3]));
        }
    }
}
diff --git a/etc/contracts-test-data/contracts/custom-account/custom-account.sol b/etc/contracts-test-data/contracts/custom-account/custom-account.sol
new file mode 100644
index 000000000000..0cb105c5c666
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/custom-account.sol
@@ -0,0 +1,106 @@
// SPDX-License-Identifier: MIT OR Apache-2.0

pragma solidity ^0.8.0;

import './Constants.sol';
import './TransactionHelper.sol';

import './SystemContractsCaller.sol';

import './interfaces/IAccount.sol';

contract CustomAccount is IAccount {
    using TransactionHelper for Transaction;

    // When true, validation deliberately breaks the AA validation rules
    // (it reads another account's balance) so tests can check rejection.
    bool public violateValidationRules;

    bytes32 public lastTxHash;

    constructor(bool _violateValidationRules) {
        violateValidationRules = _violateValidationRules;
    }

    // bytes4(keccak256("isValidSignature(bytes32,bytes)"))
    bytes4 constant EIP1271_SUCCESS_RETURN_VALUE = 0x1626ba7e;

    function validateTransaction(bytes32 _txHash, bytes32 _suggestedSignedTxHash, Transaction calldata _transaction) external payable override returns (bytes4 magic) {
        // By default we consider the transaction as successful
        magic = VALIDATION_SUCCESS_MAGIC;

        _validateTransaction(_suggestedSignedTxHash, _transaction);
        lastTxHash = _txHash;

        if (violateValidationRules) {
            // Such a tx should not pass the validation step, because it depends on the balance of another account
            require(BOOTLOADER_FORMAL_ADDRESS.balance == 0, "Bootloader balance must be zero");
        }
    }

    function _validateTransaction(bytes32 _suggestedSignedTxHash, Transaction calldata _transaction) internal {
        if (_suggestedSignedTxHash == bytes32(0)) {
            // No suggested hash supplied (e.g. fee estimation) — derive it from the transaction.
            _suggestedSignedTxHash = _transaction.encodeHash();
        }

        SystemContractsCaller.systemCallWithPropagatedRevert(
            uint32(gasleft()),
            address(NONCE_HOLDER_SYSTEM_CONTRACT),
            0,
            abi.encodeCall(INonceHolder.incrementMinNonceIfEquals, (_transaction.nonce))
        );

        bytes memory correctSignature = abi.encodePacked(_suggestedSignedTxHash, address(this));
        require(keccak256(_transaction.signature) == keccak256(correctSignature), "Incorrect signature");
    }

    function executeTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override {
        _execute(_transaction);
    }

    function executeTransactionFromOutside(Transaction calldata _transaction) external payable override {
        _validateTransaction(bytes32(0), _transaction);
        _execute(_transaction);
    }

    function _execute(Transaction calldata _transaction) internal {
        address to = address(uint160(_transaction.to));
        uint256 value = _transaction.reserved[1];
        bytes memory data = _transaction.data;

        if(to == address(DEPLOYER_SYSTEM_CONTRACT)) {
            // We allow calling ContractDeployer with any calldata
            SystemContractsCaller.systemCallWithPropagatedRevert(
                uint32(gasleft()),
                to,
                uint128(_transaction.reserved[1]), // By convention, reserved[1] is `value`
                _transaction.data
            );
        } else {
            bool success;
            assembly {
                success := call(gas(), to, value, add(data, 0x20), mload(data), 0, 0)
            }
            require(success);
        }
    }

    // Here, the user pays the bootloader for the transaction
    function payForTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable {
        bool success = _transaction.payToTheBootloader();
        require(success, "Failed to pay the fee to the operator");
    }

    // Here, the user should prepare for the transaction to be paid for by a paymaster
    // Here, the account should set the allowance for the smart contracts
    function prepareForPaymaster(bytes32, bytes32, Transaction calldata _transaction) external payable {
        _transaction.processPaymasterInput();
    }

    fallback() external payable {
        // The fallback of the default AA should never be called by the bootloader.
        assert(msg.sender != BOOTLOADER_FORMAL_ADDRESS);

        // If the contract is called directly, behave like an EOA
    }

    receive() external payable {}
}
diff --git a/etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol b/etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol
new file mode 100644
index 000000000000..743412c95b8a
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/custom-paymaster.sol
@@ -0,0 +1,81 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.8.0;

import "./interfaces/IPaymaster.sol";
import "./interfaces/IPaymasterFlow.sol";
import "./TransactionHelper.sol";
import "./Constants.sol";

// This is a dummy paymaster. It expects the paymasterInput to contain its "signature" as well as the needed exchange rate.
// It supports only approval-based paymaster flow.
contract CustomPaymaster is IPaymaster {
    using TransactionHelper for Transaction;

    uint256 public txCounter = 0;
    mapping(uint256 => bool) public calledContext;
    uint256 public wasAnytime = 0;

    bytes32 lastTxHash = 0;

    function validateSignature(bytes memory _signature) internal pure {
        // For the purpose of this test, any signature of length 46 is fine.
+ require(_signature.length == 46); + } + + function validateAndPayForPaymasterTransaction(bytes32 _txHash, bytes32, Transaction calldata _transaction) override external payable returns (bytes4 magic, bytes memory context) { + // By default we consider the transaction as passed + magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; + + lastTxHash = _txHash; + require(_transaction.paymasterInput.length >= 4, "The standard paymaster input must be at least 4 bytes long"); + + bytes4 paymasterInputSelector = bytes4(_transaction.paymasterInput[0:4]); + if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { + // While the actual data consists of address, uint256 and bytes data, + // the data is needed only for the paymaster, so we ignore it here for the sake of optimization + (address token,, bytes memory input) = abi.decode(_transaction.paymasterInput[4:], (address, uint256, bytes)); + + (bytes memory pseudoSignature, uint256 rateNumerator, uint256 rateDenominator, uint256 amount) = abi.decode(input, (bytes, uint256, uint256, uint256)); + validateSignature(pseudoSignature); + + // Firstly, we verify that the user has provided enough allowance + address userAddress = address(uint160(_transaction.from)); + address thisAddress = address(this); + + uint256 providedAllowance = IERC20(token).allowance(userAddress, thisAddress); + require(providedAllowance >= amount, "The user did not provide enough allowance"); + + uint256 requiredETH = _transaction.gasLimit * _transaction.maxFeePerGas; + uint256 ethExchnaged = amount * rateNumerator / rateDenominator; + + require(ethExchnaged >= requiredETH, "User does not provide enough tokens to exchange"); + + // Pulling all the tokens from the user + IERC20(token).transferFrom(userAddress, thisAddress, amount); + bool success = _transaction.payToTheBootloader(); + require(success, "Failed to transfer funds to the bootloader"); + + // For now, refunds are not supported, so we just test the fact that the transfered context is 
correct + txCounter += 1; + context = abi.encode(txCounter); + } else { + revert("Unsupported paymaster flow"); + } + } + + function postTransaction( + bytes calldata _context, + Transaction calldata, + bytes32 _txHash, + bytes32, + ExecutionResult, + uint256 + ) override external payable { + require(_txHash == lastTxHash, "Incorrect last tx hash"); + uint256 contextCounter = abi.decode(_context, (uint256)); + calledContext[contextCounter] = true; + } + + receive() external payable {} +} diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol new file mode 100644 index 000000000000..521ae96d4133 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/interfaces/IAccount.sol @@ -0,0 +1,47 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import "../TransactionHelper.sol"; + +bytes4 constant VALIDATION_SUCCESS_MAGIC = IAccount.validateTransaction.selector; + +interface IAccount { + /// @notice Called by the bootloader to validate that an account agrees to process the transaction + /// (and potentially pay for it). + /// @param _txHash The hash of the transaction to be used in the explorer + /// @param _suggestedSignedHash The hash of the transaction is signed by EOAs + /// @param _transaction The transaction itself + /// @return magic The magic value that should be equal to the signature of this function + /// if the user agrees to proceed with the transaction. + /// @dev The developer should strive to preserve as many steps as possible both for valid + /// and invalid transactions as this very method is also used during the gas fee estimation + /// (without some of the necessary data, e.g. signature). 
    function validateTransaction(
        bytes32 _txHash,
        bytes32 _suggestedSignedHash,
        Transaction calldata _transaction
    ) external payable returns (bytes4 magic);

    function executeTransaction(
        bytes32 _txHash,
        bytes32 _suggestedSignedHash,
        Transaction calldata _transaction
    ) external payable;

    // There is no point in providing possible signed hash in the `executeTransactionFromOutside` method,
    // since it typically should not be trusted.
    function executeTransactionFromOutside(Transaction calldata _transaction) external payable;

    function payForTransaction(
        bytes32 _txHash,
        bytes32 _suggestedSignedHash,
        Transaction calldata _transaction
    ) external payable;

    function prepareForPaymaster(
        bytes32 _txHash,
        bytes32 _possibleSignedHash,
        Transaction calldata _transaction
    ) external payable;
}
diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol
new file mode 100644
index 000000000000..aa8a7718c8da
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/interfaces/IContractDeployer.sol
@@ -0,0 +1,108 @@
// SPDX-License-Identifier: MIT OR Apache-2.0

pragma solidity ^0.8.0;

interface IContractDeployer {
    /// @notice Defines the version of the account abstraction protocol
    /// that a contract claims to follow.
    /// - `None` means that the account is just a contract and it should never be interacted
    /// with as a custom account
    /// - `Version1` means that the account follows the first version of the account abstraction protocol
    enum AccountAbstractionVersion {
        None,
        Version1
    }

    /// @notice Defines the nonce ordering used by the account
    /// - `Sequential` means that it is expected that the nonces are monotonic and increment by 1
    /// at a time (the same as EOAs).
    /// - `Arbitrary` means that the nonces for the accounts can be arbitrary. The operator
    /// should serve the transactions from such an account on a first-come-first-serve basis.
    /// @dev This ordering is more of a suggestion to the operator on how the AA expects its transactions
    /// to be processed and is not considered as a system invariant.
    enum AccountNonceOrdering {
        Sequential,
        Arbitrary
    }

    struct AccountInfo {
        AccountAbstractionVersion supportedAAVersion;
        AccountNonceOrdering nonceOrdering;
    }

    event ContractDeployed(
        address indexed deployerAddress,
        bytes32 indexed bytecodeHash,
        address indexed contractAddress
    );

    function getNewAddressCreate2(
        address _sender,
        bytes32 _bytecodeHash,
        bytes32 _salt,
        bytes calldata _input
    ) external pure returns (address newAddress);

    function getNewAddressCreate(address _sender, uint256 _senderNonce) external pure returns (address newAddress);

    function create2(
        bytes32 _salt,
        bytes32 _bytecodeHash,
        bytes calldata _input
    ) external payable returns (address newAddress);

    function create2Account(
        bytes32 _salt,
        bytes32 _bytecodeHash,
        bytes calldata _input,
        AccountAbstractionVersion _aaVersion
    ) external payable returns (address newAddress);

    /// @dev While the `_salt` parameter is not used anywhere here,
    /// it is still needed for consistency between `create` and
    /// `create2` functions (required by the compiler).
    function create(
        bytes32 _salt,
        bytes32 _bytecodeHash,
        bytes calldata _input
    ) external payable returns (address newAddress);

    /// @dev While `_salt` is never used here, we leave it here as a parameter
    /// for the consistency with the `create` function.
    function createAccount(
        bytes32 _salt,
        bytes32 _bytecodeHash,
        bytes calldata _input,
        AccountAbstractionVersion _aaVersion
    ) external payable returns (address newAddress);

    /// @notice Returns the information about a certain AA.
    function getAccountInfo(
        address _address
    ) external view returns (AccountInfo memory info);

    /// @notice Can be called by an account to update its account version
    function updateAccountVersion(AccountAbstractionVersion _version) external;

    /// @notice Can be called by an account to update its nonce ordering
    function updateNonceOrdering(AccountNonceOrdering _nonceOrdering) external;

    /// @notice A struct that describes a forced deployment on an address
    struct ForceDeployment {
        // The bytecode hash to put on an address
        bytes32 bytecodeHash;
        // The address on which to deploy the bytecodehash to
        address newAddress;
        // The value with which to initialize a contract
        uint256 value;
        // The constructor calldata
        bytes input;
    }

    /// @notice This method is to be used only during an upgrade to set a bytecode on any address.
    /// @dev We do not require `onlySystemCall` here, since the method is accessible only
    /// by `FORCE_DEPLOYER`.
    function forceDeployOnAddresses(
        ForceDeployment[] calldata _deployments
    ) external payable;
}
diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol
new file mode 100644
index 000000000000..b816bfed0863
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/interfaces/IERC20.sol
@@ -0,0 +1,82 @@
// SPDX-License-Identifier: MIT
// OpenZeppelin Contracts (last updated v4.6.0) (token/ERC20/IERC20.sol)

pragma solidity ^0.8.0;

/**
 * @dev Interface of the ERC20 standard as defined in the EIP.
 */
interface IERC20 {
    /**
     * @dev Emitted when `value` tokens are moved from one account (`from`) to
     * another (`to`).
     *
     * Note that `value` may be zero.
     */
    event Transfer(address indexed from, address indexed to, uint256 value);

    /**
     * @dev Emitted when the allowance of a `spender` for an `owner` is set by
     * a call to {approve}. `value` is the new allowance.
     */
    event Approval(address indexed owner, address indexed spender, uint256 value);

    /**
     * @dev Returns the amount of tokens in existence.
     */
    function totalSupply() external view returns (uint256);

    /**
     * @dev Returns the amount of tokens owned by `account`.
     */
    function balanceOf(address account) external view returns (uint256);

    /**
     * @dev Moves `amount` tokens from the caller's account to `to`.
     *
     * Returns a boolean value indicating whether the operation succeeded.
     *
     * Emits a {Transfer} event.
     */
    function transfer(address to, uint256 amount) external returns (bool);

    /**
     * @dev Returns the remaining number of tokens that `spender` will be
     * allowed to spend on behalf of `owner` through {transferFrom}. This is
     * zero by default.
     *
     * This value changes when {approve} or {transferFrom} are called.
     */
    function allowance(address owner, address spender) external view returns (uint256);

    /**
     * @dev Sets `amount` as the allowance of `spender` over the caller's tokens.
     *
     * Returns a boolean value indicating whether the operation succeeded.
     *
     * IMPORTANT: Beware that changing an allowance with this method brings the risk
     * that someone may use both the old and the new allowance by unfortunate
     * transaction ordering. One possible solution to mitigate this race
     * condition is to first reduce the spender's allowance to 0 and set the
     * desired value afterwards:
     * https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729
     *
     * Emits an {Approval} event.
     */
    function approve(address spender, uint256 amount) external returns (bool);

    /**
     * @dev Moves `amount` tokens from `from` to `to` using the
     * allowance mechanism. `amount` is then deducted from the caller's
     * allowance.
     *
     * Returns a boolean value indicating whether the operation succeeded.
     *
     * Emits a {Transfer} event.
     */
    function transferFrom(
        address from,
        address to,
        uint256 amount
    ) external returns (bool);
}
diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol
new file mode 100644
index 000000000000..18ac47023266
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/interfaces/INonceHolder.sol
@@ -0,0 +1,42 @@
// SPDX-License-Identifier: MIT OR Apache-2.0

pragma solidity ^0.8.0;

/**
 * @author Matter Labs
 * @dev Interface of the nonce holder contract -- a contract used by the system to ensure
 * that there is always a unique identifier for a transaction with a particular account (we call it nonce).
 * In other words, the pair of (address, nonce) should always be unique.
 * @dev Custom accounts should use methods of this contract to store nonces or other possible unique identifiers
 * for the transaction.
 */
interface INonceHolder {
    /// @dev Returns the current minimal nonce for account.
    function getMinNonce(address _address) external view returns (uint256);

    /// @dev Returns the raw version of the current minimal nonce
    /// (equal to minNonce + 2^128 * deployment nonce).
    function getRawNonce(address _address) external view returns (uint256);

    /// @dev Increases the minimal nonce for the msg.sender.
    function increaseMinNonce(uint256 _value) external returns (uint256);

    /// @dev Sets the nonce value `key` as used.
    function setValueUnderNonce(uint256 _key, uint256 _value) external;

    /// @dev Gets the value stored inside a custom nonce.
    function getValueUnderNonce(uint256 _key) external view returns (uint256);

    /// @dev A convenience method to increment the minimal nonce if it is equal
    /// to the `_expectedNonce`.
    function incrementMinNonceIfEquals(uint256 _expectedNonce) external;

    /// @dev Returns the deployment nonce for the accounts used for CREATE opcode.
    function getDeploymentNonce(address _address) external view returns (uint256);

    /// @dev Increments the deployment nonce for the account and returns the previous one.
    function incrementDeploymentNonce(address _address) external returns (uint256);

    /// @dev Determines whether a certain nonce has been already used for an account.
    function validateNonceUsage(address _address, uint256 _key, bool _shouldBeUsed) external view;
}
diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol
new file mode 100644
index 000000000000..cf5ced948782
--- /dev/null
+++ b/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymaster.sol
@@ -0,0 +1,51 @@
// SPDX-License-Identifier: MIT OR Apache-2.0

pragma solidity ^0.8.0;

import "../TransactionHelper.sol";

enum ExecutionResult {
    Revert,
    Success
}

bytes4 constant PAYMASTER_VALIDATION_SUCCESS_MAGIC = IPaymaster.validateAndPayForPaymasterTransaction.selector;

interface IPaymaster {
    /// @dev Called by the bootloader to verify that the paymaster agrees to pay for the
    /// fee for the transaction. This transaction should also send the necessary amount of funds onto the bootloader
    /// address.
    /// @param _txHash The hash of the transaction
    /// @param _suggestedSignedHash The hash of the transaction that is signed by an EOA
    /// @param _transaction The transaction itself.
    /// @return magic The value that should be equal to the signature of the validateAndPayForPaymasterTransaction
    /// if the paymaster agrees to pay for the transaction.
    /// @return context The "context" of the transaction: an array of bytes of length at most 1024 bytes, which will be
    /// passed to the `postTransaction` method of the account.
+ /// @dev The developer should strive to preserve as many steps as possible both for valid + /// and invalid transactions as this very method is also used during the gas fee estimation + /// (without some of the necessary data, e.g. signature). + function validateAndPayForPaymasterTransaction( + bytes32 _txHash, + bytes32 _suggestedSignedHash, + Transaction calldata _transaction + ) external payable returns (bytes4 magic, bytes memory context); + + /// @dev Called by the bootloader after the execution of the transaction. Please note that + /// there is no guarantee that this method will be called at all. Unlike the original EIP4337, + /// this method won't be called if the transaction execution results in out-of-gas. + /// @param _context, the context of the execution, returned by the "validateAndPayForPaymasterTransaction" method. + /// @param _transaction, the users' transaction. + /// @param _txResult, the result of the transaction execution (success or failure). + /// @param _maxRefundedGas, the upper bound on the amout of gas that could be refunded to the paymaster. + /// @dev The exact amount refunded depends on the gas spent by the "postOp" itself and so the developers should + /// take that into account. 
+ function postTransaction( + bytes calldata _context, + Transaction calldata _transaction, + bytes32 _txHash, + bytes32 _suggestedSignedHash, + ExecutionResult _txResult, + uint256 _maxRefundedGas + ) external payable; +} diff --git a/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol b/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol new file mode 100644 index 000000000000..97bd95079292 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/interfaces/IPaymasterFlow.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +/** + * @author Matter Labs + * @dev The interface that is used for encoding/decoding of + * different types of paymaster flows. + * @notice This is NOT an interface to be implementated + * by contracts. It is just used for encoding. + */ +interface IPaymasterFlow { + function general(bytes calldata input) external; + + function approvalBased(address _token, uint256 _minAllowance, bytes calldata _innerInput) external; +} diff --git a/etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol b/etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol new file mode 100644 index 000000000000..5e276eab3af7 --- /dev/null +++ b/etc/contracts-test-data/contracts/custom-account/nonce-holder-test.sol @@ -0,0 +1,100 @@ +// SPDX-License-Identifier: MIT OR Apache-2.0 + +pragma solidity ^0.8.0; + +import './Constants.sol'; +import './TransactionHelper.sol'; + +import './interfaces/IAccount.sol'; +import './interfaces/IContractDeployer.sol'; + +import './SystemContractsCaller.sol'; + +/** +* @author Matter Labs +* @dev Dummy account used for tests that accepts any transaction. 
+*/ +contract NonceHolderTest is IAccount { + using TransactionHelper for Transaction; + + // bytes4(keccak256("isValidSignature(bytes32,bytes)") + bytes4 constant EIP1271_SUCCESS_RETURN_VALUE = 0x1626ba7e; + + function validateTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override returns (bytes4 magic) { + // By default we consider the transaction as successful + magic = VALIDATION_SUCCESS_MAGIC; + + _validateTransaction(_transaction); + } + + function _validateTransaction(Transaction calldata _transaction) internal { + bytes memory data; + + if (uint8(_transaction.signature[0]) == 0) { + // It only erases nonce as non-allowed + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.setValueUnderNonce, (_transaction.nonce, 1)); + } else if(uint8(_transaction.signature[0]) == 1) { + // It should increase minimal nonce by 5 + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (5)); + } else if(uint8(_transaction.signature[0]) == 2) { + // It should try increasing nnonce by 2**90 + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (2**90)); + } else if (uint8(_transaction.signature[0]) == 3) { + // Do nothing + return; + } else if(uint8(_transaction.signature[0]) == 4) { + // It should increase minimal nonce by 1 + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (1)); + } else if (uint8(_transaction.signature[0]) == 5) { + // Increase minimal nonce by 5 and set the nonce ordering of the account as arbitrary + data = abi.encodeCall(NONCE_HOLDER_SYSTEM_CONTRACT.increaseMinNonce, (5)); + SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + address(DEPLOYER_SYSTEM_CONTRACT), + 0, + abi.encodeCall(DEPLOYER_SYSTEM_CONTRACT.updateNonceOrdering, (IContractDeployer.AccountNonceOrdering.Arbitrary)) + ); + } else { + revert("Unsupported test"); + } + + SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + 
address(NONCE_HOLDER_SYSTEM_CONTRACT), + 0, + data + ); + } + + function executeTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override { + _execute(_transaction); + } + + function executeTransactionFromOutside(Transaction calldata _transaction) external payable override { + _validateTransaction(_transaction); + _execute(_transaction); + } + + function _execute(Transaction calldata _transaction) internal {} + + // Here, the user pays the bootloader for the transaction + function payForTransaction(bytes32, bytes32, Transaction calldata _transaction) external payable override { + bool success = _transaction.payToTheBootloader(); + require(success, "Failed to pay the fee to the operator"); + } + + // Here, the user should prepare for the transaction to be paid for by a paymaster + // Here, the account should set the allowance for the smart contracts + function prepareForPaymaster(bytes32, bytes32, Transaction calldata _transaction) external payable override { + _transaction.processPaymasterInput(); + } + + fallback() external payable { + // fallback of default AA shouldn't be called by bootloader under no circumstances + assert(msg.sender != BOOTLOADER_FORMAL_ADDRESS); + + // If the contract is called directly, behave like an EOA + } + + receive() external payable {} +} diff --git a/etc/contracts-test-data/contracts/error/error.sol b/etc/contracts-test-data/contracts/error/error.sol new file mode 100644 index 000000000000..ba8085c26654 --- /dev/null +++ b/etc/contracts-test-data/contracts/error/error.sol @@ -0,0 +1,22 @@ +pragma solidity ^0.8.0; + +// SPDX-License-Identifier: MIT OR Apache-2.0 + +contract SimpleRequire { + error TestError(uint256 one, uint256 two, uint256 three, string data); + + function new_error() public pure { + revert TestError({one: 1, two: 2, three: 1, data: "data"}); + } + + function require_short() public pure { + require(false, "short"); + } + + function require_long() public pure { + require( + false, + 
'longlonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglonglong' + ); + } +} diff --git a/etc/contracts-test-data/contracts/estimator/estimator.sol b/etc/contracts-test-data/contracts/estimator/estimator.sol new file mode 100644 index 000000000000..7fc7dfffc64b --- /dev/null +++ b/etc/contracts-test-data/contracts/estimator/estimator.sol @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: UNLICENSED + +// This contract is used to estimate the protocol properties +// related to the fee calculation, such as block capacity +// and different operations costs. + +pragma solidity ^0.8.0; + +// Copied from `contracts/zksync/contracts/L2ContractHelper.sol`. +interface IL2Messenger { + function sendToL1(bytes memory _message) external returns (bytes32); +} + +uint160 constant SYSTEM_CONTRACTS_OFFSET = 0x8000; // 2^15 +IL2Messenger constant L2_MESSENGER = IL2Messenger(address(SYSTEM_CONTRACTS_OFFSET + 0x08)); + +// TODO: Should be set to the actual value (SMA-1185). +// Represents the maximum amount of L2->L1 messages that can happen in one block. +uint256 constant MAX_L2_L1_MESSAGES_IN_BLOCK = 256; + +contract Estimator { + function estimateBlockCapacity() public { + // Block capacity is defined by several parameters, but the "cheapest" way to seal the block + // is to send a limited amount of messages to the L1. + // Here we're going to do just it. 
+ for (uint256 i = 0; i < MAX_L2_L1_MESSAGES_IN_BLOCK; i++) { + L2_MESSENGER.sendToL1(bytes("")); + } + } +} diff --git a/etc/contracts-test-data/contracts/events/events.sol b/etc/contracts-test-data/contracts/events/events.sol new file mode 100644 index 000000000000..93a451d54695 --- /dev/null +++ b/etc/contracts-test-data/contracts/events/events.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract Emitter { + event Trivial(); + event Simple(uint256 Number, address Account); + event Indexed(uint256 indexed Number, address Account); + + function test(uint256 number) public { + emit Trivial(); + emit Simple(number, address(0xdeadbeef)); + emit Indexed(number, address(0xc0ffee)); + } +} diff --git a/etc/contracts-test-data/contracts/events/sample-calldata b/etc/contracts-test-data/contracts/events/sample-calldata new file mode 100644 index 0000000000000000000000000000000000000000..c137101ba026010f41d872325c4d53eab9d99a27 GIT binary patch literal 96 UcmY#kARn;Lf2oO2HzQCI07%#Y-T(jq literal 0 HcmV?d00001 diff --git a/etc/contracts-test-data/contracts/expensive/expensive.sol b/etc/contracts-test-data/contracts/expensive/expensive.sol new file mode 100644 index 000000000000..c3b99df48923 --- /dev/null +++ b/etc/contracts-test-data/contracts/expensive/expensive.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract Expensive { + uint[] array; + + function expensive(uint iterations) public returns (bytes32) { + for (uint i = 0; i < iterations; i++) { + array.push(i); + } + return keccak256(abi.encodePacked(array)); + } +} diff --git a/etc/contracts-test-data/contracts/infinite/infinite.sol b/etc/contracts-test-data/contracts/infinite/infinite.sol new file mode 100644 index 000000000000..3ed4e035f601 --- /dev/null +++ b/etc/contracts-test-data/contracts/infinite/infinite.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + 
+contract InfiniteLoop { + event Iteration(uint256 number); + + function infiniteLoop() public { + uint256 x = 0; + + while (true) { + x += 1; + // This event is needed so that LLVM + // won't optimize the loop away. + emit Iteration(x); + } + } +} diff --git a/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol b/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol new file mode 100644 index 000000000000..b14286a45038 --- /dev/null +++ b/etc/contracts-test-data/contracts/loadnext/loadnext_contract.sol @@ -0,0 +1,56 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; +pragma abicoder v2; + +contract LoadnextContract { + event Event(uint val); + uint[] readArray; + uint[] writeArray; + + constructor (uint reads) { + for (uint i = 0; i < reads; i++) { + readArray.push(i); + } + } + + function execute(uint reads, uint writes, uint hashes, uint events, uint max_recursion, uint deploys) external returns(uint) { + if (max_recursion > 0) { + return this.execute(reads, writes, hashes, events, max_recursion - 1, deploys); + } + + uint sum = 0; + + // Somehow use result of storage read for compiler to not optimize this place. + for (uint i = 0; i < reads; i++) { + sum += readArray[i]; + } + + for (uint i = 0; i < writes; i++) { + writeArray.push(i); + } + + for (uint i = 0; i < events; i++) { + emit Event(i); + } + + // Somehow use result of keccak for compiler to not optimize this place. 
+ for (uint i = 0; i < hashes; i++) { + sum += uint8(keccak256(abi.encodePacked("Message for encoding"))[0]); + } + + for (uint i = 0; i < deploys; i++) { + Foo foo = new Foo(); + } + return sum; + } + + function burnGas(uint256 gasToBurn) external { + uint256 initialGas = gasleft(); + while(initialGas - gasleft() < gasToBurn) {} + } +} + +contract Foo { + string public name = "Foo"; +} diff --git a/etc/contracts-test-data/hardhat.config.ts b/etc/contracts-test-data/hardhat.config.ts new file mode 100644 index 000000000000..59080306c84e --- /dev/null +++ b/etc/contracts-test-data/hardhat.config.ts @@ -0,0 +1,19 @@ +import '@matterlabs/hardhat-zksync-solc'; + +export default { + zksolc: { + version: '1.3.1', + compilerSource: 'binary', + settings: { + isSystem: true + } + }, + networks: { + hardhat: { + zksync: true + } + }, + solidity: { + version: '0.8.16' + } +}; diff --git a/etc/contracts-test-data/package.json b/etc/contracts-test-data/package.json new file mode 100644 index 000000000000..a68fd1074344 --- /dev/null +++ b/etc/contracts-test-data/package.json @@ -0,0 +1,12 @@ +{ + "name": "contracts-test-data", + "version": "0.1.0", + "license": "MIT", + "dependencies": { + "@openzeppelin/contracts": "^4.8.0", + "hardhat": "2.12.4" + }, + "devDependencies": { + "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3" + } +} diff --git a/etc/env/base/README.md b/etc/env/base/README.md new file mode 100644 index 000000000000..a41c6fb9e284 --- /dev/null +++ b/etc/env/base/README.md @@ -0,0 +1,29 @@ +# Base configuration for zkSync stack + +This folder contains the template for generating the configuration for zkSync applications. Configs in this folder are +assigned default values suitable for the development. + +Since all the applications expect configuration to be set via the environment variables, these configs are compiled into +one `*.env` file, which will be loaded prior to the application launch. 
+ +Configuration files can be compiled with the `zk` subcommand: + +```sh +zk config compile +``` + +Without any additional arguments specified, this subcommand will do the following: + +1. Check whether `etc/env/current` file exists. If so, it is read and the name of the current environment is taken from + there. Otherwise, the environment is assumed to be called `dev`. +2. Check whether the folder with the name same as current environment exists. If so, configs are read from there. + Otherwise behavior depends on the environment name: for `dev` environment, `dev` folder will be created as a copy of + the `base` folder. For any other environment, an error will be reported. +3. `zk` will iterate through all the `toml` files and load specified values. Once all the data is loaded, a new file + named `.dev` is created and all the values are placed there. + +It is possible to specify the config you want to compile: + +```sh +zk config compile testnet # Will compile configs for the `testnet` environment. +``` diff --git a/etc/env/base/api.toml b/etc/env/base/api.toml new file mode 100644 index 000000000000..4cf93b5c81fd --- /dev/null +++ b/etc/env/base/api.toml @@ -0,0 +1,60 @@ +# Configuration for the core API servers. + + +# Configuration for the web3 JSON RPC server +[api.web3_json_rpc] +# Port for the HTTP RPC API. +http_port=3050 +http_url="http://127.0.0.1:3050" +# Port for the WebSocket RPC API. +ws_port=3051 +ws_url="ws://127.0.0.1:3051" +req_entities_limit=10000 +filters_limit=10000 +subscriptions_limit=10000 +# Interval between polling db for pubsub (in ms). 
+pubsub_polling_interval=200 +threads_per_server=128 +max_nonce_ahead=50 +gas_price_scale_factor=1.2 +request_timeout=10 +account_pks=[ + "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", + "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a", + "0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6", + "0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", + "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", + "0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", + "0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", + "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", + "0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", + "0xf214f2b2cd398c806f84e317254e0f0b801d0643303237d97a22a48e01628897", + "0x701b615bbdfb9de65240bc28bd21bbc0d996645a3dd57e7b12bc2bdf6f192c82", + "0xa267530f49f8280200edf313ee7af6b827f2a8bce2897751d06a843f644967b1", + "0x47c99abed3324a2707c28affff1267e45918ec8c3f20b8aa892e8b065d2942dd", + "0xc526ee95bf44d8fc405a158bb884d9d1238d99f0612e9f33d006bb0789009aaa", + "0x8166f546bab6da521a8369cab06c5d2b9e46670292d85c875ee9ec20e84ffb61", + "0xea6c44ac03bff858b476bba40716402b03e41b8e97e276d1baec7c37d42484a0", + "0x689af8efa8c651a91ad287602527f3af2fe9f6501a7ac4b061667b5a93e037fd", + "0xde9be858da4a475276426320d5e9262ecfc3ba460bfac56360bfa6c4c28b4ee0", + "0xdf57089febbacf7ba0bc227dafbffa9fc08a93fdc68e1e42411a14efcf23656e" +] +estimate_gas_scale_factor=1.2 +estimate_gas_acceptable_overestimation=1000 +# Configuration for the explorer API +[api.explorer] +# Port for the explorer API. +port=3070 +url="http://127.0.0.1:3070" +# Interval between polling db for network stats (in ms). +network_stats_polling_interval=1000 +req_entities_limit=100 +offset_limit=10000 +threads_per_server=128 + +# Configuration for the prometheus exporter server. 
+[api.prometheus] +listener_port=3312 +pushgateway_url="http://127.0.0.1:9091" +push_interval_ms=100 diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml new file mode 100644 index 000000000000..0b22f30111b3 --- /dev/null +++ b/etc/env/base/chain.toml @@ -0,0 +1,63 @@ +# zkSync chain parameters + +[chain.eth] +# Name of the used Ethereum network +network="localhost" +# Name of current zkSync network +# Used for Sentry environment +zksync_network="localhost" +# ID of current zkSync network treated as ETH network ID. +# Used to distinguish zkSync from other Web3-capable networks. +zksync_network_id=270 + +[chain.state_keeper] +fee_account_addr="0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7" + +# Detones the amount of slots for transactions in the block. +transaction_slots=250 + +max_allowed_l2_tx_gas_limit=4000000000 +block_commit_deadline_ms=2500 +miniblock_commit_deadline_ms=1000 +# Max gas that can used to include single block in aggregated operation +max_single_tx_gas=6000000 + +# Configuration option for block to be sealed in case +# it takes more percentage of the max block capacity than this value. +close_block_at_geometry_percentage=0.95 +# Configuration option for block to be sealed in case +# it takes more percentage of the max block capacity than this value. +close_block_at_eth_params_percentage=0.95 + +# Configuration option for block to be sealed in case +# it takes more percentage of the max block capacity than this value. +close_block_at_gas_percentage=0.95 + +# Configuration option for tx to be rejected in case +# it takes more percentage of the block capacity than this value. +reject_tx_at_geometry_percentage=0.95 +# Configuration option for block to be sealed in case +# it takes more percentage of the max block capacity than this value. +reject_tx_at_eth_params_percentage=0.95 + +# Configuration option for block to be sealed in case +# it takes more percentage of the max block gas capacity than this value. 
+reject_tx_at_gas_percentage=0.95 + + +# Whether all transactions should be reexecuted. This is needed to test the rollback functionality. +reexecute_each_tx=true + +[chain.operations_manager] +# Sleep time when there is no new input data +delay_interval=100 + +[chain.mempool] +sync_interval_ms=10 +sync_batch_size = 1000 +capacity=10_000_000 +stuck_tx_timeout=86400 # 1 day in seconds +remove_stuck_txs=true + +[chain.circuit_breaker] +sync_interval_ms=30000 diff --git a/etc/env/base/circuit_synthesizer.toml b/etc/env/base/circuit_synthesizer.toml new file mode 100644 index 000000000000..766c1ab7a1de --- /dev/null +++ b/etc/env/base/circuit_synthesizer.toml @@ -0,0 +1,9 @@ +[circuit_synthesizer] +generation_timeout_in_secs=3000 +max_attempts=3 +gpu_prover_queue_timeout_in_secs=600 +prover_instance_wait_timeout_in_secs=200 +prover_instance_poll_time_in_milli_secs=250 +prometheus_listener_port=3314 +prometheus_pushgateway_url="http://127.0.0.1:9091" +prometheus_push_interval_ms=100 diff --git a/etc/env/base/contract_verifier.toml b/etc/env/base/contract_verifier.toml new file mode 100644 index 000000000000..d66cf7e45dc3 --- /dev/null +++ b/etc/env/base/contract_verifier.toml @@ -0,0 +1,4 @@ +[contract_verifier] +compilation_timeout=30 +polling_interval=1000 +prometheus_port=3314 diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml new file mode 100644 index 000000000000..7a0226a412b5 --- /dev/null +++ b/etc/env/base/contracts.toml @@ -0,0 +1,33 @@ +# Addresses of the deployed zkSync contracts. +# Values of this file are updated automatically by the contract deploy script. 
+ +[contracts] +DIAMOND_INIT_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +DIAMOND_UPGRADE_INIT_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +MAILBOX_FACET_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +DIAMOND_CUT_FACET_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +EXECUTOR_FACET_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +GOVERNANCE_FACET_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +GETTERS_FACET_ADDR="0x5E6D086F5eC079ADFF4FB3774CDf3e8D6a34F7E9" +VERIFIER_ADDR="0xDAbb67b676F5b01FcC8997Cc8439846D0d8078ca" +DIAMOND_PROXY_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_ERC20_BRIDGE_PROXY_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_ERC20_BRIDGE_IMPL_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_ERC20_BRIDGE_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L2_TESTNET_PAYMASTER_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +L1_ALLOW_LIST_ADDR="0xFC073319977e314F251EAE6ae6bE76B0B3BAeeCF" +CREATE2_FACTORY_ADDR="0xce0042B868300000d44A59004Da54A005ffdcf9f" +VK_COMMITMENT_BASIC_CIRCUITS="0x0af0d77503b93a15fedd086638b7326cd3d169a2f388e568f41ea906c7a6eb93" +VK_COMMITMENT_LEAF="0x101e08b00193e529145ee09823378ef51a3bc8966504064f1f6ba3f1ba863210" +VK_COMMITMENT_NODE="0x1186ec268d49f1905f8d9c1e9d39fc33e98c74f91d91a21b8f7ef78bd09a8db8" +GENESIS_TX_HASH="0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" +GENESIS_ROOT="0x2d5ab622df708ab44944bb02377be85b6f27812e9ae520734873b7a193898ba4" +PRIORITY_TX_MAX_GAS_LIMIT=72000000 +GENESIS_BLOCK_COMMITMENT="0x6c7f89335e3ade24a7768ed73c425afd9fac92a094e0681f76cb6feabf8b6223" +# Current rollup leaf index after genesis +GENESIS_ROLLUP_LEAF_INDEX="21" + +[contracts.test] +dummy_verifier=true +easy_priority_mode=false + diff --git a/etc/env/base/database.toml b/etc/env/base/database.toml new file mode 100644 index 000000000000..3e039a7e59f3 --- /dev/null +++ b/etc/env/base/database.toml @@ -0,0 +1,13 @@ +[database] + +# Path to the database data directory. 
+path="./db" +# Path to the database data directory that contains state cache. +state_keeper_db_path="./db/state_keeper" +merkle_tree_backup_path="./db/backups" +merkle_tree_fast_ssd_path="./db/lightweight" +backup_count=5 +backup_interval_ms=60000 +max_block_batch=100 +# Amount of open connections to the database. +pool_size=50 diff --git a/etc/env/base/eth_client.toml b/etc/env/base/eth_client.toml new file mode 100644 index 000000000000..ef0e1015514a --- /dev/null +++ b/etc/env/base/eth_client.toml @@ -0,0 +1,4 @@ +[eth_client] +chain_id=9 +# Addresses of the Ethereum node API, separated by comma +web3_url="http://127.0.0.1:8545" diff --git a/etc/env/base/eth_sender.toml b/etc/env/base/eth_sender.toml new file mode 100644 index 000000000000..ee1bda17018a --- /dev/null +++ b/etc/env/base/eth_sender.toml @@ -0,0 +1,57 @@ +# Configuration for the Ethereum sender crate + +[eth_sender] + +[eth_sender.sender] +# operator_private_key is defined in the `private.toml` +# operator_commit_eth_addr is defined in the `private.toml` + +# Amount of confirmations required to consider L1 transaction committed. +wait_confirmations=1 +# Amount of blocks we will wait before considering L1 transaction stuck. +expected_wait_time_block=30 +# Node polling period in seconds. +tx_poll_period=1 +# The maximum amount of simultaneously sent Ethereum transactions. +max_txs_in_flight=30 # Safe in the local environment, do not repeat on prod (right now it will produce way too many extra calls to web3) +proof_sending_mode="SkipEveryProof" + +# Max L2 blocks to commit in one L1 transaction +max_aggregated_blocks_to_commit=10 +# Max L2 blocks to execute in one L1 transaction +max_aggregated_blocks_to_execute=10 + +aggregated_block_commit_deadline=1 +aggregated_block_prove_deadline=10 +aggregated_block_execute_deadline=10 + +timestamp_criteria_max_allowed_lag=30 + +# Based on geth implementation max size of transaction is 128kb. 
+max_eth_tx_data_size=120000 +# Aggregated proof sizes to be generated by server. +aggregated_proof_sizes=[1,4] + +# Max gas that can be used to execute aggregated operation +# for now (should be > 4kk which is max gas for one block commit/verify/execute) +max_aggregated_tx_gas=4000000 + +# Max gas that can used to include single block in aggregated operation +max_single_tx_gas=6000000 + +[eth_sender.gas_adjuster] +# Priority fee to be used by GasAdjuster (in wei). +default_priority_fee_per_gas=1_000_000_000 +# Max number of base fees from previous blocks to be used to correctly price transactions. +max_base_fee_samples=10_000 +# These two are parameters of the base_fee_per_gas formula in GasAdjuster. +# The possible formulas are: +# 1. base_fee_median * (A + B * time_in_mempool) +# 2. base_fee_median * A * B ^ time_in_mempool +# Currently the second is used. +# To confirm, see core/bin/zksync_core/src/eth_sender/gas_adjuster/mod.rs +pricing_formula_parameter_a=1.5 +pricing_formula_parameter_b=1.0005 +internal_l1_pricing_multiplier=0.8 +# Node polling period in seconds. +poll_period=5 diff --git a/etc/env/base/eth_watch.toml b/etc/env/base/eth_watch.toml new file mode 100644 index 000000000000..1bdeb2397631 --- /dev/null +++ b/etc/env/base/eth_watch.toml @@ -0,0 +1,6 @@ +[eth_watch] +# Amount of confirmations for the priority operation to be processed. +# In production this should be a non-zero value because of block reverts. +confirmations_for_eth_event=0 +# How often we want to poll the Ethereum node. 
+eth_node_poll_interval=300 diff --git a/etc/env/base/fetcher.toml b/etc/env/base/fetcher.toml new file mode 100644 index 000000000000..f6d7016b4241 --- /dev/null +++ b/etc/env/base/fetcher.toml @@ -0,0 +1,16 @@ +[fetcher] + +[fetcher.token_list] +source="Mock" +url="" +fetching_interval=3 + +[fetcher.token_price] +source="Mock" +url="" +fetching_interval=3 + +[fetcher.token_trading_volume] +source="Mock" +url="" +fetching_interval=3 diff --git a/etc/env/base/misc.toml b/etc/env/base/misc.toml new file mode 100644 index 000000000000..ee9925618ef8 --- /dev/null +++ b/etc/env/base/misc.toml @@ -0,0 +1,15 @@ +# Whether to ask user about dangerous actions or not +zksync_action="dont_ask" + +# Miscellaneous options for different infrastructure elements +[misc] +# fee_account_private_key is set in `private.toml` + +# Format of logs in stdout could be "plain" for development purposes and "json" for production +log_format="plain" + +sentry_url="unset" +sentry_panic_interval="1800" +sentry_error_interval="10800" + +otlp_url="unset" diff --git a/etc/env/base/nfs.toml b/etc/env/base/nfs.toml new file mode 100644 index 000000000000..471431d5b88e --- /dev/null +++ b/etc/env/base/nfs.toml @@ -0,0 +1,2 @@ +[nfs] +setup_key_mount_path="/home/setup_keys/" diff --git a/etc/env/base/object_store.toml b/etc/env/base/object_store.toml new file mode 100644 index 000000000000..961eb3a80909 --- /dev/null +++ b/etc/env/base/object_store.toml @@ -0,0 +1,5 @@ +[object_store] +service_account_path="~/gcloud/service_account.json" +bucket_base_url="base_url" +mode="FileBacked" +file_backed_base_path="artifacts" diff --git a/etc/env/base/private.toml b/etc/env/base/private.toml new file mode 100644 index 000000000000..2d77e3e805e1 --- /dev/null +++ b/etc/env/base/private.toml @@ -0,0 +1,16 @@ +# Sensitive values which MUST be different for production +# Values provided here are valid for the development infrastructure only. 
+ +database_url="postgres://postgres@localhost/zksync_local" +test_database_url="postgres://postgres@localhost/zksync_local_test" + +[eth_sender.sender] +# Set in env file for development, production, staging and testnet. +operator_private_key="0x27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be" +# Address to be used for zkSync account managing the interaction with a contract on Ethereum. +# Derived from the `OPERATOR_PRIVATE_KEY`. +operator_commit_eth_addr="0xde03a0B5963f75f1C8485B355fF6D30f3093BDE7" + +[misc] +# Private key for the fee seller account +fee_account_private_key="0x27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be" diff --git a/etc/env/base/prover.toml b/etc/env/base/prover.toml new file mode 100644 index 000000000000..7071f499ce36 --- /dev/null +++ b/etc/env/base/prover.toml @@ -0,0 +1,74 @@ +[prover.non_gpu] +prometheus_port=3313 +initial_setup_key_path="./../../../keys/setup/setup_2^22.key" +key_download_url="https://storage.googleapis.com/universal-setup/setup_2^22.key" +generation_timeout_in_secs=2700 +number_of_threads=22 +max_attempts=1 +polling_duration_in_millis=750 +setup_keys_path="/usr/src/setup-keys" +number_of_setup_slots=2 +assembly_receiver_port=17791 +assembly_receiver_poll_time_in_millis=250 +assembly_queue_capacity=1 +specialized_prover_group_id=0 + +[prover.two_gpu_forty_gb_mem] +prometheus_port=3313 +initial_setup_key_path="./../../../keys/setup/setup_2^26.key" +key_download_url="https://storage.googleapis.com/universal-setup/setup_2^26.key" +generation_timeout_in_secs=2700 +number_of_threads=5 +max_attempts=1 +polling_duration_in_millis=750 +setup_keys_path="/usr/src/setup-keys" +number_of_setup_slots=5 +assembly_receiver_port=17791 +assembly_receiver_poll_time_in_millis=250 +assembly_queue_capacity=3 +specialized_prover_group_id=1 + +[prover.one_gpu_eighty_gb_mem] +prometheus_port=3313 +initial_setup_key_path="./../../../keys/setup/setup_2^26.key" 
+key_download_url="https://storage.googleapis.com/universal-setup/setup_2^26.key" +generation_timeout_in_secs=2700 +number_of_threads=5 +max_attempts=1 +polling_duration_in_millis=750 +setup_keys_path="/usr/src/setup-keys" +number_of_setup_slots=5 +assembly_receiver_port=17791 +assembly_receiver_poll_time_in_millis=250 +assembly_queue_capacity=3 +specialized_prover_group_id=2 + +[prover.two_gpu_eighty_gb_mem] +prometheus_port=3313 +initial_setup_key_path="./../../../keys/setup/setup_2^26.key" +key_download_url="https://storage.googleapis.com/universal-setup/setup_2^26.key" +generation_timeout_in_secs=2700 +number_of_threads=9 +max_attempts=1 +polling_duration_in_millis=750 +setup_keys_path="/usr/src/setup-keys" +number_of_setup_slots=11 +assembly_receiver_port=17791 +assembly_receiver_poll_time_in_millis=250 +assembly_queue_capacity=4 +specialized_prover_group_id=3 + +[prover.four_gpu_eighty_gb_mem] +prometheus_port=3313 +initial_setup_key_path="./../../../keys/setup/setup_2^26.key" +key_download_url="https://storage.googleapis.com/universal-setup/setup_2^26.key" +generation_timeout_in_secs=2700 +number_of_threads=18 +max_attempts=1 +polling_duration_in_millis=750 +setup_keys_path="/usr/src/setup-keys" +number_of_setup_slots=18 +assembly_receiver_port=17791 +assembly_receiver_poll_time_in_millis=250 +assembly_queue_capacity=20 +specialized_prover_group_id=4 diff --git a/etc/env/base/prover_group.toml b/etc/env/base/prover_group.toml new file mode 100644 index 000000000000..b0824219ca52 --- /dev/null +++ b/etc/env/base/prover_group.toml @@ -0,0 +1,11 @@ +[prover_group] +group_0_circuit_ids="0,18" +group_1_circuit_ids="1,4" +group_2_circuit_ids="2,5" +group_3_circuit_ids="6,7" +group_4_circuit_ids="8,9" +group_5_circuit_ids="10,11" +group_6_circuit_ids="12,13" +group_7_circuit_ids="14,15" +group_8_circuit_ids="16,17" +group_9_circuit_ids="3" diff --git a/etc/env/base/rust.toml b/etc/env/base/rust.toml new file mode 100644 index 000000000000..31404c0f550b --- 
/dev/null +++ b/etc/env/base/rust.toml @@ -0,0 +1,33 @@ +# Environment configuration for the Rust code +# We don't provide the group name like `[rust]` here, because we don't want +# these variables to be prefixed during the compiling. + +# `RUST_LOG` environment variable for `env_logger` +# Here we use TOML multiline strings: newlines will be trimmed. +RUST_LOG="""\ +zksync_core=debug,\ +zksync_server=debug,\ +zksync_prover=debug,\ +zksync_contract_verifier=debug,\ +zksync_dal=info,\ +zksync_eth_client=info,\ +zksync_storage=info,\ +zksync_db_manager=info,\ +zksync_merkle_tree=info,\ +zksync_state=debug,\ +zksync_utils=debug,\ +zksync_queued_job_processor=info,\ +zksync_types=info,\ +zksync_mempool=debug,\ +loadnext=debug,\ +vm=info,\ +block_sizes_test=info,\ +zksync_verification_key_generator_and_server=info,\ +zksync_object_store=info,\ +setup_key_generator_and_server=info,\ +zksync_circuit_synthesizer=info,\ +""" + +# `RUST_BACKTRACE` variable +RUST_BACKTRACE="full" +RUST_LIB_BACKTRACE="1" diff --git a/etc/env/base/witness_generator.toml b/etc/env/base/witness_generator.toml new file mode 100644 index 000000000000..1486a9e76cce --- /dev/null +++ b/etc/env/base/witness_generator.toml @@ -0,0 +1,11 @@ +[witness] +generation_timeout_in_secs=900 +initial_setup_key_path="./keys/setup/setup_2^22.key" +key_download_url="https://storage.googleapis.com/universal-setup/setup_2^22.key" +max_attempts=1 +# Witness/proof sampling params. +# Sampling will be enabled only if `sampling_enabled=true` and `sampling_safe_prover_lag`, +# `sampling_max_prover_lag` are provided, otherwise it will generate witnesses/proofs for every block. +# When setting `sampling_safe_prover_lag=sampling_max_prover_lag=0` every block proof is skipped. 
+sampling_enabled=false +dump_arguments_for_blocks="2,3" diff --git a/etc/env/docker.env b/etc/env/docker.env new file mode 100644 index 000000000000..bf32ba775417 --- /dev/null +++ b/etc/env/docker.env @@ -0,0 +1,14 @@ +ETH_CLIENT_WEB3_URL=http://geth:8545 +FEE_TICKER_COINMARKETCAP_BASE_URL=http://dev-ticker:9876 +FEE_TICKER_COINGECKO_BASE_URL=http://dev-ticker:9876 +DATABASE_URL=postgres://postgres@postgres/zksync_local +TEST_DATABASE_URL=postgres://postgres@postgres/zksync_local_test +FEE_TICKER_UNISWAP_URL=http://dev-liquidity-token-watcher:9975/graphql +DEV_LIQUIDITY_TOKEN_WATCHER_BLACKLISTED_TOKENS=0x0000000000000000000000000000000000000001 +DEV_LIQUIDITY_TOKEN_WATCHER_DEFAULT_VOLUME=500 +DEV_LIQUIDITY_TOKEN_WATCHER_REGIME=whitelist + +# Time to process one miniblock (in ms) +CHAIN_STATE_KEEPER_MINIBLOCK_ITERATION_INTERVAL=50 +# For loadtest performing +L1_RPC_ADDRESS=http://geth:8545 diff --git a/etc/lint-config/js.js b/etc/lint-config/js.js new file mode 100644 index 000000000000..0398a4f487f9 --- /dev/null +++ b/etc/lint-config/js.js @@ -0,0 +1,35 @@ +module.exports = { + root: true, + env: { + browser: true, + node: true, + es6: true, + mocha: true + }, + extends: ['alloy'], + rules: { + 'no-console': 'off', + 'no-debugger': 'error', + semi: 'warn', + 'no-extra-semi': 'off', + 'no-empty': 'warn', + 'spaced-comment': 'off', + eqeqeq: 'off', + 'max-params': 'off', + 'no-eq-null': 'off', + 'no-implicit-coercion': 'off', + 'accessor-pairs': 'off', + 'no-promise-executor-return': 'off' + }, + parserOptions: { + parser: 'babel-eslint' + }, + overrides: [ + { + files: ['./contracts/test/**/*.js'], + rules: { + 'no-invalid-this': 'off' + } + } + ] +}; diff --git a/etc/lint-config/md.js b/etc/lint-config/md.js new file mode 100644 index 000000000000..485fd1dc5c57 --- /dev/null +++ b/etc/lint-config/md.js @@ -0,0 +1,8 @@ +module.exports = { + "default": true, + "header-increment": false, + "no-duplicate-header": false, + "no-inline-html": false, + "line-length": 
false, + "fenced-code-language": false +}; diff --git a/etc/lint-config/sol.js b/etc/lint-config/sol.js new file mode 100644 index 000000000000..2d29c78f9477 --- /dev/null +++ b/etc/lint-config/sol.js @@ -0,0 +1,25 @@ +module.exports = { + "extends": "solhint:recommended", + "rules": { + // Unfortunately on the time of this writing, `--quiet` option of solhint is not working. + // And also there were >290 warnings on *.sol files. Since changes to *.sol + // files require an audit, it was decided to postpone the changes to make the solhint + // pass. + // + // TODO (ZKS-329): Turn on the majority of the rules and make the solhint comply with them. + "state-visibility": "off", + "var-name-mixedcase": "off", + "avoid-call-value": "off", + "no-empty-blocks": "off", + "not-rely-on-time": "off", + "avoid-low-level-calls": "off", + "no-inline-assembly": "off", + "const-name-snakecase": "off", + "no-complex-fallback": "off", + "reason-string": "off", + "func-name-mixedcase": "off", + "no-unused-vars": "off", + "max-states-count": "off", + "compiler-version": ["warn", "^0.7.0"] + } +}; diff --git a/etc/lint-config/ts.js b/etc/lint-config/ts.js new file mode 100644 index 000000000000..885c67db86f6 --- /dev/null +++ b/etc/lint-config/ts.js @@ -0,0 +1,15 @@ +module.exports = { + root: true, + env: { + browser: true, + node: true, + es6: true, + mocha: true + }, + parser: '@typescript-eslint/parser', + plugins: ['@typescript-eslint'], + rules: { + // This is the only rule that should be enforced in typescript + '@typescript-eslint/no-unused-vars': ["error", { "argsIgnorePattern": "^_" }] + } +}; diff --git a/etc/openzeppelin-contracts b/etc/openzeppelin-contracts new file mode 160000 index 000000000000..6e8d885ca715 --- /dev/null +++ b/etc/openzeppelin-contracts @@ -0,0 +1 @@ +Subproject commit 6e8d885ca71584659ad728411832840cf0bf507b diff --git a/etc/prettier-config/js.js b/etc/prettier-config/js.js new file mode 100644 index 000000000000..5c31df4c4e16 --- /dev/null +++ 
b/etc/prettier-config/js.js @@ -0,0 +1,7 @@ +module.exports = { + "tabWidth": 4, + "printWidth": 120, + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true +}; diff --git a/etc/prettier-config/md.js b/etc/prettier-config/md.js new file mode 100644 index 000000000000..493f4ad63d47 --- /dev/null +++ b/etc/prettier-config/md.js @@ -0,0 +1,9 @@ +module.exports = { + "tabWidth": 2, + "printWidth": 120, + "parser": "markdown", + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true, + "proseWrap": "always" +}; diff --git a/etc/prettier-config/sol.js b/etc/prettier-config/sol.js new file mode 100644 index 000000000000..521a8ba95ea9 --- /dev/null +++ b/etc/prettier-config/sol.js @@ -0,0 +1,8 @@ +module.exports = { + "printWidth": 120, + "tabWidth": 4, + "useTabs": false, + "singleQuote": false, + "bracketSpacing": false, + "explicitTypes": "always" +}; diff --git a/etc/prettier-config/ts.js b/etc/prettier-config/ts.js new file mode 100644 index 000000000000..2486c4ce95f0 --- /dev/null +++ b/etc/prettier-config/ts.js @@ -0,0 +1,8 @@ +module.exports = { + "tabWidth": 4, + "printWidth": 120, + "parser": "typescript", + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true +}; diff --git a/etc/prettier-config/vue.js b/etc/prettier-config/vue.js new file mode 100644 index 000000000000..5c31df4c4e16 --- /dev/null +++ b/etc/prettier-config/vue.js @@ -0,0 +1,7 @@ +module.exports = { + "tabWidth": 4, + "printWidth": 120, + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true +}; diff --git a/etc/scripts/prepare_bellman_cuda.sh b/etc/scripts/prepare_bellman_cuda.sh new file mode 100755 index 000000000000..db0ba745bbab --- /dev/null +++ b/etc/scripts/prepare_bellman_cuda.sh @@ -0,0 +1,7 @@ +echo "preparing bellman cuda directory" +gh release -R github.com/matter-labs/bellman-cuda download "$1" +gh release -R github.com/matter-labs/bellman-cuda download "$1" -A tar.gz +mkdir -p bellman-cuda +tar xvf 
bellman-cuda.tar.gz -C ./bellman-cuda +tar xvf bellman-cuda-"$1".tar.gz +mv bellman-cuda-"$1"/* ./bellman-cuda/ diff --git a/etc/system-contracts b/etc/system-contracts new file mode 160000 index 000000000000..0cee44614be5 --- /dev/null +++ b/etc/system-contracts @@ -0,0 +1 @@ +Subproject commit 0cee44614be5344a8ffd196d0303ffec226c5c15 diff --git a/etc/test_config/.gitignore b/etc/test_config/.gitignore new file mode 100644 index 000000000000..fda0c2b24379 --- /dev/null +++ b/etc/test_config/.gitignore @@ -0,0 +1,2 @@ +volatile/* +!volatile/.empty diff --git a/etc/test_config/README.md b/etc/test_config/README.md new file mode 100644 index 000000000000..ac7ecffd4ec7 --- /dev/null +++ b/etc/test_config/README.md @@ -0,0 +1,11 @@ +# Test data for zkSync + +This folder contains the data required for various zkSync tests. + +Directory contains three subfolders: + +- `constant`: Data that remains the same between various runs, filled manually and committed to the repository. For + example, private / public keys of test accounts. +- `volatile`: Data that may change, filled by scripts and is **not** committed to the repository. For example, deployed + contracts addresses. +- `sdk`: Data used to test SDK implementations. 
diff --git a/etc/test_config/constant/api.json b/etc/test_config/constant/api.json new file mode 100644 index 000000000000..050daba0e66e --- /dev/null +++ b/etc/test_config/constant/api.json @@ -0,0 +1,3 @@ +{ + "rest_api_url": "http://127.0.0.1:3001" +} diff --git a/etc/test_config/constant/eth.json b/etc/test_config/constant/eth.json new file mode 100644 index 000000000000..624e605e3c20 --- /dev/null +++ b/etc/test_config/constant/eth.json @@ -0,0 +1,5 @@ +{ + "web3_url": "http://127.0.0.1:8545", + "test_mnemonic": "stuff slice staff easily soup parent arm payment cotton trade scatter struggle", + "mnemonic": "fine music test violin matrix prize squirrel panther purchase material script deal" +} diff --git a/etc/test_config/volatile/.empty b/etc/test_config/volatile/.empty new file mode 100644 index 000000000000..8ccbd755e401 --- /dev/null +++ b/etc/test_config/volatile/.empty @@ -0,0 +1,2 @@ +# Empty file just so the folder is committed into the repository. +# This directory will be filled by scripts. 
diff --git a/etc/thread b/etc/thread new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/etc/tokens/goerli.json b/etc/tokens/goerli.json new file mode 100644 index 000000000000..3e42e7eaec93 --- /dev/null +++ b/etc/tokens/goerli.json @@ -0,0 +1,26 @@ +[ + { + "name": "ChainLink Token (goerli)", + "symbol": "LINK", + "decimals": 18, + "address": "0x63bfb2118771bd0da7A6936667A7BB705A06c1bA" + }, + { + "name": "wBTC", + "symbol": "wBTC", + "decimals": 8, + "address": "0xCA063A2AB07491eE991dCecb456D1265f842b568" + }, + { + "name": "USD Coin (goerli)", + "symbol": "USDC", + "decimals": 6, + "address": "0xd35CCeEAD182dcee0F148EbaC9447DA2c4D449c4" + }, + { + "name": "DAI", + "symbol": "DAI", + "decimals": 18, + "address": "0x5C221E77624690fff6dd741493D735a17716c26B" + } +] diff --git a/etc/tokens/mainnet.json b/etc/tokens/mainnet.json new file mode 100644 index 000000000000..39c2cbd0b6d2 --- /dev/null +++ b/etc/tokens/mainnet.json @@ -0,0 +1,98 @@ +[ + { + "address": "0x6B175474E89094C44Da98b954EedeAC495271d0F", + "decimals": 18, + "symbol": "DAI", + "name": "Dai Stablecoin" + }, + { + "address": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + "decimals": 6, + "symbol": "USDC", + "name": "USD Coin" + }, + { + "address": "0x0000000000085d4780B73119b644AE5ecd22b376", + "decimals": 18, + "symbol": "TUSD", + "name": "TrueUSD" + }, + { + "address": "0xdAC17F958D2ee523a2206206994597C13D831ec7", + "decimals": 6, + "symbol": "USDT", + "name": "Tether USD" + }, + { + "address": "0x57Ab1ec28D129707052df4dF418D58a2D46d5f51", + "decimals": 18, + "symbol": "SUSD", + "name": "Synth sUSD" + }, + { + "address": "0x4Fabb145d64652a948d72533023f6E7A623C7C53", + "decimals": 18, + "symbol": "BUSD", + "name": "Binance USD" + }, + { + "address": "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", + "decimals": 18, + "symbol": "LEND", + "name": "EthLend" + }, + { + "address": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", + "decimals": 18, + "symbol": "BAT", + "name": "Basic Attention 
Token" + }, + { + "address": "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", + "decimals": 18, + "symbol": "KNC", + "name": "KyberNetwork" + }, + { + "address": "0x514910771AF9Ca656af840dff83E8264EcF986CA", + "decimals": 18, + "symbol": "LINK", + "name": "ChainLink Token" + }, + { + "address": "0x0F5D2fB29fb7d3CFeE444a200298f468908cC942", + "decimals": 18, + "symbol": "MANA", + "name": "Decentraland" + }, + { + "address": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", + "decimals": 18, + "symbol": "MKR", + "name": "Maker" + }, + { + "address": "0x1985365e9f78359a9B6AD760e32412f4a445E862", + "decimals": 18, + "symbol": "REP", + "name": "Augur" + }, + { + "address": "0xC011a73ee8576Fb46F5E1c5751cA3B9Fe0af2a6F", + "decimals": 18, + "symbol": "SNX", + "name": "Synthetix Network Token" + }, + { + "address": "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + "decimals": 8, + "symbol": "WBTC", + "name": "Wrapped BTC" + }, + { + "address": "0xE41d2489571d322189246DaFA5ebDe1F4699F498", + "decimals": 18, + "symbol": "ZRX", + "name": "0x Protocol Token" + } +] diff --git a/etc/tokens/rinkeby.json b/etc/tokens/rinkeby.json new file mode 100644 index 000000000000..28310bbde3c3 --- /dev/null +++ b/etc/tokens/rinkeby.json @@ -0,0 +1,122 @@ +[ + { + "address": "0x3B00Ef435fA4FcFF5C209a37d1f3dcff37c705aD", + "decimals": 6, + "symbol": "USDT", + "name": "Tether USD" + }, + { + "address": "0xeb8f08a975Ab53E34D8a0330E0D34de942C95926", + "decimals": 6, + "symbol": "USDC", + "name": "USD Coin" + }, + { + "address": "0x4da8d0795830f75BE471F072a034d42c369B5d0A", + "decimals": 18, + "symbol": "LINK", + "name": "ChainLink Token" + }, + { + "address": "0xd2255612F9b045e9c81244bB874aBb413Ca139a3", + "decimals": 18, + "symbol": "TUSD", + "name": "TrueUSD" + }, + { + "address": "0x14700Cae8B2943bad34C70bB76AE27ECF5bC5013", + "decimals": 18, + "symbol": "HT", + "name": "HuobiToken" + }, + { + "address": "0x2B203de02AD6109521e09985b3aF9B8c62541Cd6", + "decimals": 18, + "symbol": "OMG", + "name": 
"OMGToken" + }, + { + "address": "0x2655F3a9eEB7F960be83098457144813ffaD07a4", + "decimals": 18, + "symbol": "TRB", + "name": "Tellor Tributes" + }, + { + "address": "0xDB7F2B9f6a0cB35FE5D236e5ed871D3aD4184290", + "decimals": 18, + "symbol": "ZRX", + "name": "0x Protocol Token" + }, + { + "address": "0xD2084eA2AE4bBE1424E4fe3CDE25B713632fb988", + "decimals": 18, + "symbol": "BAT", + "name": "Basic Attention Token" + }, + { + "address": "0x9Cac8508b9ff26501439590a24893D80e7E84D21", + "decimals": 18, + "symbol": "REP", + "name": "Reputation" + }, + { + "address": "0x8098165d982765097E4aa17138816e5b95f9fDb5", + "decimals": 8, + "symbol": "STORJ", + "name": "StorjToken" + }, + { + "address": "0x02d01f0835B7FDfa5d801A8f5f74c37F2BB1aE6a", + "decimals": 18, + "symbol": "NEXO", + "name": "Nexo" + }, + { + "address": "0xd93adDB2921b8061B697C2Ab055979BbEFE2B7AC", + "decimals": 8, + "symbol": "MCO", + "name": "MCO" + }, + { + "address": "0x290EBa6EC56EcC9fF81C72E8eccc77D2c2BF63eB", + "decimals": 18, + "symbol": "KNC", + "name": "Kyber Network Crystal" + }, + { + "address": "0x9ecec4d48Efdd96aE377aF3AB868f99De865CfF8", + "decimals": 18, + "symbol": "LAMB", + "name": "Lambda" + }, + { + "address": "0xd94e3DC39d4Cad1DAd634e7eb585A57A19dC7EFE", + "decimals": 18, + "symbol": "tGLM", + "name": "Test Golem Network Token" + }, + { + "address": "0x690f4886c6911d81beb8130DB30C825c27281F22", + "decimals": 18, + "symbol": "MLTT", + "name": "Matter Labs Trial Token" + }, + { + "address": "0xc3904a7c3A95bc265066Bb5BfC4D6664B2174774", + "decimals": 0, + "symbol": "NEM", + "name": "Dec0" + }, + { + "address": "0x70a4fCF3e4C8591B5B4318CEC5fACbB96a604198", + "decimals": 18, + "symbol": "DAI", + "name": "DAI" + }, + { + "address": "0x7457fc3f89ac99837d44f60B7860691fb2f09Bf5", + "decimals": 8, + "symbol": "wBTC", + "name": "wBTC" + } +] diff --git a/etc/tokens/ropsten.json b/etc/tokens/ropsten.json new file mode 100644 index 000000000000..41d975261ccd --- /dev/null +++ b/etc/tokens/ropsten.json 
@@ -0,0 +1,56 @@ +[ + { + "address": "0x351714Df444b8213f2C46aaA28829Fc5a8921304", + "decimals": 18, + "symbol": "DAI", + "name": "Dai Stablecoin" + }, + { + "address": "0x793f38AE147852C37071684CdffC1FF7c87f7d07", + "decimals": 18, + "symbol": "LINK", + "name": "ChainLink Token" + }, + { + "address": "0x5Ae45f7f17F0dF0b24aBe25a5522a9c9341aC04D", + "decimals": 18, + "symbol": "OKB", + "name": "OKB" + }, + { + "address": "0x7e317ceaa15fe7d5474349043332319C5f28cC11", + "decimals": 18, + "symbol": "FSN", + "name": "Fusion Token" + }, + { + "address": "0x16c550a97Ad2ae12C0C8CF1CC3f8DB4e0c45238f", + "decimals": 18, + "symbol": "BUSD", + "name": "Binance USD" + }, + { + "address": "0x6856eC11F56267e3326f536D0e9F36eC7f7D1498", + "decimals": 18, + "symbol": "TUSD", + "name": "TrueUSD" + }, + { + "address": "0xC865bCBe4b6eF4B58a790052f2B51B4f06f586aC", + "decimals": 18, + "symbol": "ZRX", + "name": "0x Protocol Token" + }, + { + "address": "0x1B46bd2FC40030B6959A2d407f7D16f66aFaDD52", + "decimals": 18, + "symbol": "BAT", + "name": "Basic Attention Token" + }, + { + "name": "Matter Labs Test Token", + "symbol": "MLTT", + "decimals": 18, + "address": "0xb36B2e278F5F1980631aD10F693AB3E1bEBd9f70" + } +] diff --git a/etc/tokens/test.json b/etc/tokens/test.json new file mode 100644 index 000000000000..fe51488c7066 --- /dev/null +++ b/etc/tokens/test.json @@ -0,0 +1 @@ +[] diff --git a/infrastructure/local-setup-preparation/.gitignore b/infrastructure/local-setup-preparation/.gitignore new file mode 100644 index 000000000000..796b96d1c402 --- /dev/null +++ b/infrastructure/local-setup-preparation/.gitignore @@ -0,0 +1 @@ +/build diff --git a/infrastructure/local-setup-preparation/README.md b/infrastructure/local-setup-preparation/README.md new file mode 100644 index 000000000000..6fbbb7b3f37c --- /dev/null +++ b/infrastructure/local-setup-preparation/README.md @@ -0,0 +1,4 @@ +# Scripts for local setup preparation + +This project contains scripts that should be executed when 
preparing the zkSync local setup used by outside developers, +e.g. deposit ETH to some of the test accounts. diff --git a/infrastructure/local-setup-preparation/package.json b/infrastructure/local-setup-preparation/package.json new file mode 100644 index 000000000000..61fc44e1e916 --- /dev/null +++ b/infrastructure/local-setup-preparation/package.json @@ -0,0 +1,17 @@ +{ + "name": "local-setup-preparation", + "version": "1.0.0", + "main": "build/index.js", + "license": "MIT", + "dependencies": { + "ts-node": "^10.7.0", + "ethers": "~5.5.0", + "zksync-web3": "link:../../sdk/zksync-web3.js" + }, + "devDependencies": { + "typescript": "^4.5.5" + }, + "scripts": { + "start": "ts-node ./src/index.ts" + } +} diff --git a/infrastructure/local-setup-preparation/src/index.ts b/infrastructure/local-setup-preparation/src/index.ts new file mode 100644 index 000000000000..de25e226f59e --- /dev/null +++ b/infrastructure/local-setup-preparation/src/index.ts @@ -0,0 +1,58 @@ +import { utils } from 'zksync-web3'; +import { ethers } from 'ethers'; +import { getEthersProvider, getWalletKeys } from './utils'; + +// 10**12 ether +const AMOUNT_TO_DEPOSIT = ethers.utils.parseEther('1000000000000'); + +async function depositWithRichAccounts() { + const ethProvider = getEthersProvider(); + const wallets = getWalletKeys().map((wk) => new ethers.Wallet(wk.privateKey, ethProvider)); + + const handles: Promise[] = []; + + if (!process.env.CONTRACTS_DIAMOND_PROXY_ADDR) { + throw new Error('zkSync L1 Main contract address was not found'); + } + + for (const wallet of wallets) { + const contract = new ethers.Contract(process.env.CONTRACTS_DIAMOND_PROXY_ADDR, utils.ZKSYNC_MAIN_ABI, wallet); + + const overrides = { + value: AMOUNT_TO_DEPOSIT + }; + + const balance = await wallet.getBalance(); + console.log(`Wallet balance is ${ethers.utils.formatEther(balance)} ETH`); + + handles.push( + // We have to implement the deposit manually because we run this script before running the server, + // deposit 
method from wallet requires a running server + contract.requestL2Transaction( + wallet.address, + AMOUNT_TO_DEPOSIT, + '0x', + utils.RECOMMENDED_DEPOSIT_L2_GAS_LIMIT, + utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + [], + wallet.address, + overrides + ) + ); + } + + const depositHandles = (await Promise.all(handles)).map((h) => h.wait()); + await Promise.all(depositHandles); +} + +async function main() { + await depositWithRichAccounts(); +} + +main() + .then(() => { + console.log('Successfully deposited funds for the rich accounts!'); + }) + .catch((e) => { + console.log(`Execution failed with error ${e}`); + }); diff --git a/infrastructure/local-setup-preparation/src/utils.ts b/infrastructure/local-setup-preparation/src/utils.ts new file mode 100644 index 000000000000..713b3027bf9b --- /dev/null +++ b/infrastructure/local-setup-preparation/src/utils.ts @@ -0,0 +1,28 @@ +import { ethers } from 'ethers'; +import * as fs from 'fs'; +import * as path from 'path'; + +interface WalletKey { + address: string; + privateKey: string; +} + +export function getWalletKeys(): WalletKey[] { + const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`); + const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' })); + const NUM_TEST_WALLETS = 10; + const baseWalletPath = "m/44'/60'/0'/0/"; + const walletKeys: WalletKey[] = []; + for (let i = 0; i < NUM_TEST_WALLETS; ++i) { + const ethWallet = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, baseWalletPath + i); + walletKeys.push({ + address: ethWallet.address, + privateKey: ethWallet.privateKey + }); + } + return walletKeys; +} + +export function getEthersProvider(): ethers.providers.JsonRpcProvider { + return new ethers.providers.JsonRpcProvider(process.env.ETH_CLIENT_WEB3_URL || 'http://localhost:8545'); +} diff --git a/infrastructure/local-setup-preparation/tsconfig.json b/infrastructure/local-setup-preparation/tsconfig.json new file mode 
100644 index 000000000000..25645d23a739 --- /dev/null +++ b/infrastructure/local-setup-preparation/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es6", + + "outDir": "./build", + "esModuleInterop": true, + "declaration": true, + + "preserveSymlinks": true, + "preserveWatchOutput": true + }, + "files": [ + "src/index.ts" + ] +} diff --git a/infrastructure/openzeppelin-tests-preparation/package.json b/infrastructure/openzeppelin-tests-preparation/package.json new file mode 100644 index 000000000000..e23fcf34f96a --- /dev/null +++ b/infrastructure/openzeppelin-tests-preparation/package.json @@ -0,0 +1,20 @@ +{ + "name": "openzeppelin-tests-preparation", + "version": "1.0.0", + "main": "build/index.js", + "license": "MIT", + "dependencies": { + "axios": "^0.27.2", + "ethers": "~5.7.0", + "fs": "^0.0.1-security", + "path": "^0.12.7", + "ts-node": "^10.7.0", + "zksync-web3": "link:../../sdk/zksync-web3.js" + }, + "devDependencies": { + "typescript": "^4.5.5" + }, + "scripts": { + "start": "ts-node ./src/index.ts" + } +} diff --git a/infrastructure/openzeppelin-tests-preparation/src/index.ts b/infrastructure/openzeppelin-tests-preparation/src/index.ts new file mode 100644 index 000000000000..6ac9b3a1d7e9 --- /dev/null +++ b/infrastructure/openzeppelin-tests-preparation/src/index.ts @@ -0,0 +1,72 @@ +import * as zkweb3 from 'zksync-web3'; +import * as ethers from 'ethers'; +import * as path from 'path'; +import * as fs from 'fs'; +import * as axios from 'axios'; + +async function depositTestAccounts() { + const ethProvider = new ethers.providers.JsonRpcProvider( + process.env.L1_RPC_ADDRESS || process.env.ETH_CLIENT_WEB3_URL + ); + const testConfigPath = path.join(process.env.ZKSYNC_HOME!, `etc/test_config/constant`); + const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' })); + const ethWallet = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, 
"m/44'/60'/0'/0/0").connect( + ethProvider + ); + const web3Provider = new zkweb3.Provider(process.env.ZKSYNC_WEB3_API_URL || 'http://localhost:3050'); + const syncWallet = new zkweb3.Wallet(ethWallet.privateKey, web3Provider, ethProvider); + + const testAccountPks = process.env.API_WEB3_JSON_RPC_ACCOUNT_PKS!.split(','); + let handles = []; + for (const key of testAccountPks) { + const wallet = new zkweb3.Wallet(key, web3Provider, ethProvider); + handles.push( + await syncWallet.deposit({ + token: ethers.constants.AddressZero, + to: wallet.address, + amount: ethers.utils.parseEther('10000') + }) + ); + } + for (const handle of handles) { + await handle.wait(); + } +} + +async function sendBytecodeFromFolder(folderPath: string) { + const files = fs.readdirSync(folderPath); + for (const file of files) { + const filePath = path.join(folderPath, file); + if (fs.lstatSync(filePath).isDirectory()) { + await sendBytecodeFromFolder(filePath); + } else { + if (filePath.includes('.json')) { + const text = fs.readFileSync(filePath, 'utf-8'); + const data = JSON.parse(text); + if ('bytecode' in data) { + const req = { + jsonrpc: '2.0', + method: 'zks_setKnownBytecode', + params: [data.bytecode], + id: 1 + }; + const resp = await axios.default.post('http://127.0.0.1:3050', req); + console.log(filePath + ': ' + resp.data.toString()); + } + } + } + } +} + +async function main() { + await depositTestAccounts(); + await sendBytecodeFromFolder(`${process.env.ZKSYNC_HOME}/etc/openzeppelin-contracts/artifacts-zk`); +} + +main() + .then(() => { + console.log('Finished successfully'); + }) + .catch((err) => { + console.log('err: ' + err); + }); diff --git a/infrastructure/openzeppelin-tests-preparation/tsconfig.json b/infrastructure/openzeppelin-tests-preparation/tsconfig.json new file mode 100644 index 000000000000..25645d23a739 --- /dev/null +++ b/infrastructure/openzeppelin-tests-preparation/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": 
"es6", + + "outDir": "./build", + "esModuleInterop": true, + "declaration": true, + + "preserveSymlinks": true, + "preserveWatchOutput": true + }, + "files": [ + "src/index.ts" + ] +} diff --git a/infrastructure/reading-tool/.gitignore b/infrastructure/reading-tool/.gitignore new file mode 100644 index 000000000000..796b96d1c402 --- /dev/null +++ b/infrastructure/reading-tool/.gitignore @@ -0,0 +1 @@ +/build diff --git a/infrastructure/reading-tool/README.md b/infrastructure/reading-tool/README.md new file mode 100644 index 000000000000..334f7ea88070 --- /dev/null +++ b/infrastructure/reading-tool/README.md @@ -0,0 +1,10 @@ +# Tool for reading test config + +A utility to read data from test configs for ts-tests and zksync.js. Currently, it reads data from 'etc/test_config/' +and 'etc/tokens/'. + +## Compile + +``` +yarn run build +``` diff --git a/infrastructure/reading-tool/package.json b/infrastructure/reading-tool/package.json new file mode 100644 index 000000000000..7e4a59c483c1 --- /dev/null +++ b/infrastructure/reading-tool/package.json @@ -0,0 +1,15 @@ +{ + "name": "reading-tool", + "version": "1.0.0", + "license": "MIT", + "main": "build/index.js", + "private": true, + "devDependencies": { + "ts-node": "^10.1.0", + "typescript": "^4.3.5" + }, + "scripts": { + "build": "tsc", + "watch": "tsc --watch" + } +} diff --git a/infrastructure/reading-tool/src/index.ts b/infrastructure/reading-tool/src/index.ts new file mode 100644 index 000000000000..bbaa3ef5cd5d --- /dev/null +++ b/infrastructure/reading-tool/src/index.ts @@ -0,0 +1,47 @@ +import * as fs from 'fs'; + +function configPath(postfix: string) { + return `${process.env.ZKSYNC_HOME}/etc/test_config/${postfix}`; +} + +function loadConfig(path: string) { + return JSON.parse( + fs.readFileSync(path, { + encoding: 'utf-8' + }) + ); +} + +export function loadTestConfig(withWithdrawalHelpers: boolean) { + const ethConstantPath = configPath('constant/eth.json'); + const ethConfig = loadConfig(ethConstantPath); 
+ + if (withWithdrawalHelpers) { + const withdrawalHelpersConfigPath = configPath('volatile/withdrawal-helpers.json'); + const withdrawalHelpersConfig = loadConfig(withdrawalHelpersConfigPath); + return { + eth: ethConfig, + withdrawalHelpers: withdrawalHelpersConfig + }; + } else { + return { + eth: ethConfig + }; + } +} + +export type Token = { + name: string; + symbol: string; + decimals: number; + address: string; +}; + +export function getTokens(network: string): Token[] { + const configPath = `${process.env.ZKSYNC_HOME}/etc/tokens/${network}.json`; + return JSON.parse( + fs.readFileSync(configPath, { + encoding: 'utf-8' + }) + ); +} diff --git a/infrastructure/reading-tool/tsconfig.json b/infrastructure/reading-tool/tsconfig.json new file mode 100644 index 000000000000..2e0816945ae7 --- /dev/null +++ b/infrastructure/reading-tool/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es6", + + "outDir": "./build", + "esModuleInterop": true, + "declaration": true, + + "preserveSymlinks": true, + "preserveWatchOutput": true + }, + "files": [ + "./src/index.ts" + ] +} diff --git a/infrastructure/zk/.gitignore b/infrastructure/zk/.gitignore new file mode 100644 index 000000000000..7d1718405578 --- /dev/null +++ b/infrastructure/zk/.gitignore @@ -0,0 +1,2 @@ +/build +todo diff --git a/infrastructure/zk/README.md b/infrastructure/zk/README.md new file mode 100644 index 000000000000..dc58d965ff0d --- /dev/null +++ b/infrastructure/zk/README.md @@ -0,0 +1,30 @@ +# `zk` + +### Scope + +This document describes how to extend the `zk` tool. For usage tips please use `--help` flag available on all +subcommands. 
+ +### Adding a top-level subcommand + +To add a top-level subcommand `cmd` follow these steps: + +- create a file `src/cmd.ts` +- implement all needed functionality (preferably export it, too) +- create an `export const command` via [`commander.js`](https://github.com/tj/commander.js) API, possibly extending + itself with subcommands +- declare `import { command as cmd } from './cmd';` in `src/index.ts` +- add `cmd` as a subcommand via `.addCommand(cmd)` +- notify the team to rebuild `zk` upon merge + +If `cmd` will have deeply nested subcommands, consider creating a directory `cmd/` instead of a file. See `db/` +structure as an example. + +### Extending an existing subcommand + +Simply add the needed functionality to the corresponding `.ts` file and add your subcommand to the existing +`const command` via `.command(...)` API. Don't forget to notify the team to rebuild `zk` upon merge. + +### Building changes + +Simply run `zk` which will `yarn build` the changes for you! diff --git a/infrastructure/zk/package.json b/infrastructure/zk/package.json new file mode 100644 index 000000000000..e88091396864 --- /dev/null +++ b/infrastructure/zk/package.json @@ -0,0 +1,32 @@ +{ + "name": "zk", + "version": "0.1.0", + "main": "build/index.js", + "license": "MIT", + "bin": "build/index.js", + "scripts": { + "build": "tsc", + "watch": "tsc --watch", + "start": "node build/index.js" + }, + "dependencies": { + "@iarna/toml": "^2.2.5", + "chalk": "^4.0.0", + "commander": "^6.0.0", + "deep-extend": "^0.6.0", + "dotenv": "^8.2.0", + "ethers": "~5.5.0", + "node-fetch": "^2.6.1", + "tabtab": "^3.0.2", + "zksync-web3": "link:../../sdk/zksync-web3.js" + }, + "devDependencies": { + "@matterlabs/hardhat-zksync-solc": "^0.3.14-beta.3", + "@types/deep-extend": "^0.4.31", + "@types/node": "^14.6.1", + "@types/node-fetch": "^2.5.7", + "@types/tabtab": "^3.0.1", + "hardhat": "=2.12.4", + "typescript": "^4.3.5" + } +} diff --git a/infrastructure/zk/src/clean.ts 
b/infrastructure/zk/src/clean.ts new file mode 100644 index 000000000000..9f306eae1ab0 --- /dev/null +++ b/infrastructure/zk/src/clean.ts @@ -0,0 +1,54 @@ +import { Command } from 'commander'; +import * as fs from 'fs'; +import { confirmAction } from './utils'; + +export function clean(directory: string) { + if (fs.existsSync(directory)) { + fs.rmdirSync(directory, { recursive: true }); + console.log(`Successfully removed ${directory}`); + } +} + +export const command = new Command('clean') + .option('--config [environment]') + .option('--database') + .option('--backups') + .option('--contracts') + .option('--artifacts') + .option('--all') + .description('removes generated files') + .action(async (cmd) => { + if (!cmd.contracts && !cmd.config && !cmd.database && !cmd.backups) { + cmd.all = true; // default is all + } + await confirmAction(); + + if (cmd.all || cmd.config) { + const env = cmd.environment || process.env.ZKSYNC_ENV || 'dev'; + clean(`etc/env/${env}`); + + fs.rmSync(`etc/env/${env}.env`); + console.log(`Successfully removed etc/env/${env}.env`); + } + + if (cmd.all || cmd.artifacts) { + clean(`artifacts`); + } + + if (cmd.all || cmd.database) { + clean('db'); + } + + if (cmd.all || cmd.backups) { + clean('backups'); + } + + if (cmd.all || cmd.contracts) { + clean('contracts/ethereum/artifacts'); + clean('contracts/ethereum/cache'); + clean('contracts/ethereum/typechain'); + clean('contracts/zksync/artifacts-zk'); + clean('contracts/zksync/cache-zk'); + clean('contracts/zksync/typechain'); + } + }); diff --git a/infrastructure/zk/src/compiler.ts b/infrastructure/zk/src/compiler.ts new file mode 100644 index 000000000000..ee69e60ac211 --- /dev/null +++ b/infrastructure/zk/src/compiler.ts @@ -0,0 +1,29 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +export async function compileTestContracts() { + await utils.spawn('yarn --cwd etc/contracts-test-data hardhat compile'); + await utils.spawn('yarn --cwd 
core/tests/ts-integration/contracts hardhat compile'); +} + +export async function compileSystemContracts() { + await utils.spawn('yarn --cwd etc/ERC20 hardhat compile'); + + process.chdir('etc/system-contracts'); + await utils.spawn('yarn'); + await utils.spawn('yarn hardhat compile'); + await utils.spawn('yarn preprocess'); + await utils.spawn('yarn hardhat run ./scripts/compile-yul.ts'); + process.chdir('../..'); +} + +export async function compileAll() { + await compileSystemContracts(); + await compileTestContracts(); +} + +export const command = new Command('compiler').description('compile contract'); + +command.command('all').description('').action(compileAll); +command.command('system-contracts').description('').action(compileSystemContracts); +command.command('test-contracts').description('').action(compileTestContracts); diff --git a/infrastructure/zk/src/completion.ts b/infrastructure/zk/src/completion.ts new file mode 100644 index 000000000000..e472c476cbd7 --- /dev/null +++ b/infrastructure/zk/src/completion.ts @@ -0,0 +1,73 @@ +// This file is responsible for generating shell completion +// If the completion script is installed, there is no need to update it after adding a new subcommand + +import { Command, Option } from 'commander'; +import tabtab from 'tabtab'; + +type CommandInfo = { + command: string; + description: string; + options: string[]; + subcommands: CommandInfo[]; +}; + +function commandInfo(cmd: Command): CommandInfo { + return { + command: cmd._name, + description: cmd._description, + options: cmd.options.map((option: Option) => option.long || option.short), + subcommands: cmd.commands.map((subcmd) => commandInfo(subcmd as Command)) + }; +} + +// this actually completes the current subcommand +// see `tabtab` docs for a better understanding +function completer(env: any, info: CommandInfo) { + if (!env.complete) return; + if (env.prev == info.command) { + tabtab.log( + info.subcommands.map((subcmd) => { + return { + name: 
subcmd.command, + description: subcmd.description + }; + }) + ); + tabtab.log(info.options); + return; + } + info.subcommands.map((subcmd) => completer(env, subcmd)); +} + +// the command `completion` without subcommands +// is only meant to be used internally by the completion +// engine of the shell and not by actual users. +export function command(program: Command) { + // prettier-ignore + const completion = new Command('completion') + .description('generate shell completion scripts') + .action(() => { + const env = tabtab.parseEnv(process.env); + const info = commandInfo(program); + return completer(env, info); + }); + + completion + .command('install') + .description('install shell completions for zk') + .action(async () => { + await tabtab.install({ + name: 'zk', + completer: 'zk' + }); + }); + + completion + .command('uninstall') + .description('uninstall shell completions for zk') + .action(async () => { + await tabtab.uninstall({ name: 'zk' }); + }); + + return completion; +} diff --git a/infrastructure/zk/src/config.ts b/infrastructure/zk/src/config.ts new file mode 100644 index 000000000000..71c4483f97c9 --- /dev/null +++ b/infrastructure/zk/src/config.ts @@ -0,0 +1,170 @@ +import { Command } from 'commander'; +import * as toml from '@iarna/toml'; +import * as fs from 'fs'; +import * as path from 'path'; +import deepExtend from 'deep-extend'; +import { env } from 'process'; + +const CONFIG_FILES = [ + 'api.toml', + 'chain.toml', + 'contract_verifier.toml', + 'contracts.toml', + 'database.toml', + 'eth_client.toml', + 'eth_sender.toml', + 'eth_watch.toml', + 'misc.toml', + 'object_store.toml', + 'nfs.toml', + 'prover.toml', + 'rust.toml', + 'private.toml', + 'fetcher.toml', + 'witness_generator.toml', + 'circuit_synthesizer.toml', + 'prover_group.toml' +]; + +async function getEnvironment(): Promise { + const environmentFilePath = path.join(envDirPath(), 'current'); + // Try to read environment from file. 
+ if (fs.existsSync(environmentFilePath)) { + const environment = (await fs.promises.readFile(environmentFilePath)).toString().trim(); + if (environment !== '') { + return environment; + } + } + + // Fallback scenario: file doesn't exist or is empty. + return 'dev'; +} + +function envDirPath(): string { + return path.join(env['ZKSYNC_HOME'] as string, 'etc', 'env'); +} + +function getConfigPath(environment: string, configName: string): string { + return path.join(envDirPath(), environment, configName); +} + +async function loadConfig(environment: string, configName: string) { + const configPath = getConfigPath(environment, configName); + const fileContents = await fs.promises.readFile(configPath); + try { + return toml.parse(fileContents.toString()); + } catch (e: any) { + console.error( + `<${environment}/${configName}> load failed: Parsing error on line ${e.line} column ${e.column}: ${e.message}` + ); + process.exit(1); + } +} + +async function checkConfigExistence(environment: string) { + const configFolder = path.join(envDirPath(), environment); + + // Check if the folder exists and it's not empty. + if (fs.existsSync(configFolder) && fs.readdirSync(configFolder).length != 0) { + return; + } + + // Folder doesn't exist or it's empty. + if (environment == 'dev') { + // Copy configs from the `base` folder. + // Folder may be created, just be empty, so create it only if needed. + if (!fs.existsSync(configFolder)) { + await fs.promises.mkdir(configFolder); + } + + for (const configFile of CONFIG_FILES) { + const from = getConfigPath('base', configFile); + const to = getConfigPath('dev', configFile); + await fs.promises.copyFile(from, to); + } + return; + } + + // Folder doesn't exist and the environment is not `dev`. 
+ console.error(`Configuration files were not found for environment <${environment}>`); + process.exit(1); +} + +function collectVariables(prefix: string, config: any): Map { + let variables: Map = new Map(); + + for (const key in config) { + const keyUppercase = key.toLocaleUpperCase(); + if (typeof config[key] == 'object' && config[key] !== null && !Array.isArray(config[key])) { + // It's a map object: parse it recursively. + + // Add a prefix for the child elements: + // '' -> 'KEY_'; 'KEY_' -> 'KEY_ANOTHER_KEY_'. + const newPrefix = `${prefix}${keyUppercase}_`; + + const nestedEntries = collectVariables(newPrefix, config[key]); + variables = new Map([...variables, ...nestedEntries]); + } else { + const variableName = `${prefix}${keyUppercase}`; + const value = Array.isArray(config[key]) ? config[key].join(',') : config[key]; + + variables.set(variableName, value); + } + } + + return variables; +} + +async function loadAllConfigs(environment?: string) { + if (!environment) { + environment = await getEnvironment(); + } + + // Check that config folder exists (or initialize it). + await checkConfigExistence(environment); + + // Accumulator to which we will load all the configs. + let config = {}; + + for (const configFile of CONFIG_FILES) { + const localConfig = await loadConfig(environment, configFile); + + // Extend the `config` with the new values. 
+ deepExtend(config, localConfig); + } + + return config; +} + +export async function printAllConfigs(environment?: string) { + const config = await loadAllConfigs(environment); + console.log(`${JSON.stringify(config, null, 2)}`); +} + +export async function compileConfig(environment?: string) { + if (!environment) { + environment = await getEnvironment(); + } + + const config = await loadAllConfigs(environment); + + const variables = collectVariables('', config); + + let outputFileContents = `# This file is generated automatically by 'zk config compile'\n`; + outputFileContents += `# Do not edit manually!\n\n`; + variables.forEach((value: string, key: string) => { + outputFileContents += `${key}=${value}\n`; + }); + + const outputFileName = path.join(envDirPath(), `${environment}.env`); + await fs.promises.writeFile(outputFileName, outputFileContents); + console.log('Configs compiled'); +} + +export const command = new Command('config').description('config management'); + +command.command('load [environment]').description('load the config for a certain environment').action(printAllConfigs); +command + .command('compile [environment]') + .description('compile the config for a certain environment') + .action(compileConfig); diff --git a/infrastructure/zk/src/contract.ts b/infrastructure/zk/src/contract.ts new file mode 100644 index 000000000000..87373a328147 --- /dev/null +++ b/infrastructure/zk/src/contract.ts @@ -0,0 +1,125 @@ +import { Command } from 'commander'; +import * as utils from './utils'; +import * as env from './env'; +import fs from 'fs'; + +export async function build() { + await utils.spawn('yarn l1-contracts build'); +} + +export async function verifyL1Contracts() { + // Spawning a new script is expensive, so if we know that publishing is disabled, it's better to not launch + // it at all (even though `verify` checks the network as well). 
+ if (process.env.CHAIN_ETH_NETWORK == 'localhost') { + console.log('Skip contract verification on localhost'); + return; + } + await utils.spawn('yarn l1-contracts verify'); +} + +function updateContractsEnv(deployLog: String, envVars: Array) { + let updatedContracts = ''; + for (const envVar of envVars) { + const pattern = new RegExp(`${envVar}=.*`, 'g'); + const matches = deployLog.match(pattern); + if (matches !== null) { + const varContents = matches[0]; + env.modify(envVar, varContents); + env.modify_contracts_toml(envVar, varContents); + + updatedContracts += `${varContents}\n`; + } + } + + return updatedContracts; +} + +export async function initializeL1AllowList(args: any[] = []) { + await utils.confirmAction(); + + const isLocalSetup = process.env.ZKSYNC_LOCAL_SETUP; + const baseCommandL1 = isLocalSetup ? `yarn --cwd /contracts/ethereum` : `yarn l1-contracts`; + + await utils.spawn(`${baseCommandL1} initialize-allow-list ${args.join(' ')} | tee initilizeL1AllowList.log`); +} + +export async function deployL2(args: any[] = []) { + await utils.confirmAction(); + + const isLocalSetup = process.env.ZKSYNC_LOCAL_SETUP; + + // In the localhost setup scenario we don't have the workspace, + // so we have to `--cwd` into the required directory. + const baseCommandL2 = isLocalSetup ? `yarn --cwd /contracts/zksync` : `yarn l2-contracts`; + const baseCommandL1 = isLocalSetup ? `yarn --cwd /contracts/ethereum` : `yarn l1-contracts`; + + // Skip compilation for local setup, since we already copied artifacts into the container. + await utils.spawn(`${baseCommandL2} build`); + await utils.spawn(`${baseCommandL2} compile-and-deploy-libs ${args.join(' ')}`); + + // IMPORTANT: initialize-bridges must go strictly *right after* the compile-and-deploy-libs step. + // Otherwise, the ExternalDecoder library will be erased. 
+ await utils.spawn(`${baseCommandL1} initialize-bridges ${args.join(' ')} | tee deployL2.log`); + + await utils.spawn(`${baseCommandL2} deploy-testnet-paymaster ${args.join(' ')} | tee -a deployL2.log`); + + const deployLog = fs.readFileSync('deployL2.log').toString(); + const envVars = [ + 'CONTRACTS_L2_ETH_BRIDGE_ADDR', + 'CONTRACTS_L2_ERC20_BRIDGE_ADDR', + 'CONTRACTS_L2_TESTNET_PAYMASTER_ADDR' + ]; + + updateContractsEnv(deployLog, envVars); +} + +export async function deployL1(args: any[]) { + await utils.confirmAction(); + + // In the localhost setup scenario we don't have the workspace, + // so we have to `--cwd` into the required directory. + const baseCommand = process.env.ZKSYNC_LOCAL_SETUP ? `yarn --cwd /contracts/ethereum` : `yarn l1-contracts`; + + await utils.spawn(`${baseCommand} deploy-no-build ${args.join(' ')} | tee deployL1.log`); + const deployLog = fs.readFileSync('deployL1.log').toString(); + const envVars = [ + 'CONTRACTS_DIAMOND_CUT_FACET_ADDR', + 'CONTRACTS_DIAMOND_UPGRADE_INIT_ADDR', + 'CONTRACTS_GOVERNANCE_FACET_ADDR', + 'CONTRACTS_MAILBOX_FACET_ADDR', + 'CONTRACTS_EXECUTOR_FACET_ADDR', + 'CONTRACTS_GETTERS_FACET_ADDR', + 'CONTRACTS_VERIFIER_ADDR', + 'CONTRACTS_DIAMOND_INIT_ADDR', + 'CONTRACTS_DIAMOND_PROXY_ADDR', + 'CONTRACTS_GENESIS_TX_HASH', + 'CONTRACTS_L1_ERC20_BRIDGE_PROXY_ADDR', + 'CONTRACTS_L1_ERC20_BRIDGE_IMPL_ADDR', + 'CONTRACTS_L1_ALLOW_LIST_ADDR' + ]; + const updatedContracts = updateContractsEnv(deployLog, envVars); + + // Write updated contract addresses and tx hashes to the separate file + // Currently it's used by loadtest github action to update deployment configmap. 
+ fs.writeFileSync('deployed_contracts.log', updatedContracts); +} + +export async function redeployL1(args: any[]) { + await deployL1(args); + await verifyL1Contracts(); +} + +export const command = new Command('contract').description('contract management'); + +command + .command('redeploy [deploy-opts...]') + .allowUnknownOption(true) + .description('redeploy contracts') + .action(redeployL1); +command.command('deploy [deploy-opts...]').allowUnknownOption(true).description('deploy contracts').action(deployL1); +command.command('build').description('build contracts').action(build); +command + .command('initilize-l1-allow-list-contract') + .description('initialize L1 allow list contract') + .action(initializeL1AllowList); +command.command('verify').description('verify L1 contracts').action(verifyL1Contracts); diff --git a/infrastructure/zk/src/contract_verifier.ts b/infrastructure/zk/src/contract_verifier.ts new file mode 100644 index 000000000000..507834866bb3 --- /dev/null +++ b/infrastructure/zk/src/contract_verifier.ts @@ -0,0 +1,10 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +export async function contractVerifier() { + await utils.spawn(`cargo run --bin zksync_contract_verifier --release`); +} + +export const command = new Command('contract_verifier') + .description('start zksync contract verifier') + .action(contractVerifier); diff --git a/infrastructure/zk/src/database/database.ts b/infrastructure/zk/src/database/database.ts new file mode 100644 index 000000000000..c9e34734d160 --- /dev/null +++ b/infrastructure/zk/src/database/database.ts @@ -0,0 +1,78 @@ +import { Command } from 'commander'; +import * as utils from '../utils'; +import * as env from '../env'; + +export async function reset() { + await utils.confirmAction(); + await wait(); + await drop(); + await setup(); +} + +export async function resetTest() { + const databaseUrl = process.env.DATABASE_URL as string; + process.env.DATABASE_URL = 
databaseUrl.replace('zksync_local', 'zksync_local_test'); + await utils.confirmAction(); + await drop(); + await setup(); +} + +export async function drop() { + await utils.confirmAction(); + console.log('Dropping DB...'); + await utils.spawn('cargo sqlx database drop -y'); +} + +export async function migrate() { + await utils.confirmAction(); + console.log('Running migrations...'); + await utils.spawn('cd core/lib/dal && cargo sqlx database create && cargo sqlx migrate run'); +} + +export async function generateMigration(name: String) { + console.log('Generating migration... '); + process.chdir('core/lib/dal'); + await utils.exec(`cargo sqlx migrate add -r ${name}`); + + process.chdir(process.env.ZKSYNC_HOME as string); +} + +export async function setup() { + process.chdir('core/lib/dal'); + const localDbUrl = 'postgres://postgres@localhost/zksync_local'; + const localTestDbUrl = 'postgres://postgres@localhost/zksync_local_test'; + if (process.env.DATABASE_URL == localDbUrl || process.env.DATABASE_URL == localTestDbUrl) { + console.log(`Using localhost database:`); + console.log(`DATABASE_URL = ${process.env.DATABASE_URL}`); + } else { + // Remote database, we can't show the contents. + console.log(`WARNING! 
Using prod db!`); + } + await utils.spawn('cargo sqlx database create'); + await utils.spawn('cargo sqlx migrate run'); + if (process.env.DATABASE_URL == localDbUrl) { + await utils.spawn('cargo sqlx prepare --check -- --tests || cargo sqlx prepare -- --tests'); + } + + process.chdir(process.env.ZKSYNC_HOME as string); + env.reload(); +} + +export async function wait(tries: number = 4) { + for (let i = 0; i < tries; i++) { + const result = await utils.allowFail(utils.exec(`pg_isready -d "${process.env.DATABASE_URL}"`)); + if (result !== null) return; // null means failure + await utils.sleep(5); + } + await utils.exec(`pg_isready -d "${process.env.DATABASE_URL}"`); +} + +export const command = new Command('db').description('database management'); + +command.command('drop').description('drop the database').action(drop); +command.command('migrate').description('run migrations').action(migrate); +command.command('new-migration ').description('generate a new migration').action(generateMigration); +command.command('setup').description('initialize the database and perform migrations').action(setup); +command.command('wait').description('wait for database to get ready for interaction').action(wait); +command.command('reset').description('reinitialize the database').action(reset); +command.command('reset-test').description('reinitialize the database for test').action(resetTest); diff --git a/infrastructure/zk/src/docker.ts b/infrastructure/zk/src/docker.ts new file mode 100644 index 000000000000..131b9ec01240 --- /dev/null +++ b/infrastructure/zk/src/docker.ts @@ -0,0 +1,119 @@ +import { Command } from 'commander'; +import * as utils from './utils'; +import * as contract from './contract'; + +const IMAGES = [ + 'server-v2', + 'contract-verifier', + 'prover-v2', + 'geth', + 'local-node', + 'zk-environment', + 'circuit-synthesizer' +]; +const UNIX_TIMESTAMP = Date.now(); + +async function dockerCommand(command: 'push' | 'build', image: string, customTag?: string) { + // 
Generating all tags for containers. We need 2 tags here: SHA and SHA+TS + const { stdout: COMMIT_SHORT_SHA }: { stdout: string } = await utils.exec('git rev-parse --short HEAD'); + const imageTagShaTS: string = process.env.IMAGE_TAG_SUFFIX + ? process.env.IMAGE_TAG_SUFFIX + : `${COMMIT_SHORT_SHA.trim()}-${UNIX_TIMESTAMP}`; + + // we want alternative flow for rust image + if (image == 'rust') { + await dockerCommand(command, 'server-v2', customTag); + await dockerCommand(command, 'prover', customTag); + return; + } + if (!IMAGES.includes(image)) { + throw new Error(`Wrong image name: ${image}`); + } + + if (image == 'keybase') { + image = 'keybase-secret'; + } + + const tagList = customTag ? [customTag] : defaultTagList(image, COMMIT_SHORT_SHA.trim(), imageTagShaTS); + + // Main build\push flow + // COMMIT_SHORT_SHA returnes with newline, so we need to trim it + switch (command) { + case 'build': + await _build(image, tagList); + break; + case 'push': + await _push(image, tagList); + break; + default: + console.log(`Unknown command for docker ${command}.`); + break; + } +} + +function defaultTagList(image: string, imageTagSha: string, imageTagShaTS: string) { + const tagList = ['server-v2', 'prover', 'contract-verifier', 'prover-v2', 'circuit-synthesizer'].includes(image) + ? ['latest2.0', `2.0-${imageTagSha}`, `2.0-${imageTagShaTS}`] + : [`latest2.0`]; + + return tagList; +} + +async function _build(image: string, tagList: string[]) { + if (image == 'server-v2' || image == 'prover') { + await contract.build(); + } + + const tagsToBuild = tagList.map((tag) => `-t matterlabs/${image}:${tag}`).join(' '); + + // generate list of tags for image - we want 3 tags (latest, SHA, SHA+TimeStamp) for listed components and only "latest" for everything else + + await utils.spawn(`CARGO_HOME=./cargo cargo fetch`); + + // HACK + // For prover-v2 which is not a prover, but should be built from the prover dockerfile. So here we go. + const imagePath = image == 'prover-v2' ? 
'prover' : image; + + // build image with needed tags + await utils.spawn(`DOCKER_BUILDKIT=1 docker build ${tagsToBuild} -f ./docker/${imagePath}/Dockerfile .`); +} + +async function _push(image: string, tagList: string[]) { + // For development purposes, we want to use `2.0` tags for 2.0 images, just to not interfere with 1.x + + for (const tag of tagList) { + await utils.spawn(`docker push matterlabs/${image}:${tag}`); + } +} + +export async function build(image: string, cmd: Command) { + await dockerCommand('build', image, cmd.customTag); +} + +export async function push(image: string, cmd: Command) { + await dockerCommand('build', image, cmd.customTag); + await dockerCommand('push', image, cmd.customTag); +} + +export async function restart(container: string) { + await utils.spawn(`docker-compose restart ${container}`); +} + +export async function pull() { + await utils.spawn('docker-compose pull'); +} + +export const command = new Command('docker').description('docker management'); + +command + .command('build ') + .option('--custom-tag ', 'Custom tag for image') + .description('build docker image') + .action(build); +command + .command('push ') + .option('--custom-tag ', 'Custom tag for image') + .description('build and push docker image') + .action(push); +command.command('pull').description('pull all containers').action(pull); +command.command('restart ').description('restart container in docker-compose.yml').action(restart); diff --git a/infrastructure/zk/src/down.ts b/infrastructure/zk/src/down.ts new file mode 100644 index 000000000000..85f07cca6dfb --- /dev/null +++ b/infrastructure/zk/src/down.ts @@ -0,0 +1,8 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +export async function down() { + await utils.spawn('docker-compose stop geth postgres'); +} + +export const command = new Command('down').description('stop development containers').action(down); diff --git a/infrastructure/zk/src/dummy-prover.ts 
b/infrastructure/zk/src/dummy-prover.ts new file mode 100644 index 000000000000..f9e136dfc7be --- /dev/null +++ b/infrastructure/zk/src/dummy-prover.ts @@ -0,0 +1,69 @@ +import { Command } from 'commander'; + +import * as server from './server'; +import * as contract from './contract'; +import * as env from './env'; + +async function performRedeployment() { + await contract.build(); + + try { + await server.genesis_from_sources(); + } catch { + console.log('Failed to genesis the state'); + } + + await contract.redeployL1([]); +} + +export async function status() { + if (process.env.CONTRACTS_DUMMY_VERIFIER == 'true') { + console.log('Dummy Prover status: enabled'); + return true; + } + console.log('Dummy Prover status: disabled'); + return false; +} + +async function setStatus(value: boolean, redeploy: boolean) { + env.modify('CONTRACTS_DUMMY_VERIFIER', `CONTRACTS_DUMMY_VERIFIER="${value}"`); + env.modify_contracts_toml('CONTRACTS_DUMMY_VERIFIER', `CONTRACTS_DUMMY_VERIFIER="${value}"`); + await status(); + if (redeploy) { + console.log('Redeploying the contract...'); + await performRedeployment(); + console.log('Done.'); + } +} + +export async function enable(redeploy: boolean = true) { + await setStatus(true, redeploy); +} + +export async function disable(redeploy: boolean = true) { + await setStatus(false, redeploy); +} + +export const command = new Command('dummy-prover').description('commands for zksync dummy prover'); + +command + .command('enable') + .description('enable the dummy prover') + .option('--no-redeploy', 'do not redeploy the contracts') + .action(async (cmd: Command) => { + await enable(cmd.redeploy); + }); + +command + .command('disable') + .description('disable the dummy prover') + .option('--no-redeploy', 'do not redeploy the contracts') + .action(async (cmd: Command) => { + await disable(cmd.redeploy); + }); + +command + .command('status') + .description('check if dummy prover is enabled') + // @ts-ignore + .action(status); diff --git 
a/infrastructure/zk/src/env.ts b/infrastructure/zk/src/env.ts new file mode 100644 index 000000000000..735ac33606f2 --- /dev/null +++ b/infrastructure/zk/src/env.ts @@ -0,0 +1,152 @@ +import { Command } from 'commander'; +import fs from 'fs'; +import dotenv from 'dotenv'; +import * as utils from './utils'; +import * as config from './config'; +import * as toml from '@iarna/toml'; + +export function get() { + fs.readdirSync('etc/env').forEach((file) => { + if (!file.endsWith('.env')) { + return; + } + + const env = file.replace(/\..*$/, ''); + if (env == process.env.ZKSYNC_ENV) { + console.log(' * ' + env); + } else { + console.log(' ' + env); + } + }); +} + +export async function gitHooks() { + if (fs.existsSync('.git')) { + await utils.exec(` + git config --local core.hooksPath || + git config --local core.hooksPath ${process.env.ZKSYNC_HOME}/.githooks + `); + } +} + +export function set(env: string) { + const envFile = `etc/env/${env}.env`; + const envDir = `etc/env/${env}`; + if (!fs.existsSync(envFile)) { + throw new Error(envFile + ' not found'); + } + if (!fs.existsSync(envDir)) { + throw new Error(envFile + ' not found'); + } + + fs.writeFileSync('etc/env/current', env); + process.env.ENV_FILE = envFile; + process.env.ENV_DIR = envDir; + process.env.ZKSYNC_ENV = env; + get(); +} + +// we have to manually override the environment +// because dotenv won't override variables that are already set +export function reload() { + const envFile = process.env.ENV_FILE as string; + const env = dotenv.parse(fs.readFileSync(envFile)); + for (const envVar in env) { + process.env[envVar] = env[envVar]; + } + load_docker(); +} + +export function load_docker() { + const in_docker: number = parseInt(process.env.IN_DOCKER || '0'); + if (!in_docker) { + return; + } + const envFile = process.env.DOCKER_ENV_FILE as string; + const env = dotenv.parse(fs.readFileSync(envFile)); + for (const envVar in env) { + process.env[envVar] = env[envVar]; + } +} + +// loads environment 
variables +export async function load() { + const current = 'etc/env/current'; + const zksyncEnv = + process.env.ZKSYNC_ENV || (fs.existsSync(current) ? fs.readFileSync(current).toString().trim() : 'dev'); + const envFile = `etc/env/${zksyncEnv}.env`; + const envDir = `etc/env/${zksyncEnv}`; + const dockerEnvFile = `etc/env/docker.env`; + if (zksyncEnv == 'dev') { + // If there no folder with toml files (or it's empty) we should delete + // the old dev.env and regenerate toml files + if (!fs.existsSync('etc/env/dev') || fs.readdirSync('etc/env/dev').length == 0) { + if (fs.existsSync('etc/env/dev.env')) { + fs.rmSync('etc/env/dev.env'); + } + } + + if (!fs.existsSync('etc/env/dev.env')) { + await config.compileConfig(); + } + } + if (!fs.existsSync(envFile)) { + throw new Error('ZkSync config file not found: ' + envFile); + } + if (fs.existsSync(dockerEnvFile)) { + process.env.DOCKER_ENV_FILE = dockerEnvFile; + } + process.env.ZKSYNC_ENV = zksyncEnv; + process.env.ENV_FILE = envFile; + process.env.ENV_DIR = envDir; + dotenv.config({ path: envFile }); + load_docker(); + + // This suppresses the warning that looks like: "Warning: Accessing non-existent property 'INVALID_ALT_NUMBER'...". + // This warning is spawned from the `antlr4`, which is a dep of old `solidity-parser` library. + // Old version of `solidity-parser` is still videly used, and currently we can't get rid of it fully. + process.env.NODE_OPTIONS = '--no-warnings'; +} + +// replaces an env variable in current .env file +// takes variable name, e.g. VARIABLE +// and the new assignment, e.g. VARIABLE=foo +export function modify(variable: string, assignedVariable: string) { + if (!process.env.ENV_FILE) { + // ENV_FILE variable is not set, do nothing. 
+ return; + } + + const envFile = process.env.ENV_FILE as string; + if (!fs.existsSync(envFile)) { + console.log(`${process.env.ENV_FILE} env file was not found, skipping update...`); + return; + } + + utils.replaceInFile(envFile, `${variable}=.*`, assignedVariable.trim()); + reload(); +} + +export function modify_contracts_toml(variable: string, assignedVariable: string) { + const toml_file = `${process.env.ENV_DIR}/contracts.toml`; + + if (!fs.existsSync(toml_file)) { + console.log(`contracts.toml config file was not found, skipping update...`); + return; + } + + const source = fs.readFileSync(toml_file).toString(); + const toml_res = toml.parse(source); + const trimmed_variable = variable.replace('CONTRACTS_', ''); + const trimmed_value = assignedVariable.split('='); + // @ts-ignore + toml_res['contracts'][trimmed_variable] = trimmed_value[1]; + fs.writeFileSync(toml_file, toml.stringify(toml_res)); +} + +export const command = new Command('env') + .arguments('[env_name]') + .description('get or set zksync environment') + .action((envName?: string) => { + envName ? set(envName) : get(); + }); diff --git a/infrastructure/zk/src/fmt.ts b/infrastructure/zk/src/fmt.ts new file mode 100644 index 000000000000..a0ba8ab9db21 --- /dev/null +++ b/infrastructure/zk/src/fmt.ts @@ -0,0 +1,67 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +const EXTENSIONS = ['ts', 'md', 'sol', 'js']; +const CONFIG_PATH = 'etc/prettier-config'; + +export async function prettier(extension: string, check: boolean = false) { + if (!EXTENSIONS.includes(extension)) { + throw new Error('Unsupported extension'); + } + + const command = check ? 
'check' : 'write'; + const files = await utils.getUnignoredFiles(extension); + + if (files.length === 0) { + console.log(`No files of extension ${extension} to format`); + return; + } + + await utils.spawn(`yarn --silent prettier --config ${CONFIG_PATH}/${extension}.js --${command} ${files}`); +} + +export async function rustfmt(check: boolean = false) { + process.chdir(process.env.ZKSYNC_HOME as string); + const command = check ? 'cargo fmt -- --check' : 'cargo fmt'; + await utils.spawn(command); +} + +export const command = new Command('fmt') + .description('format code with prettier & rustfmt') + .option('--check') + .arguments('[extension]') + .action(async (extension: string | null, cmd: Command) => { + if (extension) { + if (extension == 'rust') { + await rustfmt(cmd.check); + } else { + await prettier(extension, cmd.check); + } + } else { + // Run all the checks in parallel. + const promises = EXTENSIONS.map((ext) => prettier(ext, cmd.check)); + promises.push(rustfmt(cmd.check)); + await Promise.all(promises); + } + }); + +command + .command('prettier') + .option('--check') + .arguments('[extension]') + .action(async (extension: string | null, cmd: Command) => { + if (extension) { + await prettier(extension, cmd.check); + } else { + for (const ext of EXTENSIONS) { + await prettier(ext, cmd.check); + } + } + }); + +command + .command('rustfmt') + .option('--check') + .action(async (cmd: Command) => { + await rustfmt(cmd.check); + }); diff --git a/infrastructure/zk/src/index.ts b/infrastructure/zk/src/index.ts new file mode 100644 index 000000000000..08bdafa7e5fc --- /dev/null +++ b/infrastructure/zk/src/index.ts @@ -0,0 +1,89 @@ +#!/usr/bin/env node + +import { program, Command } from 'commander'; +import { spawnSync } from 'child_process'; +import { command as server } from './server'; +import { command as contractVerifier } from './contract_verifier'; +import { command as up } from './up'; +import { command as down } from './down'; +import { command as 
contract } from './contract'; +import { command as dummyProver } from './dummy-prover'; +import { initCommand as init, reinitCommand as reinit, lightweightInitCommand as lightweight_init } from './init'; +import { command as prover } from './prover'; +import { command as run } from './run/run'; +import { command as test } from './test/test'; +import { command as docker } from './docker'; +import { command as fmt } from './fmt'; +import { command as lint } from './lint'; +import { command as compiler } from './compiler'; +import { command as completion } from './completion'; +import { command as config } from './config'; +import { command as clean } from './clean'; +import { command as db } from './database/database'; +// import { command as uni } from './uni'; +import * as env from './env'; + +const COMMANDS = [ + server, + contractVerifier, + up, + down, + db, + contract, + dummyProver, + init, + reinit, + lightweight_init, + prover, + run, + test, + fmt, + lint, + docker, + config, + clean, + compiler, + // uni, + env.command, + completion(program as Command) +]; + +async function main() { + const cwd = process.cwd(); + const ZKSYNC_HOME = process.env.ZKSYNC_HOME; + + if (!ZKSYNC_HOME) { + throw new Error('Please set $ZKSYNC_HOME to the root of zkSync repo!'); + } else { + process.chdir(ZKSYNC_HOME); + } + + await env.load(); + + program.version('0.1.0').name('zk').description('zksync workflow tools'); + + for (const command of COMMANDS) { + program.addCommand(command); + } + + // f command is special-cased because it is necessary + // for it to run from $PWD and not from $ZKSYNC_HOME + program + .command('f ') + .allowUnknownOption() + .action((command: string[]) => { + process.chdir(cwd); + const result = spawnSync(command[0], command.slice(1), { stdio: 'inherit' }); + if (result.error) { + throw result.error; + } + process.exitCode = result.status || undefined; + }); + + await program.parseAsync(process.argv); +} + +main().catch((err: Error) => { + 
console.error('Error:', err.message || err); + process.exitCode = 1; +}); diff --git a/infrastructure/zk/src/init.ts b/infrastructure/zk/src/init.ts new file mode 100644 index 000000000000..242ffca9fa46 --- /dev/null +++ b/infrastructure/zk/src/init.ts @@ -0,0 +1,123 @@ +import { Command } from 'commander'; +import chalk from 'chalk'; +import * as utils from './utils'; + +import * as server from './server'; +import * as contract from './contract'; +import * as run from './run/run'; +import * as compiler from './compiler'; +import * as db from './database/database'; +import { clean } from './clean'; +import * as env from './env'; +import * as docker from './docker'; +import { up } from './up'; + +const entry = chalk.bold.yellow; +const announce = chalk.yellow; +const success = chalk.green; +const timestamp = chalk.grey; + +export async function init(skipSubmodulesCheckout: boolean) { + await announced('Creating docker volumes', createVolumes()); + if (!process.env.CI) { + await announced('Pulling images', docker.pull()); + await announced('Checking environment', checkEnv()); + await announced('Checking git hooks', env.gitHooks()); + await announced('Setting up containers', up()); + } + if (!skipSubmodulesCheckout) { + await announced('Checkout system-contracts submodule', submoduleUpdate()); + } + await announced('Compiling JS packages', run.yarn()); + await announced('Compile l2 contracts', compiler.compileAll()); + await announced('Drop postgres db', db.drop()); + await announced('Setup postgres db', db.setup()); + await announced('Clean rocksdb', clean('db')); + await announced('Clean backups', clean('backups')); + await announced('Checking PLONK setup', run.plonkSetup()); + await announced('Building contracts', contract.build()); + await announced('Deploying localhost ERC20 tokens', run.deployERC20('dev')); + await announced('Running server genesis setup', server.genesis_from_sources()); + await announced('Deploying L1 contracts', contract.redeployL1([])); + 
await announced('Initialize L1 allow list', contract.initializeL1AllowList()); + await announced('Deploying L2 contracts', contract.deployL2()); +} + +// A smaller version of `init` that "resets" the localhost environment, for which `init` was already called before. +// It does less and runs much faster. +export async function reinit() { + await announced('Setting up containers', up()); + await announced('Compiling JS packages', run.yarn()); + await announced('Compile l2 contracts', compiler.compileAll()); + await announced('Drop postgres db', db.drop()); + await announced('Setup postgres db', db.setup()); + await announced('Clean rocksdb', clean('db')); + await announced('Clean backups', clean('backups')); + await announced('Building contracts', contract.build()); + await announced('Running server genesis setup', server.genesis_from_sources()); + await announced('Deploying L1 contracts', contract.redeployL1([])); + await announced('Initializing L1 Allow list', contract.initializeL1AllowList()); + await announced('Deploying L2 contracts', contract.deployL2()); +} + +// A lightweight version of `init` that sets up local databases, generates genesis and deploys precompiled contracts +export async function lightweightInit() { + await announced('Clean rocksdb', clean('db')); + await announced('Clean backups', clean('backups')); + await announced('Running server genesis setup', server.genesis_from_binary()); + await announced('Deploying L1 contracts', contract.redeployL1([])); + await announced('Initializing L1 Allow list', contract.initializeL1AllowList()); + await announced('Deploying L2 contracts', contract.deployL2()); +} + +// Wrapper that writes an announcement and completion notes for each executed task. +async function announced(fn: string, promise: Promise | void) { + const announceLine = `${entry('>')} ${announce(fn)}`; + const separator = '-'.repeat(fn.length + 2); // 2 is the length of "> ". 
+ console.log(`\n` + separator); // So it's easier to see each individual step in the console. + console.log(announceLine); + + const start = new Date().getTime(); + // The actual execution part + await promise; + + const time = new Date().getTime() - start; + const successLine = `${success('✔')} ${fn} done`; + const timestampLine = timestamp(`(${time}ms)`); + console.log(`${successLine} ${timestampLine}`); +} + +async function createVolumes() { + await utils.exec('mkdir -p $ZKSYNC_HOME/volumes/geth'); + await utils.exec('mkdir -p $ZKSYNC_HOME/volumes/postgres'); +} + +async function submoduleUpdate() { + await utils.exec('git submodule update'); +} + +async function checkEnv() { + const tools = ['node', 'yarn', 'docker', 'docker-compose', 'cargo']; + for (const tool of tools) { + await utils.exec(`which ${tool}`); + } + const { stdout: version } = await utils.exec('node --version'); + // Node v14.14 is required because + // the `fs.rmSync` function was added in v14.14.0 + if ('v14.14' >= version) { + throw new Error('Error, node.js version 14.14.0 or higher is required'); + } +} + +export const initCommand = new Command('init') + .option('--skip-submodules-checkout') + .description('perform zksync network initialization for development') + .action(async (cmd: Command) => { + await init(cmd.skipSubmodulesCheckout); + }); +export const reinitCommand = new Command('reinit') + .description('"reinitializes" network. 
Runs faster than `init`, but requires `init` to be executed prior') + .action(reinit); +export const lightweightInitCommand = new Command('lightweight-init') + .description('perform lightweight zksync network initialization for development') + .action(lightweightInit); diff --git a/infrastructure/zk/src/lint.ts b/infrastructure/zk/src/lint.ts new file mode 100644 index 000000000000..fbc7e144a0f4 --- /dev/null +++ b/infrastructure/zk/src/lint.ts @@ -0,0 +1,54 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +// Note that `rust` is not noted here, as clippy isn't run via `yarn`. +// `rust` option is still supported though. +const LINT_COMMANDS = { + md: 'markdownlint', + sol: 'solhint', + js: 'eslint', + ts: 'eslint --ext ts' + // This is needed to silence typescipt. It is possible to create type + // guards, but unfortunately they would have rather weird type, so + // Record is a better solution. +} as Record; +const EXTENSIONS = Object.keys(LINT_COMMANDS); +const CONFIG_PATH = 'etc/lint-config'; + +export async function lint(extension: string, check: boolean = false) { + if (extension == 'rust') { + await clippy(); + return; + } + + if (!EXTENSIONS.includes(extension)) { + throw new Error('Unsupported extension'); + } + + const files = await utils.getUnignoredFiles(extension); + const command = LINT_COMMANDS[extension]; + const fixOption = check ? 
'' : '--fix'; + + await utils.spawn(`yarn --silent ${command} ${fixOption} --config ${CONFIG_PATH}/${extension}.js ${files}`); +} + +async function clippy() { + process.chdir(process.env.ZKSYNC_HOME as string); + await utils.spawn('cargo clippy --tests -- -D warnings'); +} + +export const command = new Command('lint') + .description('lint non-rust code') + .option('--check') + .arguments('[extension]') + .action(async (extension: string | null, cmd: Command) => { + if (extension) { + await lint(extension, cmd.check); + } else { + for (const ext of EXTENSIONS) { + await lint(ext, cmd.check); + } + + await clippy(); + } + }); diff --git a/infrastructure/zk/src/prover.ts b/infrastructure/zk/src/prover.ts new file mode 100644 index 000000000000..82f96125bb78 --- /dev/null +++ b/infrastructure/zk/src/prover.ts @@ -0,0 +1,26 @@ +import { Command } from 'commander'; +import * as utils from './utils'; +import os from 'os'; + +export async function prover(totalProvers: number) { + let children: Promise[] = []; + + for (let id = 1; id <= totalProvers; id++) { + const name = `${os.hostname()}_${id}_blocks`; + console.log('Started prover', name); + const child = utils.spawn( + `cargo run --release --bin zksync_prover -- --worker_name=${name} plonk-step-by-step` + ); + children.push(child); + } + + await Promise.all(children); +} + +export const command = new Command('prover') + .description('run zksync prover') + .arguments('[number_of_provers]') + .action(async (provers?: string) => { + const totalProvers = provers ? 
parseInt(provers) : 1; + await prover(totalProvers); + }); diff --git a/infrastructure/zk/src/run/data-restore.ts b/infrastructure/zk/src/run/data-restore.ts new file mode 100644 index 000000000000..35b223054d6b --- /dev/null +++ b/infrastructure/zk/src/run/data-restore.ts @@ -0,0 +1,66 @@ +import { Command } from 'commander'; +import * as utils from '../utils'; + +export async function rootHash(): Promise { + throw new Error('Not implemented'); + // const query = ` + // WITH last_block (number) AS ( + // SELECT max(block_number) + // FROM operations + // WHERE action_type = 'VERIFY' and confirmed = true + // ) + // SELECT encode(root_hash, 'hex') + // FROM blocks, last_block + // WHERE blocks.number = last_block.number;`; + // const { stdout: blockHash } = await utils.exec(`echo "${query}" | psql "${process.env.DATABASE_URL}" -t`); + // if (blockHash.trim() == '') { + // throw new Error('Unable to load the latest block hash'); + // } + // return blockHash.trim(); +} + +export async function restart() { + await utils.spawn('cargo run --bin zksync_data_restore --release -- --genesis --finite'); +} + +export async function resume() { + await utils.spawn('cargo run --bin zksync_data_restore --release -- --continue'); +} + +export async function run() { + await utils.spawn('cargo run --bin zksync_data_restore --release -- --genesis --finite'); +} + +export async function check(expectedHash: string) { + await utils.spawn( + `cargo run --bin zksync_data_restore --release -- --genesis --finite --final_hash ${expectedHash}` + ); +} + +export async function checkExisting() { + const expectedHash = await rootHash(); + await check(expectedHash); +} + +export const command = new Command('data-restore'); + +command.command('restart').description('wipe the database and run data restore in finite mode').action(restart); +command.command('resume').description('run data restore in "resume" mode').action(resume); +command.command('run').description('do not wipe the database and run 
data restore in finite mode').action(run); + +command + .command('check ') + .description('wipe the database, run the data restore in finite mode and check the root hash') + .action(check); + +command + .command('check-existing') + .description(`like "check", but instead hash is loaded from the database before wiping it`) + .action(checkExisting); + +command + .command('root-hash') + .description('find the hash of the latest verified block and print it') + .action(async () => { + console.log(await rootHash()); + }); diff --git a/infrastructure/zk/src/run/run.ts b/infrastructure/zk/src/run/run.ts new file mode 100644 index 000000000000..f1d74844c4f0 --- /dev/null +++ b/infrastructure/zk/src/run/run.ts @@ -0,0 +1,200 @@ +import { Command } from 'commander'; +import * as utils from '../utils'; +import { Wallet } from 'ethers'; +import fs from 'fs'; +import * as path from 'path'; +import * as dataRestore from './data-restore'; + +export { dataRestore }; + +export async function deployERC20(command: 'dev' | 'new', name?: string, symbol?: string, decimals?: string) { + if (command == 'dev') { + await utils.spawn(`yarn --silent --cwd contracts/ethereum deploy-erc20 add-multi ' + [ + { "name": "DAI", "symbol": "DAI", "decimals": 18 }, + { "name": "wBTC", "symbol": "wBTC", "decimals": 8, "implementation": "RevertTransferERC20" }, + { "name": "BAT", "symbol": "BAT", "decimals": 18 }, + { "name": "GNT", "symbol": "GNT", "decimals": 18 }, + { "name": "MLTT", "symbol": "MLTT", "decimals": 18 }, + { "name": "DAIK", "symbol": "DAIK", "decimals": 18 }, + { "name": "wBTCK", "symbol": "wBTCK", "decimals": 8, "implementation": "RevertTransferERC20" }, + { "name": "BATK", "symbol": "BATS", "decimals": 18 }, + { "name": "GNTK", "symbol": "GNTS", "decimals": 18 }, + { "name": "MLTTK", "symbol": "MLTTS", "decimals": 18 }, + { "name": "DAIL", "symbol": "DAIL", "decimals": 18 }, + { "name": "wBTCL", "symbol": "wBTCP", "decimals": 8, "implementation": "RevertTransferERC20" }, + { "name": 
"BATL", "symbol": "BATW", "decimals": 18 }, + { "name": "GNTL", "symbol": "GNTW", "decimals": 18 }, + { "name": "MLTTL", "symbol": "MLTTW", "decimals": 18 } + ]' > ./etc/tokens/localhost.json`); + } else if (command == 'new') { + await utils.spawn( + `yarn --silent --cwd contracts/ethereum deploy-erc20 add --token-name ${name} --symbol ${symbol} --decimals ${decimals}` + ); + } +} + +export async function tokenInfo(address: string) { + await utils.spawn(`yarn l1-contracts token-info info ${address}`); +} + +// installs all dependencies and builds our js packages +export async function yarn() { + await utils.spawn('yarn'); + await utils.spawn('yarn init-build'); +} + +export async function deployTestkit(genesisRoot: string) { + await utils.spawn(`yarn l1-contracts deploy-testkit --genesis-root ${genesisRoot}`); +} + +export async function plonkSetup(powers?: number[]) { + if (!powers) { + powers = [20, 21, 22, 23, 24, 25, 26]; + } + const URL = 'https://storage.googleapis.com/universal-setup'; + fs.mkdirSync('keys/setup', { recursive: true }); + process.chdir('keys/setup'); + for (let power = 20; power <= 26; power++) { + if (!fs.existsSync(`setup_2^${power}.key`)) { + await utils.spawn(`curl -LO ${URL}/setup_2^${power}.key`); + await utils.sleep(1); + } + } + process.chdir(process.env.ZKSYNC_HOME as string); +} + +export async function revertReason(txHash: string, web3url?: string) { + await utils.spawn(`yarn l1-contracts ts-node scripts/revert-reason.ts ${txHash} ${web3url || ''}`); +} + +export async function explorer() { + await utils.spawn('yarn explorer serve'); +} + +export async function exitProof(...args: string[]) { + await utils.spawn(`cargo run --example generate_exit_proof --release -- ${args.join(' ')}`); +} + +export async function catLogs(exitCode?: number) { + utils.allowFailSync(() => { + console.log('\nSERVER LOGS:\n', fs.readFileSync('server.log').toString()); + console.log('\nPROVER LOGS:\n', fs.readFileSync('dummy_prover.log').toString()); + 
}); + if (exitCode !== undefined) { + process.exit(exitCode); + } +} + +export async function testAccounts() { + const testConfigPath = path.join(process.env.ZKSYNC_HOME as string, `etc/test_config/constant`); + const ethTestConfig = JSON.parse(fs.readFileSync(`${testConfigPath}/eth.json`, { encoding: 'utf-8' })); + const NUM_TEST_WALLETS = 10; + const baseWalletPath = "m/44'/60'/0'/0/"; + const walletKeys = []; + for (let i = 0; i < NUM_TEST_WALLETS; ++i) { + const ethWallet = Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, baseWalletPath + i); + walletKeys.push({ + address: ethWallet.address, + privateKey: ethWallet.privateKey + }); + } + console.log(JSON.stringify(walletKeys, null, 4)); +} + +export async function loadtest(...args: string[]) { + console.log(args); + await utils.spawn(`cargo run --release --bin loadnext -- ${args.join(' ')}`); +} + +export async function readVariable(address: string, contractName: string, variableName: string, file?: string) { + if (file === undefined) + await utils.spawn( + `yarn --silent --cwd contracts/ethereum read-variable read ${address} ${contractName} ${variableName}` + ); + else + await utils.spawn( + `yarn --silent --cwd contracts/ethereum read-variable read ${address} ${contractName} ${variableName} -f ${file}` + ); +} + +export const command = new Command('run').description('run miscellaneous applications').addCommand(dataRestore.command); + +command.command('test-accounts').description('print ethereum test accounts').action(testAccounts); +command.command('explorer').description('run zksync explorer locally').action(explorer); +command.command('yarn').description('install all JS dependencies').action(yarn); +command.command('cat-logs [exit_code]').description('print server and prover logs').action(catLogs); + +command + .command('deploy-erc20 [name] [symbol] [decimals]') + .description('deploy ERC20 tokens') + .action(async (command: string, name?: string, symbol?: string, decimals?: string) => { + if 
(command != 'dev' && command != 'new') { + throw new Error('only "dev" and "new" subcommands are allowed'); + } + await deployERC20(command, name, symbol, decimals); + }); + +command + .command('token-info
') + .description('get symbol, name and decimals parameters from token') + .action(async (address: string) => { + await tokenInfo(address); + }); + +command + .command('plonk-setup [powers]') + .description('download missing keys') + .action(async (powers?: string) => { + const powersArray = powers + ?.split(' ') + .map((x) => parseInt(x)) + .filter((x) => !Number.isNaN(x)); + await plonkSetup(powersArray); + }); + +command + .command('deploy-testkit') + .description('deploy testkit contracts') + .requiredOption('--genesis-root ') + .action(async (cmd: Command) => { + await deployTestkit(cmd.genesisRoot); + }); + +command + .command('revert-reason [web3_url]') + .description('get the revert reason for ethereum transaction') + .action(revertReason); + +command + .command('exit-proof') + .option('--account ') + .option('--token ') + .option('--help') + .description('generate exit proof') + .action(async (cmd: Command) => { + if (!cmd.account || !cmd.token) { + await exitProof('--help'); + } else { + await exitProof('--account_id', cmd.account, '--token', cmd.token); + } + }); + +command + .command('loadtest [options...]') + .description('run the loadtest') + .allowUnknownOption() + .action(async (options: string[]) => { + await loadtest(...options); + }); + +command + .command('read-variable
') + .option( + '-f --file ', + 'file with contract source code(default $ZKSYNC_HOME/contracts/contracts/${contractName}.sol)' + ) + .description('Read value of contract variable') + .action(async (address: string, contractName: string, variableName: string, cmd: Command) => { + await readVariable(address, contractName, variableName, cmd.file); + }); diff --git a/infrastructure/zk/src/server.ts b/infrastructure/zk/src/server.ts new file mode 100644 index 000000000000..79dfeb27401b --- /dev/null +++ b/infrastructure/zk/src/server.ts @@ -0,0 +1,92 @@ +import { Command } from 'commander'; +import * as utils from './utils'; +import * as env from './env'; +import { clean } from './clean'; +import fs from 'fs'; + +export async function server(rebuildTree: boolean, openzeppelinTests: boolean, components?: string) { + let options = ''; + if (openzeppelinTests) { + options += '--features=openzeppelin_tests'; + } + if (rebuildTree || components) { + options += ' --'; + } + if (rebuildTree) { + clean('db'); + options += ' --rebuild-tree'; + } + if (components) { + options += ` --components=${components}`; + } + await utils.spawn(`cargo run --bin zksync_server --release ${options}`); +} + +async function create_genesis(cmd: string) { + await utils.confirmAction(); + await utils.spawn(`${cmd} | tee genesis.log`); + const genesisContents = fs.readFileSync('genesis.log').toString().split('\n'); + const genesisBlockCommitment = genesisContents.find((line) => line.includes('CONTRACTS_GENESIS_BLOCK_COMMITMENT=')); + const genesisRoot = genesisContents.find((line) => line.includes('CONTRACTS_GENESIS_ROOT=')); + const genesisRollupLeafIndex = genesisContents.find((line) => + line.includes('CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX=') + ); + if (genesisRoot == null || !/^CONTRACTS_GENESIS_ROOT=0x[a-fA-F0-9]{64}$/.test(genesisRoot)) { + throw Error(`Genesis is not needed (either Postgres DB or tree's Rocks DB is not empty)`); + } + + if ( + genesisBlockCommitment == null || + 
!/^CONTRACTS_GENESIS_BLOCK_COMMITMENT=0x[a-fA-F0-9]{64}$/.test(genesisBlockCommitment) + ) { + throw Error(`Genesis is not needed (either Postgres DB or tree's Rocks DB is not empty)`); + } + + if ( + genesisRollupLeafIndex == null || + !/^CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX=([1-9]\d*|0)$/.test(genesisRollupLeafIndex) + ) { + throw Error(`Genesis is not needed (either Postgres DB or tree's Rocks DB is not empty)`); + } + + const date = new Date(); + const [year, month, day, hour, minute, second] = [ + date.getFullYear(), + date.getMonth(), + date.getDate(), + date.getHours(), + date.getMinutes(), + date.getSeconds() + ]; + const label = `${process.env.ZKSYNC_ENV}-Genesis_gen-${year}-${month}-${day}-${hour}${minute}${second}`; + fs.mkdirSync(`logs/${label}`, { recursive: true }); + fs.copyFileSync('genesis.log', `logs/${label}/genesis.log`); + env.modify('CONTRACTS_GENESIS_ROOT', genesisRoot); + env.modify('CONTRACTS_GENESIS_BLOCK_COMMITMENT', genesisBlockCommitment); + env.modify('CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX', genesisRollupLeafIndex); + env.modify_contracts_toml('CONTRACTS_GENESIS_ROOT', genesisRoot); + env.modify_contracts_toml('CONTRACTS_GENESIS_BLOCK_COMMITMENT', genesisBlockCommitment); + env.modify_contracts_toml('CONTRACTS_GENESIS_ROLLUP_LEAF_INDEX', genesisRollupLeafIndex); +} + +export async function genesis_from_sources() { + await create_genesis('cargo run --bin zksync_server --release -- --genesis'); +} + +export async function genesis_from_binary() { + await create_genesis('zksync_server --genesis'); +} + +export const command = new Command('server') + .description('start zksync server') + .option('--genesis', 'generate genesis data via server') + .option('--rebuild-tree', 'rebuilds merkle tree from database logs', 'rebuild_tree') + .option('--openzeppelin-tests', `enables 'openzeppelin_tests' feature`) + .option('--components ', 'comma-separated list of components to run') + .action(async (cmd: Command) => { + if (cmd.genesis) { + await 
genesis_from_sources(); + } else { + await server(cmd.rebuildTree, cmd.openzeppelinTests, cmd.components); + } + }); diff --git a/infrastructure/zk/src/test/integration.ts b/infrastructure/zk/src/test/integration.ts new file mode 100644 index 000000000000..75285b6f2f17 --- /dev/null +++ b/infrastructure/zk/src/test/integration.ts @@ -0,0 +1,143 @@ +import { Command } from 'commander'; +import * as utils from '../utils'; +import * as contract from '../contract'; +import * as run from '../run/run'; +import * as compiler from '../compiler'; + +export async function all() { + await server(); + await api(); + await rustSDK(); + // have to kill server before running data-restore + await utils.spawn('pkill zksync_server'); + await run.dataRestore.checkExisting(); +} + +export async function api(bail: boolean = false) { + const flag = bail ? ' --bail' : ''; + await utils.spawn('yarn ts-tests api-test' + flag); +} + +export async function server() { + await utils.spawn('yarn ts-integration test'); +} + +export async function revert(bail: boolean = false) { + const flag = bail ? 
' --bail' : ''; + await utils.spawn('yarn revert-test revert-and-restart-test' + flag); +} + +export async function withdrawalHelpers() { + await utils.spawn('yarn ts-tests withdrawal-helpers-test'); +} + +export async function testkit(args: string[], timeout: number) { + let containerID = ''; + const prevUrls = process.env.ETH_CLIENT_WEB3_URL?.split(',')[0]; + if (process.env.ZKSYNC_ENV == 'dev' && process.env.CI != '1') { + const { stdout } = await utils.exec('docker run --rm -d -p 7545:8545 matterlabs/geth:latest fast'); + containerID = stdout; + process.env.ETH_CLIENT_WEB3_URL = 'http://localhost:7545'; + } + process.on('SIGINT', () => { + console.log('interrupt received'); + // we have to emit this manually, as SIGINT is considered explicit termination + process.emit('beforeExit', 130); + }); + + // set a timeout in case tests hang + const timer = setTimeout(() => { + console.log('Timeout reached!'); + process.emit('beforeExit', 1); + }, timeout * 1000); + timer.unref(); + + // since we HAVE to make an async call upon exit, + // the only solution is to use beforeExit hook + // but be careful! this is not called upon explicit termination + // e.g. on SIGINT or process.exit() + process.on('beforeExit', async (code) => { + if (process.env.ZKSYNC_ENV == 'dev' && process.env.CI != '1') { + try { + // probably should be replaced with child_process.execSync in future + // to change the hook to program.on('exit', ...) 
+ await utils.exec(`docker kill ${containerID}`); + } catch { + console.error('Problem killing', containerID); + } + process.env.ETH_CLIENT_WEB3_URL = prevUrls; + // this has to be here - or else we will call this hook again + process.exit(code); + } + }); + + process.env.CHAIN_ETH_NETWORK = 'test'; + await compiler.compileAll(); + await contract.build(); + + await utils.spawn(`cargo run --release --bin zksync_testkit -- ${args.join(' ')}`); +} + +export async function rustSDK() { + await utils.spawn('cargo test -p zksync --release -- --ignored --test-threads=1'); +} + +export const command = new Command('integration').description('zksync integration tests').alias('i'); + +command + .command('all') + .description('run all integration tests (no testkit)') + .action(async () => { + await all(); + }); + +command + .command('server') + .description('run server integration tests') + .action(async () => { + await server(); + }); + +command + .command('revert') + .description('run revert test') + .option('--bail') + .action(async (cmd: Command) => { + await revert(cmd.bail); + }); + +command + .command('rust-sdk') + .description('run rust SDK integration tests') + .option('--with-server') + .action(async () => { + await rustSDK(); + }); + +command + .command('api') + .description('run api integration tests') + .option('--bail') + .action(async (cmd: Command) => { + await api(cmd.bail); + }); + +command + .command('testkit [options...]') + .allowUnknownOption(true) + .description('run testkit tests') + .option('--offline') + .action(async (options: string[], offline: boolean = false) => { + if (offline) { + process.env.SQLX_OFFLINE = 'true'; + } + if (options.length == 0) { + options.push('all'); + } + + await testkit(options, 6000); + + if (offline) { + delete process.env.SQLX_OFFLINE; + } + }); diff --git a/infrastructure/zk/src/test/test.ts b/infrastructure/zk/src/test/test.ts new file mode 100644 index 000000000000..def91a724def --- /dev/null +++ 
b/infrastructure/zk/src/test/test.ts @@ -0,0 +1,54 @@ +import { Command } from 'commander'; +import * as utils from '../utils'; + +import * as integration from './integration'; +import * as db from '../database/database'; +export { integration }; + +export async function l1Contracts() { + await utils.spawn('yarn l1-contracts test'); +} + +export async function prover() { + // await utils.spawn('cargo test -p zksync_prover --release'); +} + +export async function js() { + await utils.spawn('yarn web3 tests'); +} + +export async function rust(options: string[]) { + await db.resetTest(); + + let cmd = `cargo test --release ${options.join(' ')}`; + console.log(`running unit tests with '${cmd}'`); + + await utils.spawn(cmd); +} + +export async function openzeppelin() { + process.chdir(`${process.env.ZKSYNC_HOME}/etc/openzeppelin-contracts`); + await utils.spawn('yarn'); + process.chdir(`${process.env.ZKSYNC_HOME}/infrastructure/openzeppelin-tests-preparation`); + await utils.spawn('yarn && yarn start'); + process.chdir(`${process.env.ZKSYNC_HOME}/etc/openzeppelin-contracts`); + await utils.spawn('yarn test'); + + process.chdir(process.env.ZKSYNC_HOME as string); +} + +export const command = new Command('test').description('run test suites').addCommand(integration.command); + +command.command('js').description('run unit-tests for javascript packages').action(js); +command.command('prover').description('run unit-tests for the prover').action(prover); +command.command('l1-contracts').description('run unit-tests for the layer 1 smart contracts').action(l1Contracts); +command.command('openzeppelin').description(`run openzeppelin contracts' tests`).action(openzeppelin); +command + .command('rust [command...]') + .allowUnknownOption() + .description( + 'run unit-tests. the default is running all tests in all rust bins and libs. 
accepts optional arbitrary cargo test flags' + ) + .action(async (args: string[]) => { + await rust(args); + }); diff --git a/infrastructure/zk/src/up.ts b/infrastructure/zk/src/up.ts new file mode 100644 index 000000000000..9201f89f469a --- /dev/null +++ b/infrastructure/zk/src/up.ts @@ -0,0 +1,8 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +export async function up() { + await utils.spawn('docker-compose up -d geth postgres'); +} + +export const command = new Command('up').description('start development containers').action(up); diff --git a/infrastructure/zk/src/utils.ts b/infrastructure/zk/src/utils.ts new file mode 100644 index 000000000000..27c9a92f25ea --- /dev/null +++ b/infrastructure/zk/src/utils.ts @@ -0,0 +1,149 @@ +import { exec as _exec, spawn as _spawn } from 'child_process'; +import { promisify } from 'util'; +import fs from 'fs'; +import readline from 'readline'; + +export type { ChildProcess } from 'child_process'; + +const IGNORED_DIRS = [ + 'target', + 'node_modules', + 'volumes', + 'build', + 'dist', + '.git', + 'generated', + 'grafonnet-lib', + 'prettier-config', + 'lint-config', + 'cache', + 'artifacts', + 'typechain', + 'binaryen', + 'system-contracts', + 'openzeppelin-contracts', + 'artifacts-zk', + 'cache-zk' +]; +const IGNORED_FILES = ['KeysWithPlonkVerifier.sol', 'TokenInit.sol', '.tslintrc.js']; + +// async executor of shell commands +// spawns a new shell and can execute arbitrary commands, like "ls -la | grep .env" +// returns { stdout, stderr } +const promisified = promisify(_exec); +export function exec(command: string) { + command = command.replace(/\n/g, ' '); + return promisified(command); +} + +// executes a command in a new shell +// but pipes data to parent's stdout/stderr +export function spawn(command: string) { + command = command.replace(/\n/g, ' '); + const child = _spawn(command, { stdio: 'inherit', shell: true }); + return new Promise((resolve, reject) => { + child.on('error', reject); + 
child.on('close', (code) => { + code == 0 ? resolve(code) : reject(`Child process exited with code ${code}`); + }); + }); +} + +// executes a command in background and returns a child process handle +// by default pipes data to parent's stdio but this can be overridden +export function background(command: string, stdio: any = 'inherit') { + command = command.replace(/\n/g, ' '); + return _spawn(command, { stdio: stdio, shell: true, detached: true }); +} + +export async function confirmAction() { + if (process.env.ZKSYNC_ACTION == 'dont_ask') return; + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout + }); + const input = await new Promise((resolve) => { + rl.question( + 'Dangerous action! (set ZKSYNC_ACTION=dont_ask to always allow)\n' + + `Type environment name (${process.env.ZKSYNC_ENV}) to confirm: `, + (input) => { + rl.close(); + resolve(input); + } + ); + }); + if (input !== process.env.ZKSYNC_ENV) { + throw new Error('[aborted] action was not confirmed'); + } +} + +export async function sleep(seconds: number) { + return new Promise((resolve) => setTimeout(resolve, seconds * 1000)); +} + +// the sync version of sleep is needed +// for process.on('exit') hook, which MUST be synchronous. 
+// no idea why it has to be so ugly, though +export function sleepSync(seconds: number) { + Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, seconds * 1000); +} + +export async function allowFail(promise: Promise) { + try { + return await promise; + } catch { + return null; + } +} + +export function allowFailSync(func: () => T) { + try { + return func(); + } catch { + return null; + } +} + +export function replaceInFile(filename: string, before: string | RegExp, after: string) { + before = new RegExp(before, 'g'); + modifyFile(filename, (source) => source.replace(before, after)); +} + +// performs an operation on the content of `filename` +export function modifyFile(filename: string, modifier: (s: string) => string) { + const source = fs.readFileSync(filename).toString(); + fs.writeFileSync(filename, modifier(source)); +} + +// If you wonder why this is written so obscurely through find and not through .prettierignore and globs, +// it's because prettier *first* expands globs and *then* applies ignore rules, which leads to an error +// because it can't expand into volumes folder with not enough access rights, even if it is ignored. +// +// And if we let the shell handle glob expansion instead of prettier, `shopt -s globstar` will be +// disabled (because yarn spawns its own shell that does not load .bashrc) and thus glob patterns +// with double-stars will not work +export async function getUnignoredFiles(extension: string) { + const root = extension == 'sol' ? 'contracts' : '.'; + const ignored_dirs = IGNORED_DIRS.map((dir) => `-o -path '*/${dir}' -prune`).join(' '); + const ignored_files = IGNORED_FILES.map((file) => `-a ! 
-name '${file}'`).join(' '); + const { stdout: files } = await exec( + `find ${root} -type f -name '*.${extension}' ${ignored_files} -print ${ignored_dirs}` + ); + + return files; +} + +export function web3Url() { + return process.env.ETH_CLIENT_WEB3_URL!; +} + +export async function readZkSyncAbi() { + const zksync = process.env.ZKSYNC_HOME; + const path = `${zksync}/contracts/artifacts/cache/solpp-generated-contracts/interfaces/IZkSync.sol/IZkSync.json`; + + const fileContent = (await fs.promises.readFile(path)).toString(); + + const abi = JSON.parse(fileContent).abi; + + return abi; +} diff --git a/infrastructure/zk/tsconfig.json b/infrastructure/zk/tsconfig.json new file mode 100644 index 000000000000..f96df8d60edb --- /dev/null +++ b/infrastructure/zk/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2019", + "module": "commonjs", + "outDir": "build", + "strict": true, + "esModuleInterop": true, + "noEmitOnError": true, + "skipLibCheck": true, + "declaration": true + }, + "files": [ + "src/index.ts" + ] +} diff --git a/package.json b/package.json new file mode 100644 index 000000000000..bfa80568d438 --- /dev/null +++ b/package.json @@ -0,0 +1,51 @@ +{ + "name": "zksync-root", + "version": "1.0.0", + "license": "MIT", + "private": true, + "workspaces": { + "packages": [ + "sdk/zksync-web3.js", + "contracts/ethereum", + "contracts/zksync", + "etc/contracts-test-data", + "etc/ERC20", + "infrastructure/zk", + "infrastructure/reading-tool", + "infrastructure/local-setup-preparation", + "infrastructure/openzeppelin-tests-preparation", + "core/tests/revert-test", + "core/tests/ts-integration" + ], + "nohoist": [ + "**/@types/jest", + "**/@types/jasmine" + ] + }, + "scripts": { + "build:zksync-sdk-web3": "yarn web3 build", + "build:reading-tool": "yarn reading-tool build", + "web3": "yarn workspace zksync-web3", + "local-prep": "yarn workspace local-setup-preparation", + "l1-contracts": "yarn workspace l1-zksync-contracts", + "l2-contracts": 
"yarn workspace l2-zksync-contracts", + "revert-test": "yarn workspace revert-test", + "ts-integration": "yarn workspace ts-integration", + "zk": "yarn workspace zk", + "reading-tool": "yarn workspace reading-tool", + "init-build": "yarn npm-run-all --parallel build:*" + }, + "devDependencies": { + "@ethersproject/bignumber": "~5.5.0", + "@typescript-eslint/eslint-plugin": "^4.10.0", + "@typescript-eslint/parser": "^4.10.0", + "babel-eslint": "^10.1.0", + "eslint": "^7.16.0", + "eslint-config-alloy": "^3.8.2", + "markdownlint-cli": "^0.24.0", + "npm-run-all": "^4.1.5", + "prettier": "^2.3.2", + "prettier-plugin-solidity": "=1.0.0-dev.22", + "solhint": "^3.3.2" + } +} diff --git a/renovate.json b/renovate.json new file mode 100644 index 000000000000..751c26d2767b --- /dev/null +++ b/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": ["config:base", "helpers:pinGitHubActionDigests"], + "enabledManagers": ["github-actions"], + "prCreation": "immediate" +} diff --git a/rust-toolchain b/rust-toolchain new file mode 100644 index 000000000000..737e2ba5099d --- /dev/null +++ b/rust-toolchain @@ -0,0 +1 @@ +1.67.1 diff --git a/sdk/zksync-rs/Cargo.toml b/sdk/zksync-rs/Cargo.toml new file mode 100644 index 000000000000..09c990ac4223 --- /dev/null +++ b/sdk/zksync-rs/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "zksync" +version = "0.3.0" +authors = ["The Matter Labs Team "] +edition = "2018" + +[dependencies] +zksync_types = { path = "../../core/lib/types", version = "1.0" } +zksync_utils = { path = "../../core/lib/utils", version = "1.0" } +zksync_eth_client = { path = "../../core/lib/eth_client", version = "1.0" } +zksync_eth_signer = { path = "../../core/lib/eth_signer", version = "1.0" } +zksync_web3_decl = { path = "../../core/lib/web3_decl", version = "1.0", default-features = false, features = [ + "client", +] } + +tokio = { version = "1", features = ["time"] } + +serde_json = "1.0" +num = { version = "0.3.1", features = ["serde"] } +thiserror = "1.0" + 
+[dev-dependencies] +zksync_config = { path = "../../core/lib/config", version = "1.0" } +tokio = { version = "1", features = ["full"] } +anyhow = "1.0" +hex = "0.4" + +[features] +integration-tests = [] +mint = [] diff --git a/sdk/zksync-rs/README.md b/sdk/zksync-rs/README.md new file mode 100644 index 000000000000..f4b6444d71e6 --- /dev/null +++ b/sdk/zksync-rs/README.md @@ -0,0 +1,4 @@ +# Rust SDK for zkSync + +This SDK is currently under construction and is only used for internal needs. A public version of the SDK would be +announced later. diff --git a/sdk/zksync-rs/src/abi/IERC20.json b/sdk/zksync-rs/src/abi/IERC20.json new file mode 100644 index 000000000000..f99b7939ed4c --- /dev/null +++ b/sdk/zksync-rs/src/abi/IERC20.json @@ -0,0 +1,463 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "constant": true, + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": 
"spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "mint", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" 
+ } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + } + ], + "evm": { + "bytecode": { + "linkReferences": {}, + "object": "", + "opcodes": "", + "sourceMap": "" + }, + "deployedBytecode": { + "linkReferences": {}, + "object": "", + "opcodes": "", + "sourceMap": "" + } + }, + "interface": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "constant": true, + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": true, + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "balanceOf", + 
"outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": true, + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "address", + "name": "recipient", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + }, + { + "constant": false, + "inputs": [ + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "mint", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "payable": false, + "stateMutability": "nonpayable", + "type": "function" + } + ], + "bytecode": "" +} diff --git a/sdk/zksync-rs/src/abi/IL1Bridge.json b/sdk/zksync-rs/src/abi/IL1Bridge.json new file mode 100644 index 000000000000..f0cb575994f5 --- /dev/null +++ b/sdk/zksync-rs/src/abi/IL1Bridge.json @@ -0,0 +1,243 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": 
"to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "ClaimedFailedDeposit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "DepositInitiated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "WithdrawalFinalized", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_depositSender", + "type": "address" + }, + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "_l2TxHash", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "claimFailedDeposit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2Receiver", + "type": "address" + }, + { + 
"internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2TxGasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2TxGasPerPubdataByte", + "type": "uint256" + } + ], + "name": "deposit", + "outputs": [ + { + "internalType": "bytes32", + "name": "txHash", + "type": "bytes32" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "finalizeWithdrawal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + } + ], + "name": "isWithdrawalFinalized", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + } + ], + "name": "l2TokenAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + } + ] +} diff --git a/sdk/zksync-rs/src/abi/IPaymasterFlow.json b/sdk/zksync-rs/src/abi/IPaymasterFlow.json new file mode 100644 index 000000000000..c78e121fd538 --- /dev/null +++ b/sdk/zksync-rs/src/abi/IPaymasterFlow.json @@ -0,0 +1,40 @@ +{ + "abi": [ + { + "inputs": [ + { + 
"internalType": "address", + "name": "_token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_minAllowance", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_innerInput", + "type": "bytes" + } + ], + "name": "approvalBased", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "input", + "type": "bytes" + } + ], + "name": "general", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-rs/src/abi/IZkSync.json b/sdk/zksync-rs/src/abi/IZkSync.json new file mode 100644 index 000000000000..afb3698e7c62 --- /dev/null +++ b/sdk/zksync-rs/src/abi/IZkSync.json @@ -0,0 +1,2176 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockCommit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockExecution", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksCommitted", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksVerified", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksExecuted", + "type": "uint256" + } + ], + "name": "BlocksRevert", + "type": "event" + }, + { + 
"anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "previousLastVerifiedBlock", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "currentLastVerifiedBlock", + "type": "uint256" + } + ], + "name": "BlocksVerification", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "CancelUpgradeProposal", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "EthWithdrawalFinalized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "proposalSalt", + "type": "bytes32" + } + ], + "name": "ExecuteUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Freeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "bool", + "name": "isPorterAvailable", + "type": "bool" + } + ], + "name": "IsPorterAvailableStatusUpdate", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldGovernor", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newGovernor", + "type": "address" + } + ], + "name": "NewGovernor", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + 
"internalType": "bytes32", + "name": "previousBytecodeHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "newBytecodeHash", + "type": "bytes32" + } + ], + "name": "NewL2BootloaderBytecodeHash", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "previousBytecodeHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "newBytecodeHash", + "type": "bytes32" + } + ], + "name": "NewL2DefaultAccountBytecodeHash", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldPendingGovernor", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newPendingGovernor", + "type": "address" + } + ], + "name": "NewPendingGovernor", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "txId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "txHash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "components": [ + { + "internalType": "uint256", + "name": "txType", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "from", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "to", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxPriorityFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "paymaster", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": 
"uint256" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256[4]", + "name": "reserved", + "type": "uint256[4]" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + }, + { + "internalType": "uint256[]", + "name": "factoryDeps", + "type": "uint256[]" + }, + { + "internalType": "bytes", + "name": "paymasterInput", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "reservedDynamic", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct IMailbox.L2CanonicalTransaction", + "name": "transaction", + "type": "tuple" + }, + { + "indexed": false, + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + } + ], + "name": "NewPriorityRequest", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "oldPriorityTxMaxGasLimit", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "NewPriorityTxMaxGasLimit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldVerifier", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newVerifier", + "type": "address" + } + ], + "name": "NewVerifier", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "indexed": false, + "internalType": "struct VerifierParams", + "name": "oldVerifierParams", + "type": "tuple" + }, + { + "components": 
[ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "indexed": false, + "internalType": "struct VerifierParams", + "name": "newVerifierParams", + "type": "tuple" + } + ], + "name": "NewVerifierParams", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "ProposeShadowUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct Diamond.DiamondCutData", + "name": "diamondCut", + "type": "tuple" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "proposalSalt", + "type": "bytes32" + } + ], + "name": "ProposeTransparentUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": 
true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "SecurityCouncilUpgradeApprove", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Unfreeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "validatorAddress", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "isActive", + "type": "bool" + } + ], + "name": "ValidatorStatusUpdate", + "type": "event" + }, + { + "inputs": [], + "name": "acceptGovernor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_proposedUpgradeHash", + "type": "bytes32" + } + ], + "name": "cancelUpgradeProposal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo", + "name": "_lastCommittedBlockData", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": 
"indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "initialStorageChanges", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "repeatedStorageChanges", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "l2Logs", + "type": "bytes" + }, + { + "internalType": "bytes[]", + "name": "l2ArbitraryLengthMessages", + "type": "bytes[]" + }, + { + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + } + ], + "internalType": "struct IExecutor.CommitBlockInfo[]", + "name": "_newBlocksData", + "type": "tuple[]" + } + ], + "name": "commitBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo[]", + "name": "_blocksData", + "type": "tuple[]" + } + ], + "name": "executeBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": 
"function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "bytes32", + "name": "_proposalSalt", + "type": "bytes32" + } + ], + "name": "executeUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "facetAddress", + "outputs": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facetAddresses", + "outputs": [ + { + "internalType": "address[]", + "name": "facets", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "facetFunctionSelectors", + "outputs": [ + { + "internalType": "bytes4[]", + "name": "", + "type": "bytes4[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facets", + "outputs": [ + { + "components": [ + { + "internalType": "address", + "name": "addr", + "type": "address" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct 
IGetters.Facet[]", + "name": "", + "type": "tuple[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1WithdrawReceiver", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "finalizeEthWithdrawal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "freezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "getCurrentProposalId", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getFirstUnprocessedPriorityTx", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getGovernor", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2BootloaderBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2DefaultAccountBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPendingGovernor", + "outputs": [ + { + 
"internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPriorityQueueSize", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getProposedUpgradeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getProposedUpgradeTimestamp", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getSecurityCouncil", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksCommitted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksExecuted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksVerified", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalPriorityTxs", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getUpgradeProposalState", + "outputs": [ + { + "internalType": "enum UpgradeState", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getVerifier", + 
"outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getVerifierParams", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "internalType": "struct VerifierParams", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getpriorityTxMaxGasLimit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isApprovedBySecurityCouncil", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isDiamondStorageFrozen", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "isFacetFreezable", + "outputs": [ + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "isFunctionFreezable", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "isValidator", + "outputs": [ + { + "internalType": "bool", + 
"name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + } + ], + "name": "l2LogsRootHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "hash", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_gasPrice", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + } + ], + "name": "l2TransactionBaseCost", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "priorityQueueFrontOperation", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "internalType": "uint192", + "name": "layer2Tip", + "type": "uint192" + } + ], + "internalType": "struct PriorityOperation", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_proposalHash", + "type": "bytes32" + }, + { + "internalType": "uint40", + "name": "_proposalId", + "type": "uint40" + } + ], + "name": "proposeShadowUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": 
"selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "uint40", + "name": "_proposalId", + "type": "uint40" + } + ], + "name": "proposeTransparentUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo", + "name": "_prevBlock", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + 
"internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo[]", + "name": "_committedBlocks", + "type": "tuple[]" + }, + { + "components": [ + { + "internalType": "uint256[]", + "name": "recursiveAggregationInput", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "serializedProof", + "type": "uint256[]" + } + ], + "internalType": "struct IExecutor.ProofInput", + "name": "_proof", + "type": "tuple" + } + ], + "name": "proveBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2TxHash", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + }, + { + "internalType": "enum TxStatus", + "name": "_status", + "type": "uint8" + } + ], + "name": "proveL1ToL2TransactionStatus", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint8", + "name": "l2ShardId", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isService", + "type": "bool" + }, + { + "internalType": "uint16", + "name": "txNumberInBlock", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "key", + "type": "bytes32" + }, + 
{ + "internalType": "bytes32", + "name": "value", + "type": "bytes32" + } + ], + "internalType": "struct L2Log", + "name": "_log", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": "proveL2LogInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint16", + "name": "txNumberInBlock", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "internalType": "struct L2Message", + "name": "_message", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": "proveL2MessageInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_contractL2", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_l2Value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "bytes[]", + "name": "_factoryDeps", + "type": "bytes[]" + }, + { + "internalType": "address", + "name": "_refundRecipient", + "type": "address" + } + ], + "name": "requestL2Transaction", + "outputs": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + } + ], + "stateMutability": "payable", + "type": 
"function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newLastBlock", + "type": "uint256" + } + ], + "name": "revertBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_upgradeProposalHash", + "type": "bytes32" + } + ], + "name": "securityCouncilUpgradeApprove", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_txId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2Value", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "address", + "name": "_contractAddressL2", + "type": "address" + }, + { + "internalType": "bytes", + "name": "_calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "bytes[]", + "name": "_factoryDeps", + "type": "bytes[]" + }, + { + "internalType": "uint256", + "name": "_toMint", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_refundRecipient", + "type": "address" + } + ], + "name": "serializeL2Transaction", + "outputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "txType", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "from", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "to", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxPriorityFeePerGas", + "type": "uint256" + }, + { + "internalType": 
"uint256", + "name": "paymaster", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256[4]", + "name": "reserved", + "type": "uint256[4]" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + }, + { + "internalType": "uint256[]", + "name": "factoryDeps", + "type": "uint256[]" + }, + { + "internalType": "bytes", + "name": "paymasterInput", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "reservedDynamic", + "type": "bytes" + } + ], + "internalType": "struct IMailbox.L2CanonicalTransaction", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2BootloaderBytecodeHash", + "type": "bytes32" + } + ], + "name": "setL2BootloaderBytecodeHash", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2DefaultAccountBytecodeHash", + "type": "bytes32" + } + ], + "name": "setL2DefaultAccountBytecodeHash", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_newPendingGovernor", + "type": "address" + } + ], + "name": "setPendingGovernor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bool", + "name": "_zkPorterIsAvailable", + "type": "bool" + } + ], + "name": "setPorterAvailability", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "setPriorityTxMaxGasLimit", + "outputs": [], + "stateMutability": "nonpayable", + 
"type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_validator", + "type": "address" + }, + { + "internalType": "bool", + "name": "_active", + "type": "bool" + } + ], + "name": "setValidator", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "contract Verifier", + "name": "_newVerifier", + "type": "address" + } + ], + "name": "setVerifier", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "internalType": "struct VerifierParams", + "name": "_newVerifierParams", + "type": "tuple" + } + ], + "name": "setVerifierParams", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + } + ], + "name": "storedBlockHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "unfreezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + 
"name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "uint256", + "name": "_proposalId", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "_salt", + "type": "bytes32" + } + ], + "name": "upgradeProposalHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "pure", + "type": "function" + } + ] +} diff --git a/sdk/zksync-rs/src/abi/update-abi.sh b/sdk/zksync-rs/src/abi/update-abi.sh new file mode 100755 index 000000000000..fb103de014dd --- /dev/null +++ b/sdk/zksync-rs/src/abi/update-abi.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd `dirname $0` + +# Main zkSync contract interface +cat $ZKSYNC_HOME/contracts/ethereum/artifacts/cache/solpp-generated-contracts/zksync/interfaces/IZkSync.sol/IZkSync.json | jq '{ abi: .abi}' > ZkSync.json +# Default L1 bridge +cat $ZKSYNC_HOME/contracts/ethereum/artifacts/cache/solpp-generated-contracts/bridge/interfaces/IL1Bridge.sol/IL1Bridge.json | jq '{ abi: .abi}' > L1Bridge.json +# Paymaster interface +cat $ZKSYNC_HOME/contracts/zksync/artifacts-zk/cache-zk/solpp-generated-contracts/interfaces/IPaymasterFlow.sol/IPaymasterFlow.json | jq '{ abi: .abi}' > IPaymasterFlow.json diff --git a/sdk/zksync-rs/src/error.rs b/sdk/zksync-rs/src/error.rs new file mode 100644 index 000000000000..d59b68cd960b --- /dev/null +++ b/sdk/zksync-rs/src/error.rs @@ -0,0 +1,44 @@ +use zksync_eth_signer::error::SignerError; +pub use zksync_web3_decl::jsonrpsee::core::Error as RpcError; + +#[derive(Debug, thiserror::Error)] +pub enum ClientError { + #[error("Network '{0}' is not supported")] + NetworkNotSupported(String), + #[error("Unable to decode server response: {0}")] + MalformedResponse(String), + #[error("RPC error: {0:?}")] + RpcError(#[from] RpcError), + #[error("Network error: 
{0}")] + NetworkError(String), + + #[error("Provided account credentials are incorrect")] + IncorrectCredentials, + #[error("Incorrect address")] + IncorrectAddress, + + #[error("Operation timeout")] + OperationTimeout, + #[error("Polling interval is too small")] + PollingIntervalIsTooSmall, + + #[error("Signing error: {0}")] + SigningError(#[from] SignerError), + #[error("Missing required field for a transaction: {0}")] + MissingRequiredField(String), + + #[error("Failed to estimate the cost of the contract execution")] + ExecutionContractEstimationError, + + #[error("Ethereum private key was not provided for this wallet")] + NoEthereumPrivateKey, + + #[error("Provided value is not packable")] + NotPackableValue, + + #[error("Provided function arguments are incorrect")] + IncorrectInput, + + #[error("Other")] + Other, +} diff --git a/sdk/zksync-rs/src/ethereum/DepositERC20GasLimit.json b/sdk/zksync-rs/src/ethereum/DepositERC20GasLimit.json new file mode 100644 index 000000000000..57864f93317f --- /dev/null +++ b/sdk/zksync-rs/src/ethereum/DepositERC20GasLimit.json @@ -0,0 +1,42 @@ +{ + "0x0000000000095413afc295d19edeb1ad7b71c952": 140000, + "0xeb4c2781e4eba804ce9a9803c67d0893436bb27d": 160000, + "0xbbbbca6a901c926f240b89eacb641d8aec7aeafd": 140000, + "0xb64ef51c888972c908cfacf59b47c1afbc0ab8ac": 140000, + "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984": 150000, + "0x9ba00d6856a4edf4665bca2c2309936572473b7e": 270000, + "0x8daebade922df735c38c80c7ebd708af50815faa": 140000, + "0x0d8775f648430679a709e98d2b0cb6250d2887ef": 140000, + "0xdac17f958d2ee523a2206206994597c13d831ec7": 140000, + "0x6de037ef9ad2725eb40118bb1702ebb27e4aeb24": 150000, + "0x056fd409e1d7a124bd7017459dfea2f387b6d5cd": 180000, + "0x0f5d2fb29fb7d3cfee444a200298f468908cc942": 140000, + "0x514910771af9ca656af840dff83e8264ecf986ca": 140000, + "0x1985365e9f78359a9b6ad760e32412f4a445e862": 180000, + "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599": 140000, + "0xe41d2489571d322189246dafa5ebde1f4699f498": 140000, + 
"0x6b175474e89094c44da98b954eedeac495271d0f": 140000, + "0xaaaebe6fe48e54f431b0c390cfaf0b017d09d42d": 150000, + "0x2b591e99afe9f32eaa6214f7b7629768c40eeb39": 140000, + "0x65ece136b89ebaa72a7f7aa815674946e44ca3f9": 140000, + "0x0000000000085d4780b73119b644ae5ecd22b376": 150000, + "0xdb25f211ab05b1c97d595516f45794528a807ad8": 180000, + "0x408e41876cccdc0f92210600ef50372656052a38": 140000, + "0x15a2b3cfafd696e1c783fe99eed168b78a3a371e": 160000, + "0x38e4adb44ef08f22f5b5b76a8f0c2d0dcbe7dca1": 160000, + "0x3108ccfd96816f9e663baa0e8c5951d229e8c6da": 140000, + "0x56d811088235f11c8920698a204a5010a788f4b3": 240000, + "0x57ab1ec28d129707052df4df418d58a2d46d5f51": 220000, + "0x9f8f72aa9304c8b593d555f12ef6589cc3a579a2": 140000, + "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48": 150000, + "0xc011a73ee8576fb46f5e1c5751ca3b9fe0af2a6f": 200000, + "0x744d70fdbe2ba4cf95131626614a1763df805b9e": 230000, + "0x0bc529c00c6401aef6d220be8c6ea1667f6ad93e": 140000, + "0x4c7065bca76fe44afb0d16c2441b1e6e163354e2": 250000, + "0xdd974d5c2e2928dea5f71b9825b8b646686bd200": 140000, + "0x80fb784b7ed66730e8b1dbd9820afd29931aab03": 140000, + "0xd56dac73a4d6766464b38ec6d91eb45ce7457c44": 140000, + "0x4fabb145d64652a948d72533023f6e7a623c7c53": 150000, + "0x38a2fdc11f526ddd5a607c1f251c065f40fbf2f7": 140000, + "0x7dd9c5cba05e151c895fde1cf355c9a1d5da6429": 140000 +} diff --git a/sdk/zksync-rs/src/ethereum/mod.rs b/sdk/zksync-rs/src/ethereum/mod.rs new file mode 100644 index 000000000000..045a28bdb1e4 --- /dev/null +++ b/sdk/zksync-rs/src/ethereum/mod.rs @@ -0,0 +1,594 @@ +//! Utilities for the on-chain operations, such as `Deposit` and `FullExit`. 
+ +use core::{convert::TryFrom, time::Duration}; +use serde_json::{Map, Value}; +use std::time::Instant; +use zksync_types::{ + api::BridgeAddresses, + web3::{ + contract::{tokens::Tokenize, Options}, + ethabi, + transports::Http, + types::{TransactionReceipt, H160, H256, U256}, + }, + L1ChainId, U64, +}; +use zksync_web3_decl::namespaces::{EthNamespaceClient, ZksNamespaceClient}; + +use zksync_eth_client::clients::http_client::Error; +use zksync_eth_client::{ETHDirectClient, EthInterface}; +use zksync_eth_signer::EthereumSigner; +use zksync_types::network::Network; +use zksync_types::{l1::L1Tx, Address, L1TxCommonData}; + +use crate::web3::ethabi::Bytes; +use crate::{ + error::ClientError, + operations::SyncTransactionHandle, + utils::{is_token_eth, load_contract}, +}; + +const IERC20_INTERFACE: &str = include_str!("../abi/IERC20.json"); +const ZKSYNC_INTERFACE: &str = include_str!("../abi/IZkSync.json"); +const L1_DEFAULT_BRIDGE_INTERFACE: &str = include_str!("../abi/IL1Bridge.json"); +const RAW_ERC20_DEPOSIT_GAS_LIMIT: &str = include_str!("DepositERC20GasLimit.json"); + +// The gasPerPubdata to be used in L1->L2 requests. It may be almost any number, but here we 800 +// as an optimal one. In the future, it will be estimated. +const L1_TO_L2_GAS_PER_PUBDATA: u32 = 800; + +/// Returns `ethabi::Contract` object for zkSync smart contract. +pub fn zksync_contract() -> ethabi::Contract { + load_contract(ZKSYNC_INTERFACE) +} + +/// Returns `ethabi::Contract` object for ERC-20 smart contract interface. +pub fn ierc20_contract() -> ethabi::Contract { + load_contract(IERC20_INTERFACE) +} + +/// Returns `ethabi::Contract` object for L1 Bridge smart contract interface. +pub fn l1_bridge_contract() -> ethabi::Contract { + load_contract(L1_DEFAULT_BRIDGE_INTERFACE) +} + +/// `EthereumProvider` gains access to on-chain operations, such as deposits and full exits. +/// Methods to interact with Ethereum return corresponding Ethereum transaction hash. 
+/// In order to monitor transaction execution, an Ethereum node `web3` API is exposed +/// via `EthereumProvider::web3` method. +#[derive(Debug)] +pub struct EthereumProvider { + eth_client: ETHDirectClient, + default_bridges: BridgeAddresses, + erc20_abi: ethabi::Contract, + l1_bridge_abi: ethabi::Contract, + confirmation_timeout: Duration, + polling_interval: Duration, +} + +pub const DEFAULT_PRIORITY_FEE: u64 = 2_000_000_000; + +impl EthereumProvider { + /// Creates a new Ethereum provider. + pub async fn new

( + provider: &P, + eth_web3_url: impl AsRef, + eth_signer: S, + eth_addr: H160, + ) -> Result + where + P: ZksNamespaceClient + Sync, + { + let transport = Http::new(eth_web3_url.as_ref()) + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + let l1_chain_id = provider.l1_chain_id().await?; + if l1_chain_id > U64::from(u16::MAX) { + return Err(ClientError::MalformedResponse( + "Chain id overflow".to_owned(), + )); + } + let l1_chain_id = + u8::try_from(l1_chain_id).expect("Expected chain id to be in range 0..256"); + + let contract_address = provider.get_main_contract().await?; + let default_bridges = provider + .get_bridge_contracts() + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + let eth_client = ETHDirectClient::new( + transport, + zksync_contract(), + eth_addr, + eth_signer, + contract_address, + DEFAULT_PRIORITY_FEE.into(), + L1ChainId(l1_chain_id), + ); + let erc20_abi = ierc20_contract(); + let l1_bridge_abi = l1_bridge_contract(); + + Ok(Self { + eth_client, + default_bridges, + erc20_abi, + l1_bridge_abi, + confirmation_timeout: Duration::from_secs(10), + polling_interval: Duration::from_secs(1), + }) + } + + /// Exposes Ethereum node `web3` API. + pub fn client(&self) -> ÐDirectClient { + &self.eth_client + } + + /// Returns the zkSync contract address. + pub fn contract_address(&self) -> H160 { + self.client().contract_addr() + } + + /// Returns the Ethereum account balance. + pub async fn balance(&self) -> Result { + self.client() + .sender_eth_balance("provider") + .await + .map_err(|err| ClientError::NetworkError(err.to_string())) + } + + /// Returns the ERC20 token account balance. 
+ pub async fn erc20_balance( + &self, + address: Address, + token_address: Address, + ) -> Result { + let res = self + .eth_client + .call_contract_function( + "balanceOf", + address, + None, + Options::default(), + None, + token_address, + self.erc20_abi.clone(), + ) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + Ok(res) + } + + /// Returns the pending nonce for the Ethereum account. + pub async fn nonce(&self) -> Result { + self.client() + .pending_nonce("provider") + .await + .map_err(|err| ClientError::NetworkError(err.to_string())) + } + + /// Checks whether ERC20 of a certain token deposit is approved for account. + pub async fn is_erc20_deposit_approved( + &self, + token_address: Address, + bridge: Option

, + ) -> Result { + self.is_limited_erc20_deposit_approved(token_address, U256::from(2).pow(255.into()), bridge) + .await + } + + pub async fn l2_token_address( + &self, + l1_token_address: Address, + bridge: Option
, + ) -> Result { + let bridge = bridge.unwrap_or(self.default_bridges.l1_erc20_default_bridge); + let l2_token_address = self + .eth_client + .call_contract_function( + "l2TokenAddress", + l1_token_address, + None, + Options::default(), + None, + bridge, + self.l1_bridge_abi.clone(), + ) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + Ok(l2_token_address) + } + + /// Checks whether ERC20 of a certain token deposit with limit is approved for account. + pub async fn is_limited_erc20_deposit_approved( + &self, + token_address: Address, + erc20_approve_threshold: U256, + bridge: Option
, + ) -> Result { + let bridge = bridge.unwrap_or(self.default_bridges.l1_erc20_default_bridge); + let current_allowance = self + .client() + .allowance_on_contract(token_address, bridge, self.erc20_abi.clone()) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + Ok(current_allowance >= erc20_approve_threshold) + } + + /// Sends a transaction to ERC20 token contract to approve the ERC20 deposit. + pub async fn approve_erc20_token_deposits( + &self, + token_address: Address, + bridge: Option
, + ) -> Result { + self.limited_approve_erc20_token_deposits(token_address, U256::max_value(), bridge) + .await + } + + /// Sends a transaction to ERC20 token contract to approve the limited ERC20 deposit. + pub async fn limited_approve_erc20_token_deposits( + &self, + token_address: Address, + max_erc20_approve_amount: U256, + bridge: Option
, + ) -> Result { + let bridge = bridge.unwrap_or(self.default_bridges.l1_erc20_default_bridge); + let contract_function = self + .erc20_abi + .function("approve") + .expect("failed to get function parameters"); + let params = (bridge, max_erc20_approve_amount); + let data = contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + let signed_tx = self + .client() + .sign_prepared_tx_for_addr( + data, + token_address, + Options { + gas: Some(300_000.into()), + ..Default::default() + }, + "provider", + ) + .await + .map_err(|_| ClientError::IncorrectCredentials)?; + + let transaction_hash = self + .client() + .send_raw_tx(signed_tx.raw_tx) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + Ok(transaction_hash) + } + + /// Performs a transfer of funds from one Ethereum account to another. + /// Note: This operation is performed on Ethereum, and not related to zkSync directly. + pub async fn transfer( + &self, + token_address: Address, + amount: U256, + to: H160, + options: Option, + ) -> Result { + let signed_tx = if is_token_eth(token_address) { + let options = Options { + value: Some(amount), + gas: Some(300_000.into()), + ..options.unwrap_or_default() + }; + self.client() + .sign_prepared_tx_for_addr(Vec::new(), to, options, "provider") + .await + .map_err(|_| ClientError::IncorrectCredentials)? + } else { + let contract_function = self + .erc20_abi + .function("transfer") + .expect("failed to get function parameters"); + let params = (to, amount); + let data = contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + self.client() + .sign_prepared_tx_for_addr( + data, + token_address, + Options { + gas: Some(300_000.into()), + ..options.unwrap_or_default() + }, + "provider", + ) + .await + .map_err(|_| ClientError::IncorrectCredentials)? 
+ }; + + let transaction_hash = self + .client() + .send_raw_tx(signed_tx.raw_tx) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + Ok(transaction_hash) + } + + #[cfg(feature = "mint")] + pub async fn mint_erc20( + &self, + token_address: Address, + amount: U256, + to: H160, + ) -> Result { + let signed_tx = { + let contract_function = self + .erc20_abi + .function("mint") + .expect("failed to get function parameters"); + let params = (to, amount); + let data = contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + self.eth_client + .sign_prepared_tx_for_addr( + data, + token_address, + Options { + gas: Some(100_000.into()), + ..Default::default() + }, + "provider", + ) + .await + .map_err(|_| ClientError::IncorrectCredentials)? + }; + + let transaction_hash = self + .eth_client + .send_raw_tx(signed_tx.raw_tx) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + + Ok(transaction_hash) + } + + pub async fn base_cost( + &self, + gas_limit: U256, + gas_per_pubdata_byte: u32, + gas_price: Option, + ) -> Result { + let gas_price = if let Some(gas_price) = gas_price { + gas_price + } else { + self.eth_client.get_gas_price("zksync-rs").await? 
+ }; + self.eth_client + .call_main_contract_function( + "l2TransactionBaseCost", + (gas_price, gas_limit, gas_per_pubdata_byte), + None, + Default::default(), + None, + ) + .await + } + + #[allow(clippy::too_many_arguments)] + pub async fn request_execute( + &self, + contract_address: Address, + l2_value: U256, + calldata: Bytes, + gas_limit: U256, + factory_deps: Option>, + operator_tip: Option, + gas_price: Option, + refund_recipient: Address, + ) -> Result { + let operator_tip = operator_tip.unwrap_or_default(); + let factory_deps = factory_deps.unwrap_or_default(); + let gas_price = if let Some(gas_price) = gas_price { + gas_price + } else { + self.eth_client + .get_gas_price("zksync-rs") + .await + .map_err(|e| ClientError::NetworkError(e.to_string()))? + }; + let base_cost = self + .base_cost(gas_limit, L1_TO_L2_GAS_PER_PUBDATA, Some(gas_price)) + .await + .map_err(|e| ClientError::NetworkError(e.to_string()))?; + let value = base_cost + operator_tip + l2_value; + let tx_data = self.eth_client.encode_tx_data( + "requestL2Transaction", + ( + contract_address, + l2_value, + calldata, + gas_limit, + L1_TO_L2_GAS_PER_PUBDATA, + factory_deps, + refund_recipient, + ), + ); + + let tx = self + .eth_client + .sign_prepared_tx( + tx_data, + Options::with(|f| { + f.gas = Some(U256::from(300000)); + f.value = Some(value) + }), + "zksync-rs", + ) + .await + .map_err(|e| ClientError::NetworkError(e.to_string()))?; + + let tx_hash = self + .eth_client + .send_raw_tx(tx.raw_tx) + .await + .map_err(|e| ClientError::NetworkError(e.to_string()))?; + + Ok(tx_hash) + } + + /// Performs a deposit in zkSync network. + /// For ERC20 tokens, a deposit must be approved beforehand via the `EthereumProvider::approve_erc20_token_deposits` method. + #[allow(clippy::too_many_arguments)] + pub async fn deposit( + &self, + l1_token_address: Address, + amount: U256, + to: Address, + operator_tip: Option, + bridge_address: Option
, + eth_options: Option, + ) -> Result { + let operator_tip = operator_tip.unwrap_or_default(); + + let is_eth_deposit = l1_token_address == Address::zero(); + + let base_cost: U256 = U256::zero(); + + // Calculate the amount of ether to be sent in the transaction. + let total_value = if is_eth_deposit { + // Both fee component and the deposit amount are represented as `msg.value`. + base_cost + operator_tip + amount + } else { + // ERC20 token, `msg.value` is used only for the fee. + base_cost + operator_tip + }; + + // Calculate the gas limit for transaction: it may vary for different tokens. + let gas_limit = if is_eth_deposit { + 200_000u64 + } else { + let gas_limits: Map = serde_json::from_str(RAW_ERC20_DEPOSIT_GAS_LIMIT) + .map_err(|_| ClientError::Other)?; + let address_str = format!("{:?}", l1_token_address); + let is_mainnet = Network::from_chain_id(self.client().chain_id()) == Network::Mainnet; + if is_mainnet && gas_limits.contains_key(&address_str) { + gas_limits + .get(&address_str) + .unwrap() + .as_u64() + .ok_or(ClientError::Other)? + } else { + 300000u64 + } + }; + + let mut options = eth_options.unwrap_or_default(); + options.value = Some(total_value); + options.gas = Some(gas_limit.into()); + + let l2_gas_limit = U256::from(10000000u32); + let transaction_hash = if is_eth_deposit { + self.request_execute( + to, + amount, + Default::default(), + l2_gas_limit, + None, + None, + None, + Default::default(), + ) + .await? 
+ } else { + let bridge_address = + bridge_address.unwrap_or(self.default_bridges.l1_erc20_default_bridge); + let contract_function = self + .l1_bridge_abi + .function("deposit") + .expect("failed to get function parameters"); + let params = ( + to, + l1_token_address, + amount, + l2_gas_limit, + U256::from(L1_TO_L2_GAS_PER_PUBDATA), + ); + let data = contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters"); + + let signed_tx = self + .eth_client + .sign_prepared_tx_for_addr(data, bridge_address, options, "provider") + .await + .map_err(|_| ClientError::IncorrectCredentials)?; + self.eth_client + .send_raw_tx(signed_tx.raw_tx) + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))? + }; + + Ok(transaction_hash) + } + + /// Sets the timeout to wait for transactions to appear in the Ethereum network. + /// By default it is set to 10 seconds. + pub fn set_confirmation_timeout(&mut self, timeout: Duration) { + self.confirmation_timeout = timeout; + } + + pub fn set_polling_interval(&mut self, polling_interval: Duration) { + self.polling_interval = polling_interval; + } + + /// Waits until the transaction is confirmed by the Ethereum blockchain. + pub async fn wait_for_tx(&self, tx_hash: H256) -> Result { + let mut poller = tokio::time::interval(self.polling_interval); + + let start = Instant::now(); + loop { + if let Some(receipt) = self + .client() + .tx_receipt(tx_hash, "provider") + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))? + { + return Ok(receipt); + } + + if start.elapsed() > self.confirmation_timeout { + return Err(ClientError::OperationTimeout); + } + poller.tick().await; + } + } +} + +/// Trait describes the ability to receive the priority operation from this holder. +pub trait PriorityOpHolder { + /// Returns the priority operation if exists. + fn priority_op(&self) -> Option; + + /// Returns the handle for the priority operation. 
+ fn priority_op_handle<'a, P>(&self, provider: &'a P) -> Option> + where + P: EthNamespaceClient + Sync, + { + self.priority_op() + .map(|op| SyncTransactionHandle::new(op.hash(), provider)) + } +} + +impl PriorityOpHolder for TransactionReceipt { + fn priority_op(&self) -> Option { + self.logs + .iter() + .find_map(|op| L1Tx::try_from(op.clone()).ok().map(|tx| tx.common_data)) + } +} diff --git a/sdk/zksync-rs/src/lib.rs b/sdk/zksync-rs/src/lib.rs new file mode 100644 index 000000000000..4ee03e8ff9da --- /dev/null +++ b/sdk/zksync-rs/src/lib.rs @@ -0,0 +1,18 @@ +pub mod error; +pub mod ethereum; +pub mod operations; +pub mod signer; +pub mod utils; +pub mod wallet; + +pub use crate::{ethereum::EthereumProvider, wallet::Wallet}; +pub use zksync_types::network::Network; + +pub use zksync_types; +pub use zksync_types::web3; + +pub use zksync_web3_decl::{ + jsonrpsee::http_client::*, + namespaces::{EthNamespaceClient, NetNamespaceClient, Web3NamespaceClient, ZksNamespaceClient}, + types, +}; diff --git a/sdk/zksync-rs/src/operations/deploy_contract.rs b/sdk/zksync-rs/src/operations/deploy_contract.rs new file mode 100644 index 000000000000..ef6c55fbfd0d --- /dev/null +++ b/sdk/zksync-rs/src/operations/deploy_contract.rs @@ -0,0 +1,162 @@ +use zksync_eth_signer::EthereumSigner; +use zksync_types::{ + l2::L2Tx, transaction_request::PaymasterParams, Execute, Nonce, CONTRACT_DEPLOYER_ADDRESS, U256, +}; +use zksync_utils::bytecode::hash_bytecode; + +use crate::{ + error::ClientError, operations::SyncTransactionHandle, wallet::Wallet, zksync_types::fee::Fee, + EthNamespaceClient, ZksNamespaceClient, +}; + +pub struct DeployContractBuilder<'a, S: EthereumSigner, P> { + wallet: &'a Wallet, + bytecode: Option>, + calldata: Option>, + fee: Option, + nonce: Option, + value: Option, + factory_deps: Option>>, + paymaster_params: Option, +} + +impl<'a, S, P> DeployContractBuilder<'a, S, P> +where + S: EthereumSigner, + P: ZksNamespaceClient + EthNamespaceClient + Sync, +{ + /// 
Initializes a change public key transaction building process. + pub fn new(wallet: &'a Wallet) -> Self { + Self { + wallet, + bytecode: None, + calldata: None, + fee: None, + nonce: None, + value: None, + factory_deps: None, + paymaster_params: None, + } + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn tx(self) -> Result { + let paymaster_params = self.paymaster_params.clone().unwrap_or_default(); + + let fee = match self.fee { + Some(fee) => fee, + None => self.estimate_fee(Some(paymaster_params.clone())).await?, + }; + + let bytecode = self + .bytecode + .ok_or_else(|| ClientError::MissingRequiredField("bytecode".into()))?; + + let calldata = self.calldata.unwrap_or_default(); + + let nonce = match self.nonce { + Some(nonce) => nonce, + None => Nonce(self.wallet.get_nonce().await?), + }; + + let main_contract_hash = hash_bytecode(&bytecode); + let execute_calldata = + Execute::encode_deploy_params_create(Default::default(), main_contract_hash, calldata); + + let mut factory_deps = self.factory_deps.unwrap_or_default(); + factory_deps.push(bytecode.clone()); + + self.wallet + .signer + .sign_execute_contract_for_deploy( + CONTRACT_DEPLOYER_ADDRESS, + execute_calldata, + fee, + nonce, + Some(vec![bytecode.clone()]), + paymaster_params, + ) + .await + .map_err(ClientError::SigningError) + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn send(self) -> Result, ClientError> { + let wallet = self.wallet; + let tx = self.tx().await?; + + wallet.send_transaction(tx).await + } + + /// Sets the calldata for deploying + pub fn constructor_calldata(mut self, calldata: Vec) -> Self { + self.calldata = Some(calldata); + self + } + + /// Sets the factory deps for deploying + pub fn factory_deps(mut self, factory_deps: Vec>) -> Self { + self.factory_deps = Some(factory_deps); + self + } + + /// Sets the deploy contract transaction bytecode. 
+ pub fn bytecode(mut self, bytecode: Vec) -> Self { + self.bytecode = Some(bytecode); + self + } + + /// Set the fee amount. + /// + /// For more details, see [utils](../utils/index.html) functions. + pub fn fee(mut self, fee: Fee) -> Self { + self.fee = Some(fee); + self + } + + /// Sets the transaction nonce. + pub fn nonce(mut self, nonce: Nonce) -> Self { + self.nonce = Some(nonce); + self + } + + /// Sets the paymaster parameters. + pub fn paymaster_params(mut self, paymaster_params: PaymasterParams) -> Self { + self.paymaster_params = Some(paymaster_params); + self + } + + pub async fn estimate_fee( + &self, + paymaster_params: Option, + ) -> Result { + let bytecode = self + .bytecode + .clone() + .ok_or_else(|| ClientError::MissingRequiredField("bytecode".into()))?; + + let paymaster_params = paymaster_params + .or_else(|| self.paymaster_params.clone()) + .unwrap_or_default(); + + let calldata = self.calldata.clone().unwrap_or_default(); + let main_contract_hash = hash_bytecode(&bytecode); + let mut factory_deps = self.factory_deps.clone().unwrap_or_default(); + factory_deps.push(bytecode); + let l2_tx = L2Tx::new( + CONTRACT_DEPLOYER_ADDRESS, + Execute::encode_deploy_params_create(Default::default(), main_contract_hash, calldata), + Nonce(0), + Default::default(), + self.wallet.address(), + self.value.unwrap_or_default(), + Some(factory_deps), + paymaster_params, + ); + self.wallet + .provider + .estimate_fee(l2_tx.into()) + .await + .map_err(Into::into) + } +} diff --git a/sdk/zksync-rs/src/operations/execute_contract.rs b/sdk/zksync-rs/src/operations/execute_contract.rs new file mode 100644 index 000000000000..66df46580c59 --- /dev/null +++ b/sdk/zksync-rs/src/operations/execute_contract.rs @@ -0,0 +1,162 @@ +use zksync_eth_signer::EthereumSigner; +use zksync_types::{ + fee::Fee, l2::L2Tx, transaction_request::PaymasterParams, Address, Nonce, U256, +}; + +use crate::{ + error::ClientError, operations::SyncTransactionHandle, wallet::Wallet, 
EthNamespaceClient, + ZksNamespaceClient, +}; + +pub struct ExecuteContractBuilder<'a, S: EthereumSigner, P> { + wallet: &'a Wallet, + contract_address: Option
, + calldata: Option>, + fee: Option, + value: Option, + nonce: Option, + factory_deps: Option>>, + paymaster_params: Option, +} + +impl<'a, S, P> ExecuteContractBuilder<'a, S, P> +where + S: EthereumSigner, + P: ZksNamespaceClient + EthNamespaceClient + Sync, +{ + /// Initializes a change public key transaction building process. + pub fn new(wallet: &'a Wallet) -> Self { + Self { + wallet, + contract_address: None, + calldata: None, + fee: None, + nonce: None, + value: None, + factory_deps: None, + paymaster_params: None, + } + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn tx(self) -> Result { + let paymaster_params = self.paymaster_params.clone().unwrap_or_default(); + + let fee = match self.fee { + Some(fee) => fee, + None => self.estimate_fee(Some(paymaster_params.clone())).await?, + }; + + let contract_address = self + .contract_address + .ok_or_else(|| ClientError::MissingRequiredField("contract_address".into()))?; + + let calldata = self + .calldata + .ok_or_else(|| ClientError::MissingRequiredField("calldata".into()))?; + + let nonce = match self.nonce { + Some(nonce) => nonce, + None => Nonce(self.wallet.get_nonce().await?), + }; + + self.wallet + .signer + .sign_execute_contract( + contract_address, + calldata, + fee, + nonce, + self.factory_deps, + paymaster_params, + ) + .await + .map_err(ClientError::SigningError) + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn send(self) -> Result, ClientError> { + let wallet = self.wallet; + let tx = self.tx().await?; + + wallet.send_transaction(tx).await + } + + /// Sets the calldata for the transaction. + pub fn calldata(mut self, calldata: Vec) -> Self { + self.calldata = Some(calldata); + self + } + + /// Sets the value for the transaction. + pub fn value(mut self, value: U256) -> Self { + self.value = Some(value); + self + } + + /// Sets the transaction contract address. 
+ pub fn contract_address(mut self, address: Address) -> Self { + self.contract_address = Some(address); + self + } + + /// Set the fee amount. + /// + /// For more details, see [utils](../utils/index.html) functions. + pub fn fee(mut self, fee: Fee) -> Self { + self.fee = Some(fee); + self + } + + /// Sets the transaction nonce. + pub fn nonce(mut self, nonce: Nonce) -> Self { + self.nonce = Some(nonce); + self + } + + /// Set factory deps + pub fn factory_deps(mut self, factory_deps: Vec>) -> Self { + self.factory_deps = Some(factory_deps); + self + } + + /// Sets the paymaster parameters. + pub fn paymaster_params(mut self, paymaster_params: PaymasterParams) -> Self { + self.paymaster_params = Some(paymaster_params); + self + } + + pub async fn estimate_fee( + &self, + paymaster_params: Option, + ) -> Result { + let contract_address = self + .contract_address + .ok_or_else(|| ClientError::MissingRequiredField("contract_address".into()))?; + + let calldata = self + .calldata + .clone() + .ok_or_else(|| ClientError::MissingRequiredField("calldata".into()))?; + + let paymaster_params = paymaster_params + .or_else(|| self.paymaster_params.clone()) + .unwrap_or_default(); + + let execute = L2Tx::new( + contract_address, + calldata, + Nonce(0), + Default::default(), + self.wallet.address(), + self.value.unwrap_or_default(), + self.factory_deps.clone(), + paymaster_params, + ); + self.wallet + .provider + .estimate_fee(execute.into()) + .await + .map_err(Into::into) + } +} diff --git a/sdk/zksync-rs/src/operations/mod.rs b/sdk/zksync-rs/src/operations/mod.rs new file mode 100644 index 000000000000..a59bd57a2dd9 --- /dev/null +++ b/sdk/zksync-rs/src/operations/mod.rs @@ -0,0 +1,151 @@ +//! This file contains representation of not signed transactions and builders for them. 
+ +use std::time::{Duration, Instant}; + +use crate::{error::ClientError, EthNamespaceClient}; +use zksync_types::l2::L2Tx; +use zksync_types::{ + api::{BlockNumber, TransactionReceipt}, + Bytes, L2ChainId, H256, +}; + +pub use self::{ + deploy_contract::DeployContractBuilder, + execute_contract::ExecuteContractBuilder, + transfer::{create_transfer_calldata, TransferBuilder}, + withdraw::WithdrawBuilder, +}; + +mod deploy_contract; +mod execute_contract; +mod transfer; +mod withdraw; + +/// Encodes transaction into a bytes sequence in accordance +/// with the zkSync L2 protocol contract. +/// +/// Used for estimating fees and submitting transactions. +pub fn encode_transaction(tx: &L2Tx, chain_id: L2ChainId) -> Result { + Ok(tx.get_rlp_bytes(chain_id)) +} + +/// Handle for transaction, providing an interface to control its execution. +/// For obtained handle it's possible to set the polling interval, commit timeout +/// and verify timeout values. +/// +/// By default, awaiting for transaction may run up to forever, and the polling is +/// performed once a second. +#[derive(Debug)] +pub struct SyncTransactionHandle<'a, P> { + hash: H256, + provider: &'a P, + polling_interval: Duration, + commit_timeout: Option, + finalize_timeout: Option, +} + +impl<'a, P> SyncTransactionHandle<'a, P> +where + P: EthNamespaceClient + Sync, +{ + pub fn new(hash: H256, provider: &'a P) -> Self { + Self { + hash, + provider, + polling_interval: Duration::from_secs(1), // 1 second. + commit_timeout: None, // Wait until forever + finalize_timeout: None, // Wait until forever + } + } + + const MIN_POLLING_INTERVAL: Duration = Duration::from_millis(200); + + /// Sets the polling interval. Must be at least 200 milliseconds. 
+ pub fn polling_interval(&mut self, polling_interval: Duration) -> Result<(), ClientError> { + if polling_interval >= Self::MIN_POLLING_INTERVAL { + self.polling_interval = polling_interval; + Ok(()) + } else { + Err(ClientError::PollingIntervalIsTooSmall) + } + } + + /// Returns the transaction hash. + pub fn hash(&self) -> H256 { + self.hash + } + + /// Sets the timeout for commit operation. + /// With this value set, `SyncTransactionHandle::wait_for_commit` will return a `ClientError::OperationTimeout` + /// error if block will not be committed within provided time range. + pub fn commit_timeout(mut self, commit_timeout: Duration) -> Self { + self.commit_timeout = Some(commit_timeout); + self + } + + /// Sets the timeout for finalize operation. + /// With this value set, `SyncTransactionHandle::wait_for_finalize` will return a `ClientError::OperationTimeout` + /// error if block will not be finalized within provided time range. + pub fn finalize_timeout(mut self, verify_timeout: Duration) -> Self { + self.finalize_timeout = Some(verify_timeout); + self + } + + /// Awaits for the transaction commit and returns the information about its execution. + pub async fn wait_for_commit(&self) -> Result { + self.wait_for(self.commit_timeout, BlockNumber::Committed) + .await + } + + /// Awaits for the transaction finalization and returns the information about its execution. + pub async fn wait_for_finalize(&self) -> Result { + self.wait_for(self.finalize_timeout, BlockNumber::Finalized) + .await + } + + async fn wait_for( + &self, + timeout: Option, + status: BlockNumber, + ) -> Result { + let mut timer = tokio::time::interval(self.polling_interval); + let start = Instant::now(); + + let mut receipt = None; + + loop { + timer.tick().await; + + if let Some(timeout) = timeout { + if start.elapsed() >= timeout { + return Err(ClientError::OperationTimeout); + } + } + + // First, wait for the receipt with a block number. 
+ if receipt.is_none() { + let response = self.provider.get_transaction_receipt(self.hash).await?; + if response.as_ref().and_then(|r| r.block_number).is_some() { + receipt = response; + } else { + continue; + } + } + + // Wait for transaction to be included into the committed + // or finalized block: + // Fetch the latest block with the given status and + // check if it's greater than or equal to the one from receipt. + + let receipt_ref = receipt.as_ref().unwrap(); + let block_number = receipt_ref.block_number.unwrap(); + + let response = self.provider.get_block_by_number(status, false).await?; + if let Some(received_number) = response.map(|block| block.number) { + if block_number <= received_number { + return Ok(receipt.take().unwrap()); + } + } + } + } +} diff --git a/sdk/zksync-rs/src/operations/transfer.rs b/sdk/zksync-rs/src/operations/transfer.rs new file mode 100644 index 000000000000..ef47b1a28144 --- /dev/null +++ b/sdk/zksync-rs/src/operations/transfer.rs @@ -0,0 +1,197 @@ +use zksync_eth_signer::EthereumSigner; +use zksync_types::L2_ETH_TOKEN_ADDRESS; +use zksync_types::{fee::Fee, l2::L2Tx, Address, Nonce, U256}; + +use crate::ethereum::ierc20_contract; +use crate::web3::contract::tokens::Tokenize; +use crate::zksync_types::{transaction_request::PaymasterParams, Execute, L2TxCommonData}; +use crate::{ + error::ClientError, operations::SyncTransactionHandle, wallet::Wallet, EthNamespaceClient, + ZksNamespaceClient, +}; + +pub struct TransferBuilder<'a, S: EthereumSigner, P> { + wallet: &'a Wallet, + to: Option
, + token: Option
, + amount: Option, + fee: Option, + nonce: Option, + paymaster_params: Option, +} + +impl<'a, S, P> TransferBuilder<'a, S, P> +where + S: EthereumSigner, + P: ZksNamespaceClient + EthNamespaceClient + Sync, +{ + /// Initializes a transfer transaction building process. + pub fn new(wallet: &'a Wallet) -> Self { + Self { + wallet, + to: None, + token: None, + amount: None, + fee: None, + nonce: None, + paymaster_params: None, + } + } + + /// Directly returns the signed transfer transaction for the subsequent usage. + pub async fn tx(self) -> Result { + let paymaster_params = self.paymaster_params.clone().unwrap_or_default(); + + let fee = match self.fee { + Some(fee) => fee, + None => self.estimate_fee(Some(paymaster_params.clone())).await?, + }; + + let to = self + .to + .ok_or_else(|| ClientError::MissingRequiredField("to".into()))?; + let token = self + .token + .ok_or_else(|| ClientError::MissingRequiredField("token".into()))?; + let amount = self + .amount + .ok_or_else(|| ClientError::MissingRequiredField("amount".into()))?; + + let nonce = match self.nonce { + Some(nonce) => nonce, + None => Nonce(self.wallet.get_nonce().await?), + }; + + self.wallet + .signer + .sign_transfer(to, token, amount, fee, nonce, paymaster_params) + .await + .map_err(ClientError::SigningError) + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn send(self) -> Result, ClientError> { + let wallet = self.wallet; + let tx = self.tx().await?; + + wallet.send_transaction(tx).await + } + + /// Sets the transaction token address. + pub fn token(mut self, token: Address) -> Self { + self.token = Some(token); + self + } + + /// Set the transfer amount. + /// + /// For more details, see [utils](../utils/index.html) functions. + pub fn amount(mut self, amount: U256) -> Self { + self.amount = Some(amount); + self + } + + /// Set the fee amount. + /// + /// For more details, see [utils](../utils/index.html) functions. 
+ pub fn fee(mut self, fee: Fee) -> Self { + self.fee = Some(fee); + self + } + + /// Sets the transaction recipient. + pub fn to(mut self, to: Address) -> Self { + self.to = Some(to); + self + } + + /// Same as `TransferBuilder::to`, but accepts a string address value. + /// + /// Provided string value must be a correct address in a hexadecimal form, + /// otherwise an error will be returned. + pub fn str_to(mut self, to: impl AsRef) -> Result { + let to: Address = to + .as_ref() + .parse() + .map_err(|_| ClientError::IncorrectAddress)?; + + self.to = Some(to); + Ok(self) + } + + /// Sets the transaction nonce. + pub fn nonce(mut self, nonce: Nonce) -> Self { + self.nonce = Some(nonce); + self + } + + /// Sets the paymaster parameters. + pub fn paymaster_params(mut self, paymaster_params: PaymasterParams) -> Self { + self.paymaster_params = Some(paymaster_params); + self + } + + pub async fn estimate_fee( + &self, + paymaster_params: Option, + ) -> Result { + let to = self + .to + .ok_or_else(|| ClientError::MissingRequiredField("to".into()))?; + let token = self + .token + .ok_or_else(|| ClientError::MissingRequiredField("token".into()))?; + let amount = self + .amount + .ok_or_else(|| ClientError::MissingRequiredField("amount".into()))?; + + let paymaster_params = paymaster_params + .or_else(|| self.paymaster_params.clone()) + .unwrap_or_default(); + + let tx = if token.is_zero() || token == L2_ETH_TOKEN_ADDRESS { + // ETH estimate + Execute { + contract_address: to, + calldata: Default::default(), + factory_deps: None, + value: amount, + } + } else { + // ERC-20 estimate + Execute { + contract_address: token, + calldata: create_transfer_calldata(to, amount), + factory_deps: None, + value: Default::default(), + } + }; + let common_data = L2TxCommonData { + initiator_address: self.wallet.address(), + nonce: Nonce(0), + paymaster_params, + ..Default::default() + }; + let l2_tx = L2Tx { + common_data, + execute: tx, + received_timestamp_ms: 0, + }; + self.wallet + 
.provider + .estimate_fee(l2_tx.into()) + .await + .map_err(Into::into) + } +} + +pub fn create_transfer_calldata(to: Address, amount: U256) -> Vec { + let contract = ierc20_contract(); + let contract_function = contract + .function("transfer") + .expect("failed to get function parameters"); + let params = (to, amount); + contract_function + .encode_input(¶ms.into_tokens()) + .expect("failed to encode parameters") +} diff --git a/sdk/zksync-rs/src/operations/withdraw.rs b/sdk/zksync-rs/src/operations/withdraw.rs new file mode 100644 index 000000000000..b32629ad2d62 --- /dev/null +++ b/sdk/zksync-rs/src/operations/withdraw.rs @@ -0,0 +1,190 @@ +use zksync_eth_signer::EthereumSigner; + +use zksync_types::l2::L2Tx; +use zksync_types::{ + fee::Fee, tokens::ETHEREUM_ADDRESS, transaction_request::PaymasterParams, web3::ethabi, + Address, Nonce, L2_ETH_TOKEN_ADDRESS, U256, +}; + +use crate::{ + error::ClientError, + operations::{ExecuteContractBuilder, SyncTransactionHandle}, + wallet::Wallet, + EthNamespaceClient, ZksNamespaceClient, +}; + +pub struct WithdrawBuilder<'a, S: EthereumSigner, P> { + wallet: &'a Wallet, + to: Option
, + token: Option
, + amount: Option, + fee: Option, + nonce: Option, + bridge: Option
, + paymaster_params: Option, +} + +impl<'a, S, P> WithdrawBuilder<'a, S, P> +where + S: EthereumSigner, + P: ZksNamespaceClient + EthNamespaceClient + Sync, +{ + /// Initializes a withdraw transaction building process. + pub fn new(wallet: &'a Wallet) -> Self { + Self { + wallet, + to: None, + token: None, + amount: None, + fee: None, + nonce: None, + bridge: None, + paymaster_params: None, + } + } + + async fn get_execute_builder(&self) -> Result, ClientError> { + let token = self + .token + .ok_or_else(|| ClientError::MissingRequiredField("token".into()))?; + let to = self + .to + .ok_or_else(|| ClientError::MissingRequiredField("to".into()))?; + let amount = self + .amount + .ok_or_else(|| ClientError::MissingRequiredField("amount".into()))?; + + let (contract_address, calldata, value) = if token == ETHEREUM_ADDRESS { + let calldata_params = vec![ethabi::ParamType::Address]; + let mut calldata = ethabi::short_signature("withdraw", &calldata_params).to_vec(); + calldata.append(&mut ethabi::encode(&[ethabi::Token::Address(to)])); + (L2_ETH_TOKEN_ADDRESS, calldata, amount) + } else { + let bridge_address = if let Some(bridge) = self.bridge { + bridge + } else { + // Use the default bridge if one was not specified. 
+ let default_bridges = self + .wallet + .provider + .get_bridge_contracts() + .await + .map_err(|err| ClientError::NetworkError(err.to_string()))?; + default_bridges.l2_erc20_default_bridge + }; + + let calldata_params = vec![ + ethabi::ParamType::Address, + ethabi::ParamType::Address, + ethabi::ParamType::Uint(256), + ]; + let mut calldata = ethabi::short_signature("withdraw", &calldata_params).to_vec(); + calldata.append(&mut ethabi::encode(&[ + ethabi::Token::Address(to), + ethabi::Token::Address(token), + ethabi::Token::Uint(amount), + ])); + (bridge_address, calldata, U256::zero()) + }; + + let paymaster_params = self.paymaster_params.clone().unwrap_or_default(); + + let mut builder = ExecuteContractBuilder::new(self.wallet) + .contract_address(contract_address) + .calldata(calldata) + .value(value) + .paymaster_params(paymaster_params); + + if let Some(fee) = self.fee.clone() { + builder = builder.fee(fee); + } + if let Some(nonce) = self.nonce { + builder = builder.nonce(nonce); + } + + Ok(builder) + } + + /// Directly returns the signed withdraw transaction for the subsequent usage. + pub async fn tx(self) -> Result { + let builder = self.get_execute_builder().await?; + builder.tx().await + } + + /// Sends the transaction, returning the handle for its awaiting. + pub async fn send(self) -> Result, ClientError> { + let wallet = self.wallet; + let tx = self.tx().await?; + + wallet.send_transaction(tx).await + } + + /// Set the withdrawal amount. + /// + /// For more details, see [utils](../utils/index.html) functions. + pub fn amount(mut self, amount: U256) -> Self { + self.amount = Some(amount); + self + } + + /// Set the withdrawal token. + pub fn token(mut self, token: Address) -> Self { + self.token = Some(token); + self + } + + /// Set the fee amount. + /// + /// For more details, see [utils](../utils/index.html) functions. 
+ pub fn fee(mut self, fee: Fee) -> Self { + self.fee = Some(fee); + + self + } + + /// Sets the address of Ethereum wallet to withdraw funds to. + pub fn to(mut self, to: Address) -> Self { + self.to = Some(to); + self + } + + /// Same as `WithdrawBuilder::to`, but accepts a string address value. + /// + /// Provided string value must be a correct address in a hexadecimal form, + /// otherwise an error will be returned. + pub fn str_to(mut self, to: impl AsRef) -> Result { + let to: Address = to + .as_ref() + .parse() + .map_err(|_| ClientError::IncorrectAddress)?; + + self.to = Some(to); + Ok(self) + } + + /// Sets the transaction nonce. + pub fn nonce(mut self, nonce: Nonce) -> Self { + self.nonce = Some(nonce); + self + } + + /// Sets the bridge contract to request the withdrawal. + pub fn bridge(mut self, address: Address) -> Self { + self.bridge = Some(address); + self + } + + /// Sets the paymaster parameters. + pub fn paymaster_params(mut self, paymaster_params: PaymasterParams) -> Self { + self.paymaster_params = Some(paymaster_params); + self + } + + pub async fn estimate_fee( + &self, + paymaster_params: Option, + ) -> Result { + let builder = self.get_execute_builder().await?; + builder.estimate_fee(paymaster_params).await + } +} diff --git a/sdk/zksync-rs/src/signer.rs b/sdk/zksync-rs/src/signer.rs new file mode 100644 index 000000000000..0c92a354ce5b --- /dev/null +++ b/sdk/zksync-rs/src/signer.rs @@ -0,0 +1,146 @@ +// Built-in imports +use std::fmt::Debug; +// Workspace uses +use zksync_eth_signer::{error::SignerError, EthereumSigner}; +use zksync_types::L2_ETH_TOKEN_ADDRESS; +use zksync_types::{ + fee::Fee, l2::L2Tx, transaction_request::PaymasterParams, Address, Eip712Domain, L2ChainId, + Nonce, PackedEthSignature, U256, +}; +// Local imports +use crate::operations::create_transfer_calldata; +use crate::types::TransactionRequest; + +fn signing_failed_error(err: impl ToString) -> SignerError { + SignerError::SigningFailed(err.to_string()) +} + 
+#[derive(Debug)] +pub struct Signer { + pub(crate) eth_signer: S, + pub(crate) address: Address, + pub(crate) chain_id: L2ChainId, +} + +impl Signer { + pub fn new(eth_signer: S, address: Address, chain_id: L2ChainId) -> Self { + Self { + eth_signer, + address, + chain_id, + } + } + + pub async fn sign_transaction( + &self, + transaction: &L2Tx, + ) -> Result { + let domain = Eip712Domain::new(self.chain_id); + let transaction_request: TransactionRequest = transaction.clone().into(); + self.eth_signer + .sign_typed_data(&domain, &transaction_request) + .await + } + + pub async fn sign_transfer( + &self, + to: Address, + token: Address, + amount: U256, + fee: Fee, + nonce: Nonce, + paymaster_params: PaymasterParams, + ) -> Result { + // Sign Ether transfer + if token.is_zero() || token == L2_ETH_TOKEN_ADDRESS { + let mut transfer = L2Tx::new( + to, + Default::default(), + nonce, + fee, + self.eth_signer.get_address().await?, + amount, + None, + Default::default(), + ); + + let signature = self + .sign_transaction(&transfer) + .await + .map_err(signing_failed_error)?; + transfer.set_signature(signature); + + return Ok(transfer); + } + + // Sign ERC-20 transfer + let data = create_transfer_calldata(to, amount); + let mut transfer = L2Tx::new( + token, + data, + nonce, + fee, + self.eth_signer.get_address().await?, + U256::zero(), + None, + paymaster_params, + ); + + let signature = self + .sign_transaction(&transfer) + .await + .map_err(signing_failed_error)?; + transfer.set_signature(signature); + + Ok(transfer) + } + + pub async fn sign_execute_contract( + &self, + contract: Address, + calldata: Vec, + fee: Fee, + nonce: Nonce, + factory_deps: Option>>, + paymaster_params: PaymasterParams, + ) -> Result { + self.sign_execute_contract_for_deploy( + contract, + calldata, + fee, + nonce, + factory_deps, + paymaster_params, + ) + .await + } + + pub async fn sign_execute_contract_for_deploy( + &self, + contract: Address, + calldata: Vec, + fee: Fee, + nonce: Nonce, + 
factory_deps: Option>>, + paymaster_params: PaymasterParams, + ) -> Result { + let mut execute_contract = L2Tx::new( + contract, + calldata, + nonce, + fee, + self.eth_signer.get_address().await?, + U256::zero(), + factory_deps, + paymaster_params, + ); + + let signature = self + .sign_transaction(&execute_contract) + .await + .map_err(signing_failed_error)?; + execute_contract.set_signature(signature); + + Ok(execute_contract) + } +} diff --git a/sdk/zksync-rs/src/utils.rs b/sdk/zksync-rs/src/utils.rs new file mode 100644 index 000000000000..c97fe42d47e1 --- /dev/null +++ b/sdk/zksync-rs/src/utils.rs @@ -0,0 +1,115 @@ +use std::str::FromStr; + +use num::BigUint; + +use zksync_types::{transaction_request::PaymasterParams, Address, U256}; + +use crate::web3::ethabi::{Contract, Token}; + +const IPAYMASTER_FLOW_INTERFACE: &str = include_str!("./abi/IPaymasterFlow.json"); + +pub fn is_token_eth(token_address: Address) -> bool { + token_address == Address::zero() +} + +/// Converts `U256` into the corresponding `BigUint` value. +pub fn u256_to_biguint(value: U256) -> BigUint { + let mut bytes = [0u8; 32]; + value.to_little_endian(&mut bytes); + BigUint::from_bytes_le(&bytes) +} + +/// Converts `BigUint` value into the corresponding `U256` value. 
+pub fn biguint_to_u256(value: BigUint) -> U256 { + let bytes = value.to_bytes_le(); + U256::from_little_endian(&bytes) +} + +pub fn load_contract(raw_abi_string: &str) -> Contract { + let abi_string = serde_json::Value::from_str(raw_abi_string) + .expect("Malformed contract abi file") + .get("abi") + .expect("Malformed contract abi file") + .to_string(); + Contract::load(abi_string.as_bytes()).unwrap() +} + +pub fn get_approval_based_paymaster_input( + paymaster: Address, + token_address: Address, + min_allowance: U256, + inner_input: Vec, +) -> PaymasterParams { + let paymaster_contract = load_contract(IPAYMASTER_FLOW_INTERFACE); + let paymaster_input = paymaster_contract + .function("approvalBased") + .unwrap() + .encode_input(&[ + Token::Address(token_address), + Token::Uint(min_allowance), + Token::Bytes(inner_input), + ]) + .unwrap(); + PaymasterParams { + paymaster, + paymaster_input, + } +} + +pub fn get_approval_based_paymaster_input_for_estimation( + paymaster: Address, + token_address: Address, +) -> PaymasterParams { + get_approval_based_paymaster_input( + paymaster, + token_address, + Default::default(), + Default::default(), + ) +} + +pub fn get_general_paymaster_input(paymaster: Address, inner_input: Vec) -> PaymasterParams { + let paymaster_contract = load_contract(IPAYMASTER_FLOW_INTERFACE); + let paymaster_input = paymaster_contract + .function("general") + .unwrap() + .encode_input(&[Token::Bytes(inner_input)]) + .unwrap(); + PaymasterParams { + paymaster, + paymaster_input, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn biguint_u256_conversion_roundrip(u256: U256) { + let biguint = u256_to_biguint(u256); + // Make sure that the string representations are the same. 
+ assert_eq!(biguint.to_string(), u256.to_string()); + + let restored = biguint_to_u256(biguint); + assert_eq!(u256, restored); + } + + #[test] + fn test_zero_conversion() { + biguint_u256_conversion_roundrip(U256::zero()) + } + + #[test] + fn test_biguint_u256_conversion() { + // random value that is big enough + let u256 = U256::from(1_235_999_123_u64).pow(4u64.into()); + biguint_u256_conversion_roundrip(u256) + } + + #[test] + fn test_biguint_with_msb_conversion() { + // make sure the most significant bit was set + let u256 = U256::from_big_endian(&[0b11010011; 32]); + biguint_u256_conversion_roundrip(u256) + } +} diff --git a/sdk/zksync-rs/src/wallet.rs b/sdk/zksync-rs/src/wallet.rs new file mode 100644 index 000000000000..7d665b4f42e7 --- /dev/null +++ b/sdk/zksync-rs/src/wallet.rs @@ -0,0 +1,186 @@ +use zksync_eth_signer::EthereumSigner; +use zksync_types::transaction_request::CallRequest; +use zksync_types::{ + api::{BlockIdVariant, BlockNumber, TransactionRequest}, + l2::L2Tx, + tokens::ETHEREUM_ADDRESS, + Address, Bytes, Eip712Domain, U256, +}; + +use zksync_web3_decl::{ + jsonrpsee::http_client::{HttpClient, HttpClientBuilder}, + namespaces::{EthNamespaceClient, NetNamespaceClient, Web3NamespaceClient, ZksNamespaceClient}, +}; + +use crate::web3::contract::tokens::Tokenizable; +use crate::{ + error::ClientError, + ethereum::{ierc20_contract, EthereumProvider}, + operations::*, + signer::Signer, +}; + +#[derive(Debug)] +pub struct Wallet { + pub provider: P, + pub signer: Signer, +} + +impl Wallet +where + S: EthereumSigner, +{ + /// Creates new wallet with an HTTP client. + /// + /// # Panics + /// + /// Panics if the provided network is not supported. 
+ pub fn with_http_client( + rpc_address: &str, + signer: Signer, + ) -> Result, ClientError> { + let client = HttpClientBuilder::default().build(rpc_address)?; + + Ok(Wallet { + provider: client, + signer, + }) + } +} + +impl Wallet +where + S: EthereumSigner, + P: EthNamespaceClient + ZksNamespaceClient + NetNamespaceClient + Web3NamespaceClient + Sync, +{ + pub fn new(provider: P, signer: Signer) -> Self { + Self { provider, signer } + } + + /// Returns the wallet address. + pub fn address(&self) -> Address { + self.signer.address + } + + /// Returns balance in the account. + pub async fn get_balance( + &self, + block_number: BlockNumber, + token_address: Address, + ) -> Result { + let balance = if token_address == ETHEREUM_ADDRESS { + self.provider + .get_balance( + self.address(), + Some(BlockIdVariant::BlockNumber(block_number)), + ) + .await? + } else { + let token_contract = ierc20_contract(); + let contract_function = token_contract + .function("balanceOf") + .expect("failed to get `balanceOf` function"); + let data = contract_function + .encode_input(&[self.address().into_token()]) + .expect("failed to encode parameters"); + let req = CallRequest { + to: Some(token_address), + data: Some(data.into()), + ..Default::default() + }; + let bytes = self + .provider + .call(req, Some(BlockIdVariant::BlockNumber(block_number))) + .await?; + if bytes.0.len() == 32 { + U256::from_big_endian(&bytes.0) + } else { + U256::zero() + } + }; + + Ok(balance) + } + + /// Returns committed account nonce. + pub async fn get_nonce(&self) -> Result { + let nonce = self + .provider + .get_transaction_count( + self.address(), + Some(BlockIdVariant::BlockNumber(BlockNumber::Committed)), + ) + .await? + .as_u32(); + + Ok(nonce) + } + + /// Initializes `Transfer` transaction sending. + pub fn start_transfer(&self) -> TransferBuilder<'_, S, P> { + TransferBuilder::new(self) + } + + /// Initializes `Withdraw` transaction sending. 
+ pub fn start_withdraw(&self) -> WithdrawBuilder<'_, S, P> { + WithdrawBuilder::new(self) + } + + /// Initializes `DeployContract` transaction sending. + pub fn start_deploy_contract(&self) -> DeployContractBuilder<'_, S, P> { + DeployContractBuilder::new(self) + } + + /// Initializes `ExecuteContract` transaction sending. + pub fn start_execute_contract(&self) -> ExecuteContractBuilder<'_, S, P> { + ExecuteContractBuilder::new(self) + } + + /// Submits an L2 transaction. + pub async fn send_transaction( + &self, + tx: L2Tx, + ) -> Result, ClientError> { + // Since we sign the transaction with the Ethereum signature later on, + // we might want to get rid of the signature and the initiator left from `L2Tx`. + let transaction_request: TransactionRequest = { + let mut req: TransactionRequest = tx.into(); + if let Some(meta) = req.eip712_meta.as_mut() { + meta.custom_signature = None; + } + req.from = Some(self.address()); + req + }; + let domain = Eip712Domain::new(self.signer.chain_id); + let signature = self + .signer + .eth_signer + .sign_typed_data(&domain, &transaction_request) + .await?; + + let encoded_tx = transaction_request.get_signed_bytes(&signature, self.signer.chain_id); + let bytes = Bytes(encoded_tx); + + let tx_hash = self.provider.send_raw_transaction(bytes).await?; + + Ok(SyncTransactionHandle::new(tx_hash, &self.provider)) + } + + /// Creates an `EthereumProvider` to interact with the Ethereum network. + /// + /// Returns an error if wallet was created without providing an Ethereum private key. 
+ pub async fn ethereum( + &self, + web3_addr: impl AsRef, + ) -> Result, ClientError> { + let ethereum_provider = EthereumProvider::new( + &self.provider, + web3_addr, + self.signer.eth_signer.clone(), + self.signer.address, + ) + .await?; + + Ok(ethereum_provider) + } +} diff --git a/sdk/zksync-rs/tests/integration.rs b/sdk/zksync-rs/tests/integration.rs new file mode 100644 index 000000000000..158d719808f7 --- /dev/null +++ b/sdk/zksync-rs/tests/integration.rs @@ -0,0 +1,731 @@ +// //! Integration test for zkSync Rust SDK. +// //! +// //! In order to pass these tests, there must be a running +// //! instance of zkSync server and prover: +// //! +// //! ```bash +// //! zk server &! +// //! zk dummy-prover run &! +// //! zk test integration rust-sdk +// //! ``` +// //! +// //! Note: If tests are failing, first check the following two things: +// //! +// //! 1. If tests are failing with an error "cannot operate after unexpected tx failure", +// //! ensure that dummy prover is enabled. +// //! 2. If tests are failing with an error "replacement transaction underpriced", +// //! ensure that tests are ran in one thread. Running the tests with many threads won't +// //! work, since many thread will attempt in sending transactions from one (main) Ethereum +// //! account, which may result in nonce mismatch. +// //! Also, if there will be many tests running at once, and the server will die, it will be +// //! hard to distinguish which test exactly caused this problem. 
+ +// use std::env; +// use std::time::{Duration, Instant}; + +// use zksync::operations::SyncTransactionHandle; +// use zksync::{ +// error::ClientError, +// ethereum::ierc20_contract, +// provider::Provider, +// types::BlockStatus, +// web3::{ +// contract::{Contract, Options}, +// transports::Http, +// types::{Address, H160, H256, U256}, +// }, +// zksync_types::{tx::primitives::PackedEthSignature, Token, TokenLike, TxFeeTypes, ZkSyncTx}, +// EthereumProvider, Network, RpcProvider, Wallet, WalletCredentials, +// }; +// use zksync_eth_signer::{EthereumSigner, PrivateKeySigner}; + +// const ETH_ADDR: &str = "36615Cf349d7F6344891B1e7CA7C72883F5dc049"; +// const ETH_PRIVATE_KEY: &str = "7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"; +// const LOCALHOST_WEB3_ADDR: &str = "http://127.0.0.1:8545"; +// const DOCKER_WEB3_ADDR: &str = "http://geth:8545"; + +// fn web3_addr() -> &'static str { +// let ci: u8 = env::var("CI").map_or(0, |s| s.parse().unwrap()); +// if ci == 1 { +// DOCKER_WEB3_ADDR +// } else { +// LOCALHOST_WEB3_ADDR +// } +// } + +// fn eth_main_account_credentials() -> (H160, H256) { +// let addr = ETH_ADDR.parse().unwrap(); +// let eth_private_key = ETH_PRIVATE_KEY.parse().unwrap(); + +// (addr, eth_private_key) +// } + +// fn eth_random_account_credentials() -> (H160, H256) { +// let mut eth_private_key = H256::default(); +// eth_private_key.randomize(); + +// let address_from_pk = PackedEthSignature::address_from_private_key(ð_private_key).unwrap(); + +// (address_from_pk, eth_private_key) +// } + +// fn one_ether() -> U256 { +// U256::from(10).pow(18.into()) +// } + +// /// Auxiliary function that returns the balance of the account on Ethereum. 
+// async fn get_ethereum_balance( +// eth_provider: &EthereumProvider, +// address: Address, +// token: &Token, +// ) -> Result { +// if token.symbol == "ETH" { +// return eth_provider +// .client() +// .eth_balance(address) +// .await +// .map_err(|_e| anyhow::anyhow!("failed to request balance from Ethereum {}", _e)); +// } +// eth_provider +// .client() +// .call_contract_function( +// "balanceOf", +// address, +// None, +// Options::default(), +// None, +// token.address, +// ierc20_contract(), +// ) +// .await +// .map_err(|_e| anyhow::anyhow!("failed to request erc20 balance from Ethereum")) +// } + +// async fn wait_for_deposit_and_update_account_id(wallet: &mut Wallet) +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let timeout = Duration::from_secs(60); +// let mut poller = tokio::time::interval(std::time::Duration::from_millis(100)); +// let start = Instant::now(); +// while wallet +// .provider +// .account_info(wallet.address()) +// .await +// .unwrap() +// .id +// .is_none() +// { +// if start.elapsed() > timeout { +// panic!("Timeout elapsed while waiting for Ethereum transaction"); +// } +// poller.tick().await; +// } + +// wallet.update_account_id().await.unwrap(); +// assert!(wallet.account_id().is_some(), "Account ID was not set"); +// } + +// async fn transfer_to( +// token_like: impl Into, +// amount: impl Into, +// to: H160, +// ) -> Result<(), anyhow::Error> { +// let (main_eth_address, main_eth_private_key) = eth_main_account_credentials(); + +// let provider = RpcProvider::new(Network::Localhost); +// let eth_signer = PrivateKeySigner::new(main_eth_private_key); +// let credentials = +// WalletCredentials::from_eth_signer(main_eth_address, eth_signer, Network::Localhost) +// .await +// .unwrap(); + +// let wallet = Wallet::new(provider, credentials).await?; +// let ethereum = wallet.ethereum(web3_addr()).await?; +// let hash = ethereum +// .transfer(token_like.into(), amount.into(), to) +// .await +// .unwrap(); + +// 
ethereum.wait_for_tx(hash).await?; +// Ok(()) +// } + +// /// Creates a new wallet and tries to make a transfer +// /// from a new wallet without SigningKey. +// async fn test_tx_fail(zksync_depositor_wallet: &Wallet) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let provider = RpcProvider::new(Network::Localhost); + +// let (random_eth_address, random_eth_private_key) = eth_random_account_credentials(); +// let eth_signer = PrivateKeySigner::new(random_eth_private_key); +// let random_credentials = +// WalletCredentials::from_eth_signer(random_eth_address, eth_signer, Network::Localhost) +// .await?; +// let sync_wallet = Wallet::new(provider, random_credentials).await?; + +// let handle = sync_wallet +// .start_transfer() +// .to(zksync_depositor_wallet.address()) +// .token("ETH")? +// .amount(1_000_000u64) +// .send() +// .await; + +// assert!(matches!( +// handle, +// Err(ClientError::SigningError(_no_signing_key)) +// )); + +// Ok(()) +// } + +// /// Checks the correctness of the `Deposit` operation. +// async fn test_deposit( +// deposit_wallet: &Wallet, +// sync_wallet: &mut Wallet, +// token: &Token, +// amount: u128, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let ethereum = deposit_wallet.ethereum(web3_addr()).await?; + +// if !deposit_wallet.tokens.is_eth(token.address.into()) { +// if !ethereum.is_erc20_deposit_approved(token.address).await? 
{ +// let tx_approve_deposits = ethereum +// .limited_approve_erc20_token_deposits(token.address, U256::from(amount)) +// .await?; +// ethereum.wait_for_tx(tx_approve_deposits).await?; +// } + +// assert!( +// ethereum +// .is_limited_erc20_deposit_approved(token.address, U256::from(amount)) +// .await?, +// "Token should be approved" +// ); +// }; + +// let deposit_tx_hash = ethereum +// .deposit( +// &token.symbol as &str, +// U256::from(amount), +// sync_wallet.address(), +// ) +// .await?; + +// ethereum.wait_for_tx(deposit_tx_hash).await?; +// wait_for_deposit_and_update_account_id(sync_wallet).await; + +// if !sync_wallet.tokens.is_eth(token.address.into()) { +// // It should not be approved because we have approved only DEPOSIT_AMOUNT, not the maximum possible amount of deposit +// assert!( +// !ethereum +// .is_limited_erc20_deposit_approved(token.address, U256::from(amount)) +// .await? +// ); +// // Unlimited approve for deposit +// let tx_approve_deposits = ethereum.approve_erc20_token_deposits(token.address).await?; +// ethereum.wait_for_tx(tx_approve_deposits).await?; +// assert!(ethereum.is_erc20_deposit_approved(token.address).await?); +// } + +// // To be sure that the deposit is committed, we need to listen to the event `NewPriorityRequest` +// // rust SDK doesn't support getting this information yet, but it will be added soon. +// // assert_eq!(balance_after - balance_before, u256_to_big_dec(amount / 2)); + +// Ok(()) +// } + +// /// Checks the correctness of the `ChangePubKey` operation. +// async fn test_change_pubkey( +// sync_wallet: &Wallet, +// token_symbol: &str, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// if !sync_wallet.is_signing_key_set().await? { +// let handle = sync_wallet +// .start_change_pubkey() +// .fee_token(token_symbol)? 
+// .send() +// .await?; + +// handle +// .commit_timeout(Duration::from_secs(60)) +// .wait_for_commit() +// .await?; +// } +// assert!(sync_wallet.is_signing_key_set().await?); +// Ok(()) +// } + +// /// Makes a transfer from Alice to Bob inside zkSync +// /// checks the correctness of the amount of money before the transaction and after. +// async fn test_transfer( +// alice: &Wallet, +// bob: &Wallet, +// token_symbol: &str, +// transfer_amount: u128, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let transfer_amount = num::BigUint::from(transfer_amount); + +// let total_fee = alice +// .provider +// .get_tx_fee(TxFeeTypes::Transfer, bob.address(), token_symbol) +// .await? +// .total_fee; + +// let alice_balance_before = alice +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; + +// let bob_balance_before = bob +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; + +// let transfer_handle = alice +// .start_transfer() +// .to(bob.address()) +// .token(token_symbol)? +// .amount(transfer_amount.clone()) +// .send() +// .await?; + +// transfer_handle +// .commit_timeout(Duration::from_secs(180)) +// .wait_for_commit() +// .await?; + +// let alice_balance_after = alice +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; +// let bob_balance_after = bob +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; + +// assert_eq!( +// alice_balance_before - alice_balance_after, +// transfer_amount.clone() + total_fee +// ); +// assert_eq!(bob_balance_after - bob_balance_before, transfer_amount); +// Ok(()) +// } + +// /// Makes a transaction from the account to its own address +// /// checks if the expected amount of fee has been spent. 
+// async fn test_transfer_to_self( +// sync_wallet: &Wallet, +// token_symbol: &str, +// transfer_amount: u128, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let transfer_amount = num::BigUint::from(transfer_amount); +// let balance_before = sync_wallet +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; +// let total_fee = sync_wallet +// .provider +// .get_tx_fee(TxFeeTypes::Transfer, sync_wallet.address(), token_symbol) +// .await? +// .total_fee; + +// let transfer_handle = sync_wallet +// .start_transfer() +// .to(sync_wallet.address()) +// .token(token_symbol)? +// .amount(transfer_amount) +// .send() +// .await?; + +// transfer_handle +// .commit_timeout(Duration::from_secs(180)) +// .wait_for_commit() +// .await?; + +// let balance_after = sync_wallet +// .get_balance(BlockStatus::Committed, token_symbol) +// .await?; + +// assert_eq!(balance_before - balance_after, total_fee); + +// Ok(()) +// } + +// /// Makes a withdraw operation on L2 +// /// checks the correctness of their execution. +// async fn test_withdraw( +// eth_provider: &EthereumProvider, +// main_contract: &Contract, +// sync_wallet: &Wallet, +// withdraw_to: &Wallet, +// token: &Token, +// amount: u128, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let total_fee = sync_wallet +// .provider +// .get_tx_fee(TxFeeTypes::Withdraw, withdraw_to.address(), token.address) +// .await? 
+// .total_fee; +// let sync_balance_before = sync_wallet +// .get_balance(BlockStatus::Committed, &token.symbol as &str) +// .await?; +// let onchain_balance_before = +// get_ethereum_balance(eth_provider, withdraw_to.address(), token).await?; +// let pending_to_be_onchain_balance_before: U256 = { +// let query = main_contract.query( +// "getPendingBalance", +// (withdraw_to.address(), token.address), +// None, +// Options::default(), +// None, +// ); + +// query +// .await +// .map_err(|err| anyhow::anyhow!(format!("Contract query fail: {}", err)))? +// }; + +// let withdraw_handle = sync_wallet +// .start_withdraw() +// .to(withdraw_to.address()) +// .token(token.address)? +// .amount(amount) +// .send() +// .await?; + +// withdraw_handle +// .verify_timeout(Duration::from_secs(360)) +// .wait_for_verify() +// .await?; + +// let sync_balance_after = sync_wallet +// .get_balance(BlockStatus::Committed, &token.symbol as &str) +// .await?; +// let onchain_balance_after = +// get_ethereum_balance(eth_provider, withdraw_to.address(), token).await?; + +// let pending_to_be_onchain_balance_after: U256 = { +// let query = main_contract.query( +// "getPendingBalance", +// (withdraw_to.address(), token.address), +// None, +// Options::default(), +// None, +// ); + +// query +// .await +// .map_err(|err| anyhow::anyhow!(format!("Contract query fail: {}", err)))? +// }; + +// assert_eq!( +// onchain_balance_after - onchain_balance_before + pending_to_be_onchain_balance_after +// - pending_to_be_onchain_balance_before, +// U256::from(amount) +// ); +// assert_eq!( +// sync_balance_before - sync_balance_after, +// num::BigUint::from(amount) + total_fee +// ); + +// Ok(()) +// } + +// /// Makes transfers for different types of operations +// /// checks the correctness of their execution. 
+// async fn move_funds( +// main_contract: &Contract, +// eth_provider: &EthereumProvider, +// depositor_wallet: &Wallet, +// alice: &mut Wallet, +// bob: &Wallet, +// token_like: impl Into, +// deposit_amount: u128, +// ) -> Result<(), anyhow::Error> +// where +// S: EthereumSigner, +// P: Provider + Clone, +// { +// let token_like = token_like.into(); +// let token = depositor_wallet +// .tokens +// .resolve(token_like.clone()) +// .ok_or_else(|| anyhow::anyhow!("Error resolve token"))?; + +// let transfer_amount = deposit_amount / 10; +// let withdraw_amount = deposit_amount / 10; + +// test_deposit(depositor_wallet, alice, &token, deposit_amount).await?; +// println!("Deposit ok, Token: {}", token.symbol); + +// test_change_pubkey(alice, &token.symbol).await?; +// println!("Change pubkey ok"); + +// test_transfer(alice, bob, &token.symbol, transfer_amount).await?; +// println!("Transfer to new ok, Token: {}", token.symbol); + +// test_transfer(alice, bob, &token.symbol, transfer_amount).await?; +// println!("Transfer ok, Token: {}", token.symbol); + +// test_transfer_to_self(&alice, &token.symbol, transfer_amount).await?; +// println!("Transfer to self ok, Token: {}", token.symbol); + +// test_withdraw( +// ð_provider, +// &main_contract, +// &alice, +// &bob, +// &token, +// withdraw_amount, +// ) +// .await?; +// println!("Withdraw ok, Token: {}", token.symbol); + +// // Currently fast withdraw aren't supported by zksync-rs, but they will be in the near future. +// // test_fast_withdraw(eth, main_contract, &bob, &bob, &token, withdraw_amount); +// // println!("Fast withdraw ok, Token: {}", token.symbol); + +// // Currently multi transactions aren't supported by zksync-rs, but they will be in the near future. 
+// // test_multi_transfer(alice, bob, &token.symbol, transfersAmount / 2); +// // println!("Batched transfers ok, Token: {}, token.symbol"); + +// Ok(()) +// } + +// /// Auxiliary function that generates a new wallet, performs an initial deposit and changes the public key. +// async fn init_account_with_one_ether( +// ) -> Result, anyhow::Error> { +// let (eth_address, eth_private_key) = eth_random_account_credentials(); + +// // Transfer funds from "rich" account to a randomly created one (so we won't reuse the same +// // account in subsequent test runs). +// transfer_to("ETH", one_ether(), eth_address).await?; + +// let provider = RpcProvider::new(Network::Localhost); + +// let eth_signer = PrivateKeySigner::new(eth_private_key); +// let credentials = +// WalletCredentials::from_eth_signer(eth_address, eth_signer, Network::Localhost) +// .await +// .unwrap(); + +// let mut wallet = Wallet::new(provider, credentials).await?; +// let ethereum = wallet.ethereum(web3_addr()).await?; + +// let deposit_tx_hash = ethereum +// .deposit("ETH", one_ether() / 2, wallet.address()) +// .await?; + +// ethereum.wait_for_tx(deposit_tx_hash).await?; + +// // Update stored wallet ID after we initialized a wallet via deposit. +// wait_for_deposit_and_update_account_id(&mut wallet).await; + +// if !wallet.is_signing_key_set().await? { +// let handle = wallet +// .start_change_pubkey() +// .fee_token("ETH")? 
+// .send() +// .await?; + +// handle +// .commit_timeout(Duration::from_secs(60)) +// .wait_for_commit() +// .await?; +// } + +// Ok(wallet) +// } + +// async fn make_wallet( +// provider: RpcProvider, +// (eth_address, eth_private_key): (H160, H256), +// ) -> Result, ClientError> { +// let eth_signer = PrivateKeySigner::new(eth_private_key); +// let credentials = +// WalletCredentials::from_eth_signer(eth_address, eth_signer, Network::Localhost).await?; +// Wallet::new(provider, credentials).await +// } + +// #[tokio::test] +// #[cfg_attr(not(feature = "integration-tests"), ignore)] +// async fn comprehensive_test() -> Result<(), anyhow::Error> { +// let provider = RpcProvider::new(Network::Localhost); + +// let main_wallet = make_wallet(provider.clone(), eth_main_account_credentials()).await?; +// let sync_depositor_wallet = +// make_wallet(provider.clone(), eth_random_account_credentials()).await?; +// let mut alice_wallet1 = make_wallet(provider.clone(), eth_random_account_credentials()).await?; +// let bob_wallet1 = make_wallet(provider.clone(), eth_random_account_credentials()).await?; + +// let ethereum = main_wallet.ethereum(web3_addr()).await?; + +// let main_contract = { +// let address_response = provider.contract_address().await?; +// let contract_address = if address_response.main_contract.starts_with("0x") { +// &address_response.main_contract[2..] +// } else { +// &address_response.main_contract +// } +// .parse()?; +// ethereum +// .client() +// .main_contract_with_address(contract_address) +// }; + +// let token_eth = sync_depositor_wallet +// .tokens +// .resolve("ETH".into()) +// .ok_or_else(|| anyhow::anyhow!("Error resolve token"))?; +// let token_dai = sync_depositor_wallet +// .tokens +// .resolve("DAI".into()) +// .ok_or_else(|| anyhow::anyhow!("Error resolve token"))?; + +// let dai_deposit_amount = U256::from(10).pow(18.into()) * 10000; // 10000 DAI + +// // Move ETH to wallets so they will have some funds for L1 transactions. 
+// let eth_deposit_amount = U256::from(10).pow(17.into()); // 0.1 ETH +// transfer_to("ETH", eth_deposit_amount, sync_depositor_wallet.address()).await?; +// transfer_to("ETH", eth_deposit_amount, alice_wallet1.address()).await?; +// transfer_to("ETH", eth_deposit_amount, bob_wallet1.address()).await?; + +// transfer_to("DAI", dai_deposit_amount, sync_depositor_wallet.address()).await?; + +// assert_eq!( +// get_ethereum_balance(ðereum, sync_depositor_wallet.address(), &token_eth).await?, +// eth_deposit_amount +// ); +// assert_eq!( +// get_ethereum_balance(ðereum, sync_depositor_wallet.address(), &token_dai).await?, +// dai_deposit_amount +// ); + +// test_tx_fail(&sync_depositor_wallet).await?; + +// move_funds( +// &main_contract, +// ðereum, +// &sync_depositor_wallet, +// &mut alice_wallet1, +// &bob_wallet1, +// "DAI", +// // 200 DAI +// 200_000_000_000_000_000_000u128, +// ) +// .await?; + +// Ok(()) +// } + +// #[tokio::test] +// #[cfg_attr(not(feature = "integration-tests"), ignore)] +// async fn simple_transfer() -> Result<(), anyhow::Error> { +// let wallet = init_account_with_one_ether().await?; + +// // Perform a transfer to itself. +// let handle = wallet +// .start_transfer() +// .to(wallet.signer.address) +// .token("ETH")? 
+// .amount(1_000_000u64) +// .send() +// .await?; + +// handle +// .commit_timeout(Duration::from_secs(180)) +// .wait_for_commit() +// .await?; + +// Ok(()) +// } + +// #[tokio::test] +// #[cfg_attr(not(feature = "integration-tests"), ignore)] +// async fn batch_transfer() -> Result<(), anyhow::Error> { +// let wallet = init_account_with_one_ether().await?; + +// const RECIPIENT_COUNT: usize = 4; +// let recipients = vec![eth_random_account_credentials().0; RECIPIENT_COUNT]; + +// let token_like = TokenLike::Symbol("ETH".to_owned()); +// let token = wallet +// .tokens +// .resolve(token_like.clone()) +// .expect("ETH token resolving failed"); + +// let mut nonce = wallet.account_info().await?.committed.nonce; + +// // Sign a transfer for each recipient created above +// let mut signed_transfers = Vec::with_capacity(recipients.len()); + +// // Obtain total fee for this batch +// let mut total_fee = Some( +// wallet +// .provider +// .get_txs_batch_fee( +// vec![TxFeeTypes::Transfer; recipients.len()], +// recipients.clone(), +// token_like.clone(), +// ) +// .await?, +// ); + +// for recipient in recipients { +// let (transfer, signature) = wallet +// .signer +// .sign_transfer( +// token.clone(), +// 1_000_000u64.into(), +// // Set a total batch fee in the first transaction. +// total_fee.take().unwrap_or_default(), +// recipient, +// nonce, +// Default::default(), +// ) +// .await +// .expect("Transfer signing error"); + +// signed_transfers.push((ZkSyncTx::Transfer(Box::new(transfer)), signature)); + +// *nonce += 1; +// } + +// // Send the batch and store its transaction hashes +// let handles = wallet +// .provider +// .send_txs_batch(signed_transfers, None) +// .await? 
+// .into_iter() +// .map(|tx_hash| SyncTransactionHandle::new(tx_hash, wallet.provider.clone())); + +// for handle in handles { +// handle +// .commit_timeout(Duration::from_secs(180)) +// .wait_for_commit() +// .await?; +// } + +// Ok(()) +// } diff --git a/sdk/zksync-rs/tests/unit.rs b/sdk/zksync-rs/tests/unit.rs new file mode 100644 index 000000000000..033acb75e7d7 --- /dev/null +++ b/sdk/zksync-rs/tests/unit.rs @@ -0,0 +1,653 @@ +// use std::collections::HashMap; +// use zksync::{tokens_cache::TokensCache, utils::*, web3::types::H160, zksync_types::Token}; +// use zksync_config::test_config::unit_vectors::{Config as TestVectorsConfig, TestEntry}; +// use zksync_crypto::PrivateKey; +// use zksync_types::{tx::TxSignature, AccountId, Nonce, TokenId}; + +// #[test] +// fn test_tokens_cache() { +// let mut tokens: HashMap = HashMap::default(); + +// let token_eth = Token::new(TokenId(0), H160::default(), "ETH", 18); +// tokens.insert("ETH".to_string(), token_eth.clone()); +// let token_dai = Token::new(TokenId(1), H160::random(), "DAI", 18); +// tokens.insert("DAI".to_string(), token_dai.clone()); + +// let uncahed_token = Token::new(TokenId(2), H160::random(), "UNC", 5); + +// let tokens_hash = TokensCache::new(tokens); + +// assert_eq!( +// tokens_hash.resolve(token_eth.address.into()), +// Some(token_eth.clone()) +// ); +// assert_eq!( +// tokens_hash.resolve(token_eth.id.into()), +// Some(token_eth.clone()) +// ); +// assert_eq!( +// tokens_hash.resolve((&token_eth.symbol as &str).into()), +// Some(token_eth.clone()) +// ); + +// assert_eq!( +// tokens_hash.resolve(token_dai.address.into()), +// Some(token_dai.clone()) +// ); +// assert_eq!( +// tokens_hash.resolve(token_dai.id.into()), +// Some(token_dai.clone()) +// ); +// assert_eq!( +// tokens_hash.resolve((&token_dai.symbol as &str).into()), +// Some(token_dai.clone()) +// ); + +// assert_eq!(tokens_hash.resolve(uncahed_token.address.into()), None); +// 
assert_eq!(tokens_hash.resolve(uncahed_token.id.into()), None); +// assert_eq!( +// tokens_hash.resolve((&uncahed_token.symbol as &str).into()), +// None +// ); + +// assert!(tokens_hash.is_eth(token_eth.address.into())); +// assert!(tokens_hash.is_eth(token_eth.id.into())); +// assert!(tokens_hash.is_eth((&token_eth.symbol as &str).into())); + +// assert!(!tokens_hash.is_eth(token_dai.address.into())); +// assert!(!tokens_hash.is_eth(token_dai.id.into())); +// assert!(!tokens_hash.is_eth((&token_dai.symbol as &str).into())); +// } + +// fn priv_key_from_raw(raw: &[u8]) -> Option { +// use zksync_crypto::{ +// bellman::{pairing::ff::PrimeField, PrimeFieldRepr}, +// franklin_crypto::alt_babyjubjub::fs::FsRepr, +// priv_key_from_fs, Fs, +// }; + +// let mut fs_repr = FsRepr::default(); +// fs_repr.read_be(raw).ok()?; +// Fs::from_repr(fs_repr).ok().map(priv_key_from_fs) +// } + +// fn assert_tx_signature(signature: &TxSignature, expected_pub: &str, expected_sig: &str) { +// let TxSignature { pub_key, signature } = signature; + +// let pub_point = pub_key.serialize_packed().unwrap(); +// assert_eq!(hex::encode(pub_point), expected_pub); + +// let packed_sig = signature.serialize_packed().unwrap(); +// assert_eq!(hex::encode(packed_sig), expected_sig); +// } + +// #[cfg(test)] +// mod primitives_with_vectors { +// use super::*; + +// #[test] +// fn test_signature() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.crypto_primitives.items { +// let private_key = +// private_key_from_seed(&inputs.seed).expect("Cannot get key from seed"); + +// assert_eq!( +// priv_key_from_raw(&outputs.private_key).unwrap().0, +// private_key.0 +// ); + +// let signature = TxSignature::sign_musig(&private_key, &inputs.message); +// assert_tx_signature(&signature, &outputs.pub_key, &outputs.signature); +// } +// } +// } + +// #[cfg(test)] +// mod utils_with_vectors { +// use super::*; +// use zksync_utils::format_units; + +// #[test] 
+// fn test_token_packing() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.utils.amount_packing.items { +// let token_amount = inputs.value; + +// assert_eq!(is_token_amount_packable(&token_amount), outputs.packable); +// assert_eq!( +// closest_packable_token_amount(&token_amount), +// outputs.closest_packable +// ); +// assert_eq!(pack_token_amount(&token_amount), outputs.packed_value); +// } +// } + +// #[test] +// fn test_fee_packing() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.utils.fee_packing.items { +// let fee_amount = inputs.value; + +// assert_eq!(is_fee_amount_packable(&fee_amount), outputs.packable); +// assert_eq!( +// closest_packable_fee_amount(&fee_amount), +// outputs.closest_packable +// ); +// assert_eq!(pack_fee_amount(&fee_amount), outputs.packed_value); +// } +// } + +// #[test] +// fn test_formatting() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.utils.token_formatting.items { +// let units_str = format_units(inputs.amount, inputs.decimals); +// assert_eq!(format!("{} {}", units_str, inputs.token), outputs.formatted); +// } +// } +// } + +// #[cfg(test)] +// mod signatures_with_vectors { +// use super::*; +// use zksync::{signer::Signer, WalletCredentials}; +// use zksync_config::test_config::unit_vectors::TxData; +// use zksync_eth_signer::PrivateKeySigner; +// use zksync_types::tx::{ChangePubKeyECDSAData, ChangePubKeyEthAuthData}; +// use zksync_types::{network::Network, AccountId, Address, H256}; + +// async fn get_signer( +// eth_private_key_raw: &[u8], +// from_address: Address, +// account_id: AccountId, +// ) -> Signer { +// let eth_private_key = H256::from_slice(eth_private_key_raw); +// let eth_signer = PrivateKeySigner::new(eth_private_key); + +// let creds = WalletCredentials::from_eth_signer(from_address, eth_signer, Network::Mainnet) +// .await +// 
.unwrap(); + +// let mut signer = Signer::with_credentials(creds); +// signer.set_account_id(Some(account_id)); +// signer +// } + +// #[tokio::test] +// async fn test_transfer_signature() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.transactions.items { +// if let TxData::Transfer { +// data: transfer_tx, +// eth_sign_data: sign_data, +// } = &inputs.data +// { +// let signer = get_signer( +// &inputs.eth_private_key, +// transfer_tx.from, +// sign_data.account_id, +// ) +// .await; + +// let token = Token { +// id: transfer_tx.token_id, +// address: Default::default(), +// symbol: sign_data.string_token.clone(), +// decimals: 0, +// }; +// let (transfer, eth_signature) = signer +// .sign_transfer( +// token, +// transfer_tx.amount.clone(), +// transfer_tx.fee.clone(), +// sign_data.to, +// sign_data.nonce, +// transfer_tx.time_range, +// ) +// .await +// .expect("Transfer signing error"); + +// assert_eq!(transfer.get_bytes(), outputs.sign_bytes); +// assert_tx_signature( +// &transfer.signature, +// &outputs.signature.pub_key, +// &outputs.signature.signature, +// ); + +// assert_eq!( +// transfer +// .get_ethereum_sign_message(&sign_data.string_token, 0) +// .into_bytes(), +// outputs.eth_sign_message.unwrap() +// ); + +// if let Some(expected_eth_signature) = outputs.eth_signature { +// let eth_signature = eth_signature.unwrap().serialize_packed(); +// assert_eq!(ð_signature[..], expected_eth_signature.as_slice()); +// } +// } +// } +// } + +// #[tokio::test] +// async fn test_withdraw_signature() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.transactions.items { +// if let TxData::Withdraw { +// data: withdraw_tx, +// eth_sign_data: sign_data, +// } = &inputs.data +// { +// let signer = get_signer( +// &inputs.eth_private_key, +// withdraw_tx.from, +// sign_data.account_id, +// ) +// .await; + +// let token = Token { +// id: 
withdraw_tx.token_id, +// address: Default::default(), +// symbol: sign_data.string_token.clone(), +// decimals: 0, +// }; +// let (withdraw, eth_signature) = signer +// .sign_withdraw( +// token, +// withdraw_tx.amount.clone(), +// withdraw_tx.fee.clone(), +// sign_data.eth_address, +// sign_data.nonce, +// withdraw_tx.time_range, +// ) +// .await +// .expect("Withdraw signing error"); + +// assert_eq!(withdraw.get_bytes(), outputs.sign_bytes); +// assert_tx_signature( +// &withdraw.signature, +// &outputs.signature.pub_key, +// &outputs.signature.signature, +// ); + +// assert_eq!( +// withdraw +// .get_ethereum_sign_message(&sign_data.string_token, 0) +// .into_bytes(), +// outputs.eth_sign_message.unwrap() +// ); + +// if let Some(expected_eth_signature) = outputs.eth_signature { +// let eth_signature = eth_signature.unwrap().serialize_packed(); +// assert_eq!(ð_signature[..], expected_eth_signature.as_slice()); +// } +// } +// } +// } + +// #[tokio::test] +// async fn test_change_pubkey_signature() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.transactions.items { +// if let TxData::ChangePubKey { +// data: change_pubkey_tx, +// eth_sign_data: sign_data, +// } = &inputs.data +// { +// let mut signer = get_signer( +// &inputs.eth_private_key, +// change_pubkey_tx.account, +// sign_data.account_id, +// ) +// .await; +// signer.pubkey_hash = change_pubkey_tx.new_pk_hash; + +// let token = Token { +// id: change_pubkey_tx.fee_token_id, +// address: Default::default(), +// symbol: String::new(), +// decimals: 0, +// }; +// let change_pub_key = signer +// .sign_change_pubkey_tx( +// sign_data.nonce, +// false, +// token, +// change_pubkey_tx.fee.clone(), +// change_pubkey_tx.time_range, +// ) +// .await +// .expect("Change pub key signing error"); + +// assert_eq!(change_pub_key.get_bytes(), outputs.sign_bytes); +// assert_tx_signature( +// &change_pub_key.signature, +// &outputs.signature.pub_key, +// 
&outputs.signature.signature, +// ); + +// assert_eq!( +// change_pub_key.get_eth_signed_data().unwrap(), +// outputs.eth_sign_message.unwrap() +// ); + +// if let Some(expected_eth_signature) = outputs.eth_signature { +// let eth_signature = match &change_pub_key.eth_auth_data { +// Some(ChangePubKeyEthAuthData::ECDSA(ChangePubKeyECDSAData { +// eth_signature, +// .. +// })) => eth_signature.serialize_packed(), +// _ => panic!("No ChangePubKey ethereum siganture"), +// }; +// assert_eq!(ð_signature[..], expected_eth_signature.as_slice()); +// } +// } +// } +// } + +// #[tokio::test] +// async fn test_forced_exit_signature() { +// let test_vectors = TestVectorsConfig::load(); +// for TestEntry { inputs, outputs } in test_vectors.transactions.items { +// if let TxData::ForcedExit { data: forced_exit } = &inputs.data { +// let signer = get_signer( +// &inputs.eth_private_key, +// forced_exit.from, +// forced_exit.initiator_account_id, +// ) +// .await; + +// let token = Token { +// id: forced_exit.token_id, +// address: Default::default(), +// symbol: String::new(), +// decimals: 0, +// }; +// let (forced_exit, _) = signer +// .sign_forced_exit( +// forced_exit.target, +// token, +// forced_exit.fee.clone(), +// forced_exit.nonce, +// forced_exit.time_range, +// ) +// .await +// .expect("Forced exit signing error"); + +// assert_eq!(forced_exit.get_bytes(), outputs.sign_bytes); +// assert_tx_signature( +// &forced_exit.signature, +// &outputs.signature.pub_key, +// &outputs.signature.signature, +// ); +// } +// } +// } +// } + +// #[cfg(test)] +// mod wallet_tests { +// use super::*; +// use num::{BigUint, ToPrimitive}; +// use zksync::{ +// error::ClientError, +// provider::Provider, +// signer::Signer, +// types::{ +// AccountInfo, AccountState, BlockStatus, ContractAddress, EthOpInfo, Fee, Tokens, +// TransactionInfo, +// }, +// Network, Wallet, WalletCredentials, +// }; +// use zksync_eth_signer::PrivateKeySigner; +// use zksync_types::{ +// 
tokens::get_genesis_token_list, +// tx::{PackedEthSignature, TxHash}, +// Address, PubKeyHash, TokenId, TokenLike, TxFeeTypes, ZkSyncTx, H256, +// }; + +// #[derive(Debug, Clone)] +// /// Provides some hardcoded values the `Provider` responsible to +// /// without communicating with the network +// struct MockProvider { +// network: Network, +// eth_private_key: H256, +// } + +// impl MockProvider { +// async fn pub_key_hash(&self) -> PubKeyHash { +// let address = +// PackedEthSignature::address_from_private_key(&self.eth_private_key).unwrap(); +// let eth_signer = PrivateKeySigner::new(self.eth_private_key); +// let creds = WalletCredentials::from_eth_signer(address, eth_signer, self.network) +// .await +// .unwrap(); +// let signer = Signer::with_credentials(creds); +// signer.pubkey_hash +// } +// } + +// #[async_trait::async_trait] +// impl Provider for MockProvider { +// /// Returns the example `AccountInfo` instance: +// /// - assigns the '42' value to account_id; +// /// - assigns the PubKeyHash to match the wallet's signer's PubKeyHash +// /// - adds single entry of "DAI" token to the committed balances; +// /// - adds single entry of "USDC" token to the verified balances. 
+// async fn account_info(&self, address: Address) -> Result { +// let mut committed_balances = HashMap::new(); +// committed_balances.insert("DAI".into(), BigUint::from(12345_u32).into()); + +// let mut verified_balances = HashMap::new(); +// verified_balances.insert("USDC".into(), BigUint::from(98765_u32).into()); + +// Ok(AccountInfo { +// address, +// id: Some(AccountId(42)), +// depositing: Default::default(), +// committed: AccountState { +// balances: committed_balances, +// nonce: Nonce(0), +// pub_key_hash: self.pub_key_hash().await, +// }, +// verified: AccountState { +// balances: verified_balances, +// ..Default::default() +// }, +// }) +// } + +// /// Returns first three tokens from the configuration found in +// /// $ZKSYNC_HOME/etc/tokens/.json +// async fn tokens(&self) -> Result { +// let genesis_tokens = get_genesis_token_list(&self.network.to_string()) +// .expect("Initial token list not found"); + +// let tokens = (1..) +// .zip(&genesis_tokens[..3]) +// .map(|(id, token)| Token { +// id: TokenId(id), +// symbol: token.symbol.clone(), +// address: token.address[2..] +// .parse() +// .expect("failed to parse token address"), +// decimals: token.decimals, +// }) +// .map(|token| (token.symbol.clone(), token)) +// .collect(); +// Ok(tokens) +// } + +// async fn tx_info(&self, _tx_hash: TxHash) -> Result { +// unreachable!() +// } + +// async fn get_tx_fee( +// &self, +// _tx_type: TxFeeTypes, +// _address: Address, +// _token: impl Into + Send + 'async_trait, +// ) -> Result { +// unreachable!() +// } + +// async fn get_txs_batch_fee( +// &self, +// _tx_types: Vec, +// _addresses: Vec
, +// _token: impl Into + Send + 'async_trait, +// ) -> Result { +// unreachable!() +// } + +// async fn ethop_info(&self, _serial_id: u32) -> Result { +// unreachable!() +// } + +// async fn get_eth_tx_for_withdrawal( +// &self, +// _withdrawal_hash: TxHash, +// ) -> Result, ClientError> { +// unreachable!() +// } + +// /// Returns the example `ContractAddress` instance: +// /// - the HEX-encoded sequence of bytes [0..20) provided as the `main_contract`; +// /// - the `gov_contract` is not usable in tests and it is simply an empty string. +// async fn contract_address(&self) -> Result { +// Ok(ContractAddress { +// main_contract: "0x000102030405060708090a0b0c0d0e0f10111213".to_string(), +// gov_contract: "".to_string(), +// }) +// } + +// async fn send_tx( +// &self, +// _tx: ZkSyncTx, +// _eth_signature: Option, +// ) -> Result { +// unreachable!() +// } + +// async fn send_txs_batch( +// &self, +// _txs_signed: Vec<(ZkSyncTx, Option)>, +// _eth_signature: Option, +// ) -> Result, ClientError> { +// unreachable!() +// } + +// fn network(&self) -> Network { +// self.network +// } +// } + +// async fn get_test_wallet( +// private_key_raw: &[u8], +// network: Network, +// ) -> Wallet { +// let private_key = H256::from_slice(private_key_raw); +// let address = PackedEthSignature::address_from_private_key(&private_key).unwrap(); + +// let eth_signer = PrivateKeySigner::new(private_key); +// let creds = WalletCredentials::from_eth_signer(address, eth_signer, Network::Mainnet) +// .await +// .unwrap(); + +// let provider = MockProvider { +// network, +// eth_private_key: private_key, +// }; +// Wallet::new(provider, creds).await.unwrap() +// } + +// #[tokio::test] +// async fn test_wallet_address() { +// let wallet = get_test_wallet(&[5; 32], Network::Mainnet).await; +// let expected_address = +// PackedEthSignature::address_from_private_key(&H256::from([5; 32])).unwrap(); +// assert_eq!(wallet.address(), expected_address); +// } + +// #[tokio::test] +// async fn 
test_wallet_account_info() { +// let wallet = get_test_wallet(&[10; 32], Network::Mainnet).await; +// let account_info = wallet.account_info().await.unwrap(); +// assert_eq!(account_info.address, wallet.address()); +// } + +// #[tokio::test] +// async fn test_wallet_account_id() { +// let wallet = get_test_wallet(&[14; 32], Network::Mainnet).await; +// assert_eq!(wallet.account_id(), Some(AccountId(42))); +// } + +// #[tokio::test] +// async fn test_wallet_refresh_tokens() { +// let mut wallet = get_test_wallet(&[20; 32], Network::Mainnet).await; +// let _dai_token = wallet +// .tokens +// .resolve(TokenLike::Symbol("DAI".into())) +// .unwrap(); + +// wallet.provider.network = Network::Rinkeby; +// wallet.refresh_tokens_cache().await.unwrap(); + +// // DAI is not in the Rinkeby network +// assert!(wallet +// .tokens +// .resolve(TokenLike::Symbol("DAI".into())) +// .is_none()); +// } + +// #[tokio::test] +// async fn test_wallet_get_balance_committed() { +// let wallet = get_test_wallet(&[40; 32], Network::Mainnet).await; +// let balance = wallet +// .get_balance(BlockStatus::Committed, "DAI") +// .await +// .unwrap(); +// assert_eq!(balance.to_u32(), Some(12345)); +// } + +// #[tokio::test] +// async fn test_wallet_get_balance_committed_not_existent() { +// let wallet = get_test_wallet(&[40; 32], Network::Mainnet).await; +// let result = wallet.get_balance(BlockStatus::Committed, "ETH").await; + +// assert_eq!(result.unwrap_err(), ClientError::UnknownToken); +// } + +// #[tokio::test] +// async fn test_wallet_get_balance_verified() { +// let wallet = get_test_wallet(&[50; 32], Network::Mainnet).await; +// let balance = wallet +// .get_balance(BlockStatus::Verified, "USDC") +// .await +// .unwrap(); +// assert_eq!(balance.to_u32(), Some(98765)); +// } + +// #[tokio::test] +// async fn test_wallet_get_balance_verified_not_existent() { +// let wallet = get_test_wallet(&[50; 32], Network::Mainnet).await; +// let result = wallet.get_balance(BlockStatus::Verified, 
"ETH").await; + +// assert_eq!(result.unwrap_err(), ClientError::UnknownToken); +// } + +// #[tokio::test] +// async fn test_wallet_is_signing_key_set() { +// let wallet = get_test_wallet(&[50; 32], Network::Mainnet).await; +// assert!(wallet.is_signing_key_set().await.unwrap()); +// } + +// #[tokio::test] +// async fn test_wallet_ethereum() { +// let wallet = get_test_wallet(&[50; 32], Network::Mainnet).await; +// let eth_provider = wallet.ethereum("http://some.random.url").await.unwrap(); +// let expected_address: Vec<_> = (0..20).collect(); +// assert_eq!(eth_provider.contract_address().as_bytes(), expected_address); +// } +// } diff --git a/sdk/zksync-web3.js/.gitignore b/sdk/zksync-web3.js/.gitignore new file mode 100644 index 000000000000..378eac25d311 --- /dev/null +++ b/sdk/zksync-web3.js/.gitignore @@ -0,0 +1 @@ +build diff --git a/sdk/zksync-web3.js/abi/ContractDeployer.json b/sdk/zksync-web3.js/abi/ContractDeployer.json new file mode 100644 index 000000000000..55dfa544b8fb --- /dev/null +++ b/sdk/zksync-web3.js/abi/ContractDeployer.json @@ -0,0 +1,374 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "deployerAddress", + "type": "address" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "bytecodeHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "address", + "name": "contractAddress", + "type": "address" + } + ], + "name": "ContractDeployed", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "_bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_input", + "type": "bytes" + } + ], + "name": "create", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_salt", 
+ "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "_bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_input", + "type": "bytes" + } + ], + "name": "create2", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_salt", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "_bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_input", + "type": "bytes" + }, + { + "internalType": "enum IContractDeployer.AccountAbstractionVersion", + "name": "_aaVersion", + "type": "uint8" + } + ], + "name": "create2Account", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "_bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_input", + "type": "bytes" + }, + { + "internalType": "enum IContractDeployer.AccountAbstractionVersion", + "name": "_aaVersion", + "type": "uint8" + } + ], + "name": "createAccount", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "extendedAccountVersion", + "outputs": [ + { + "internalType": "enum IContractDeployer.AccountAbstractionVersion", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "newAddress", + "type": "address" + }, + { + 
"internalType": "bool", + "name": "callConstructor", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "input", + "type": "bytes" + } + ], + "internalType": "struct ContractDeployer.ForceDeployment", + "name": "_deployment", + "type": "tuple" + }, + { + "internalType": "address", + "name": "_sender", + "type": "address" + } + ], + "name": "forceDeployOnAddress", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "address", + "name": "newAddress", + "type": "address" + }, + { + "internalType": "bool", + "name": "callConstructor", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "input", + "type": "bytes" + } + ], + "internalType": "struct ContractDeployer.ForceDeployment[]", + "name": "_deployments", + "type": "tuple[]" + } + ], + "name": "forceDeployOnAddresses", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "getAccountInfo", + "outputs": [ + { + "components": [ + { + "internalType": "enum IContractDeployer.AccountAbstractionVersion", + "name": "supportedAAVersion", + "type": "uint8" + }, + { + "internalType": "enum IContractDeployer.AccountNonceOrdering", + "name": "nonceOrdering", + "type": "uint8" + } + ], + "internalType": "struct IContractDeployer.AccountInfo", + "name": "info", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_senderNonce", + "type": "uint256" + } + ], + "name": "getNewAddressCreate", + 
"outputs": [ + { + "internalType": "address", + "name": "newAddress", + "type": "address" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "_bytecodeHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "_salt", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_input", + "type": "bytes" + } + ], + "name": "getNewAddressCreate2", + "outputs": [ + { + "internalType": "address", + "name": "newAddress", + "type": "address" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "enum IContractDeployer.AccountAbstractionVersion", + "name": "_version", + "type": "uint8" + } + ], + "name": "updateAccountVersion", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "enum IContractDeployer.AccountNonceOrdering", + "name": "_nonceOrdering", + "type": "uint8" + } + ], + "name": "updateNonceOrdering", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IAllowList.json b/sdk/zksync-web3.js/abi/IAllowList.json new file mode 100644 index 000000000000..21f49da47619 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IAllowList.json @@ -0,0 +1,337 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "target", + "type": "address" + }, + { + "indexed": false, + "internalType": "enum IAllowList.AccessMode", + "name": "previousMode", + "type": "uint8" + }, + { + "indexed": false, + "internalType": "enum IAllowList.AccessMode", + "name": "newMode", + "type": "uint8" + } + ], + "name": "UpdateAccessMode", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "caller", + "type": "address" + }, 
+ { + "indexed": true, + "internalType": "address", + "name": "target", + "type": "address" + }, + { + "indexed": true, + "internalType": "bytes4", + "name": "functionSig", + "type": "bytes4" + }, + { + "indexed": false, + "internalType": "bool", + "name": "status", + "type": "bool" + } + ], + "name": "UpdateCallPermission", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_caller", + "type": "address" + }, + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "bytes4", + "name": "_functionSig", + "type": "bytes4" + } + ], + "name": "canCall", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_target", + "type": "address" + } + ], + "name": "getAccessMode", + "outputs": [ + { + "internalType": "enum IAllowList.AccessMode", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + } + ], + "name": "getTokenDepositLimitData", + "outputs": [ + { + "components": [ + { + "internalType": "bool", + "name": "depositLimitation", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "depositCap", + "type": "uint256" + } + ], + "internalType": "struct IAllowList.Deposit", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + } + ], + "name": "getTokenWithdrawalLimitData", + "outputs": [ + { + "components": [ + { + "internalType": "bool", + "name": "withdrawalLimitation", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "withdrawalFactor", + "type": "uint256" + } + ], + "internalType": "struct IAllowList.Withdrawal", + "name": "", + "type": "tuple" + } + 
], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_caller", + "type": "address" + }, + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "bytes4", + "name": "_functionSig", + "type": "bytes4" + } + ], + "name": "hasSpecialAccessToCall", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "enum IAllowList.AccessMode", + "name": "_accessMode", + "type": "uint8" + } + ], + "name": "setAccessMode", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "_targets", + "type": "address[]" + }, + { + "internalType": "enum IAllowList.AccessMode[]", + "name": "_accessMode", + "type": "uint8[]" + } + ], + "name": "setBatchAccessMode", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "_callers", + "type": "address[]" + }, + { + "internalType": "address[]", + "name": "_targets", + "type": "address[]" + }, + { + "internalType": "bytes4[]", + "name": "_functionSigs", + "type": "bytes4[]" + }, + { + "internalType": "bool[]", + "name": "_enables", + "type": "bool[]" + } + ], + "name": "setBatchPermissionToCall", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "bool", + "name": "_depositLimitation", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "_depositCap", + "type": "uint256" + } + ], + "name": "setDepositLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + 
"internalType": "address", + "name": "_caller", + "type": "address" + }, + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "bytes4", + "name": "_functionSig", + "type": "bytes4" + }, + { + "internalType": "bool", + "name": "_enable", + "type": "bool" + } + ], + "name": "setPermissionToCall", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "bool", + "name": "_withdrawalLimitation", + "type": "bool" + }, + { + "internalType": "uint256", + "name": "_withdrawalFactor", + "type": "uint256" + } + ], + "name": "setWithdrawalLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IERC1271.json b/sdk/zksync-web3.js/abi/IERC1271.json new file mode 100644 index 000000000000..5e153118a2af --- /dev/null +++ b/sdk/zksync-web3.js/abi/IERC1271.json @@ -0,0 +1,28 @@ +{ + "abi": [ + { + "inputs": [ + { + "internalType": "bytes32", + "name": "hash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + } + ], + "name": "isValidSignature", + "outputs": [ + { + "internalType": "bytes4", + "name": "magicValue", + "type": "bytes4" + } + ], + "stateMutability": "view", + "type": "function" + } + ] +} \ No newline at end of file diff --git a/sdk/zksync-web3.js/abi/IERC20.json b/sdk/zksync-web3.js/abi/IERC20.json new file mode 100644 index 000000000000..2fc7246362c0 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IERC20.json @@ -0,0 +1,226 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + 
], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "spender", + "type": "address" + } + ], + "name": "allowance", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "spender", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "account", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [ + { + "internalType": "uint8", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [ + { 
+ "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transfer", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IEthToken.json b/sdk/zksync-web3.js/abi/IEthToken.json new file mode 100644 index 000000000000..2c54afcbe009 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IEthToken.json @@ -0,0 +1,192 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "account", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "Mint", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "_l1Receiver", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": 
"_amount", + "type": "uint256" + } + ], + "name": "Withdrawal", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "decimals", + "outputs": [ + { + "internalType": "uint8", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_account", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "mint", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_from", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "transferFromTo", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Receiver", + "type": "address" + } + ], + "name": "withdraw", + "outputs": [], + "stateMutability": "payable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IL1Bridge.json 
b/sdk/zksync-web3.js/abi/IL1Bridge.json new file mode 100644 index 000000000000..f0cb575994f5 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IL1Bridge.json @@ -0,0 +1,243 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "ClaimedFailedDeposit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "DepositInitiated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "WithdrawalFinalized", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_depositSender", + "type": "address" + }, + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "_l2TxHash", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": 
"bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "claimFailedDeposit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2Receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2TxGasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2TxGasPerPubdataByte", + "type": "uint256" + } + ], + "name": "deposit", + "outputs": [ + { + "internalType": "bytes32", + "name": "txHash", + "type": "bytes32" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "finalizeWithdrawal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + } + ], + "name": "isWithdrawalFinalized", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + } + ], + "name": "l2TokenAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + 
} + ] +} diff --git a/sdk/zksync-web3.js/abi/IL1Messenger.json b/sdk/zksync-web3.js/abi/IL1Messenger.json new file mode 100644 index 000000000000..b2bbdd5f427b --- /dev/null +++ b/sdk/zksync-web3.js/abi/IL1Messenger.json @@ -0,0 +1,48 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "_hash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "_message", + "type": "bytes" + } + ], + "name": "L1MessageSent", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + } + ], + "name": "sendToL1", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IL2Bridge.json b/sdk/zksync-web3.js/abi/IL2Bridge.json new file mode 100644 index 000000000000..e62a099dcd59 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IL2Bridge.json @@ -0,0 +1,173 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "l1Sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l2Receiver", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l2Token", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "FinalizeDeposit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "l2Sender", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l1Receiver", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l2Token", + "type": "address" + }, + { + "indexed": 
false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "WithdrawalInitiated", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Sender", + "type": "address" + }, + { + "internalType": "address", + "name": "_l2Receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_data", + "type": "bytes" + } + ], + "name": "finalizeDeposit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "l1Bridge", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2Token", + "type": "address" + } + ], + "name": "l1TokenAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + } + ], + "name": "l2TokenAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Receiver", + "type": "address" + }, + { + "internalType": "address", + "name": "_l2Token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + } + ], + "name": "withdraw", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IPaymasterFlow.json b/sdk/zksync-web3.js/abi/IPaymasterFlow.json new file mode 100644 index 000000000000..c78e121fd538 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IPaymasterFlow.json @@ 
-0,0 +1,40 @@ +{ + "abi": [ + { + "inputs": [ + { + "internalType": "address", + "name": "_token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_minAllowance", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_innerInput", + "type": "bytes" + } + ], + "name": "approvalBased", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "input", + "type": "bytes" + } + ], + "name": "general", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/IZkSync.json b/sdk/zksync-web3.js/abi/IZkSync.json new file mode 100644 index 000000000000..c31cd6a2aad2 --- /dev/null +++ b/sdk/zksync-web3.js/abi/IZkSync.json @@ -0,0 +1,2195 @@ +{ + "abi": [ + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockCommit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "blockNumber", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "name": "BlockExecution", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksCommitted", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksVerified", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "totalBlocksExecuted", + "type": "uint256" + } + ], + 
"name": "BlocksRevert", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "previousLastVerifiedBlock", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "currentLastVerifiedBlock", + "type": "uint256" + } + ], + "name": "BlocksVerification", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "CancelUpgradeProposal", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "EthWithdrawalFinalized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "proposalSalt", + "type": "bytes32" + } + ], + "name": "ExecuteUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Freeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "bool", + "name": "isPorterAvailable", + "type": "bool" + } + ], + "name": "IsPorterAvailableStatusUpdate", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldGovernor", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newGovernor", + "type": "address" + } + ], + "name": "NewGovernor", + "type": "event" + }, + { + "anonymous": false, 
+ "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "previousBytecodeHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "newBytecodeHash", + "type": "bytes32" + } + ], + "name": "NewL2BootloaderBytecodeHash", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "previousBytecodeHash", + "type": "bytes32" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "newBytecodeHash", + "type": "bytes32" + } + ], + "name": "NewL2DefaultAccountBytecodeHash", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldPendingGovernor", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newPendingGovernor", + "type": "address" + } + ], + "name": "NewPendingGovernor", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "txId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "txHash", + "type": "bytes32" + }, + { + "indexed": false, + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "components": [ + { + "internalType": "uint256", + "name": "txType", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "from", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "to", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxPriorityFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "paymaster", + "type": "uint256" + }, + { + "internalType": 
"uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256[4]", + "name": "reserved", + "type": "uint256[4]" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + }, + { + "internalType": "uint256[]", + "name": "factoryDeps", + "type": "uint256[]" + }, + { + "internalType": "bytes", + "name": "paymasterInput", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "reservedDynamic", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct IMailbox.L2CanonicalTransaction", + "name": "transaction", + "type": "tuple" + }, + { + "indexed": false, + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + } + ], + "name": "NewPriorityRequest", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint256", + "name": "oldPriorityTxMaxGasLimit", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "NewPriorityTxMaxGasLimit", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "oldVerifier", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newVerifier", + "type": "address" + } + ], + "name": "NewVerifier", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "indexed": false, + "internalType": "struct VerifierParams", + "name": "oldVerifierParams", + 
"type": "tuple" + }, + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "indexed": false, + "internalType": "struct VerifierParams", + "name": "newVerifierParams", + "type": "tuple" + } + ], + "name": "NewVerifierParams", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "ProposeShadowUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "indexed": false, + "internalType": "struct Diamond.DiamondCutData", + "name": "diamondCut", + "type": "tuple" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "proposalSalt", + "type": "bytes32" + } + ], + "name": "ProposeTransparentUpgrade", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "proposalId", + 
"type": "uint256" + }, + { + "indexed": true, + "internalType": "bytes32", + "name": "proposalHash", + "type": "bytes32" + } + ], + "name": "SecurityCouncilUpgradeApprove", + "type": "event" + }, + { + "anonymous": false, + "inputs": [], + "name": "Unfreeze", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "validatorAddress", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "isActive", + "type": "bool" + } + ], + "name": "ValidatorStatusUpdate", + "type": "event" + }, + { + "inputs": [], + "name": "acceptGovernor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_proposedUpgradeHash", + "type": "bytes32" + } + ], + "name": "cancelUpgradeProposal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo", + "name": "_lastCommittedBlockData", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "uint64", + "name": "timestamp", + "type": "uint64" + }, + { + "internalType": 
"uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "newStateRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "initialStorageChanges", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "repeatedStorageChanges", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "l2Logs", + "type": "bytes" + }, + { + "internalType": "bytes[]", + "name": "l2ArbitraryLengthMessages", + "type": "bytes[]" + }, + { + "internalType": "bytes[]", + "name": "factoryDeps", + "type": "bytes[]" + } + ], + "internalType": "struct IExecutor.CommitBlockInfo[]", + "name": "_newBlocksData", + "type": "tuple[]" + } + ], + "name": "commitBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo[]", + "name": "_blocksData", + "type": "tuple[]" + } + ], + "name": "executeBlocks", + "outputs": [], + "stateMutability": 
"nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "bytes32", + "name": "_proposalSalt", + "type": "bytes32" + } + ], + "name": "executeUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "facetAddress", + "outputs": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facetAddresses", + "outputs": [ + { + "internalType": "address[]", + "name": "facets", + "type": "address[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "facetFunctionSelectors", + "outputs": [ + { + "internalType": "bytes4[]", + "name": "", + "type": "bytes4[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "facets", + "outputs": [ + { + "components": [ + { + "internalType": "address", + "name": "addr", + "type": "address" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + 
"internalType": "struct IGetters.Facet[]", + "name": "", + "type": "tuple[]" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + } + ], + "name": "finalizeEthWithdrawal", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "freezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "getCurrentProposalId", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getFirstUnprocessedPriorityTx", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getGovernor", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2BootloaderBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getL2DefaultAccountBytecodeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPendingGovernor", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + 
"stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getPriorityQueueSize", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getProposedUpgradeHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getProposedUpgradeTimestamp", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getSecurityCouncil", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksCommitted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksExecuted", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalBlocksVerified", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getTotalPriorityTxs", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getUpgradeProposalState", + "outputs": [ + { + "internalType": "enum UpgradeState", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getVerifier", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": 
"address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getVerifierParams", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "internalType": "struct VerifierParams", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "getpriorityTxMaxGasLimit", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isApprovedBySecurityCouncil", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isDiamondStorageFrozen", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + } + ], + "name": "isEthWithdrawalFinalized", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_facet", + "type": "address" + } + ], + "name": "isFacetFreezable", + "outputs": [ + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "_selector", + "type": "bytes4" + } + ], + "name": "isFunctionFreezable", + "outputs": [ 
+ { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "isValidator", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + } + ], + "name": "l2LogsRootHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "hash", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_gasPrice", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + } + ], + "name": "l2TransactionBaseCost", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "priorityQueueFrontOperation", + "outputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "expirationTimestamp", + "type": "uint64" + }, + { + "internalType": "uint192", + "name": "layer2Tip", + "type": "uint192" + } + ], + "internalType": "struct PriorityOperation", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_proposalHash", + "type": "bytes32" + }, + { + "internalType": "uint40", + "name": "_proposalId", + "type": "uint40" + } + ], + "name": "proposeShadowUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + 
"components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + "name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "uint40", + "name": "_proposalId", + "type": "uint40" + } + ], + "name": "proposeTransparentUpgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": "indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo", + "name": "_prevBlock", + "type": "tuple" + }, + { + "components": [ + { + "internalType": "uint64", + "name": "blockNumber", + "type": "uint64" + }, + { + "internalType": "bytes32", + "name": "blockHash", + "type": "bytes32" + }, + { + "internalType": "uint64", + "name": 
"indexRepeatedStorageChanges", + "type": "uint64" + }, + { + "internalType": "uint256", + "name": "numberOfLayer1Txs", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "priorityOperationsHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "l2LogsTreeRoot", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "timestamp", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "commitment", + "type": "bytes32" + } + ], + "internalType": "struct IExecutor.StoredBlockInfo[]", + "name": "_committedBlocks", + "type": "tuple[]" + }, + { + "components": [ + { + "internalType": "uint256[]", + "name": "recursiveAggregationInput", + "type": "uint256[]" + }, + { + "internalType": "uint256[]", + "name": "serializedProof", + "type": "uint256[]" + } + ], + "internalType": "struct IExecutor.ProofInput", + "name": "_proof", + "type": "tuple" + } + ], + "name": "proveBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2TxHash", + "type": "bytes32" + }, + { + "internalType": "uint256", + "name": "_l2BlockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2MessageIndex", + "type": "uint256" + }, + { + "internalType": "uint16", + "name": "_l2TxNumberInBlock", + "type": "uint16" + }, + { + "internalType": "bytes32[]", + "name": "_merkleProof", + "type": "bytes32[]" + }, + { + "internalType": "enum TxStatus", + "name": "_status", + "type": "uint8" + } + ], + "name": "proveL1ToL2TransactionStatus", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint8", + "name": "l2ShardId", + 
"type": "uint8" + }, + { + "internalType": "bool", + "name": "isService", + "type": "bool" + }, + { + "internalType": "uint16", + "name": "txNumberInBlock", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes32", + "name": "key", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "value", + "type": "bytes32" + } + ], + "internalType": "struct L2Log", + "name": "_log", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": "proveL2LogInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_index", + "type": "uint256" + }, + { + "components": [ + { + "internalType": "uint16", + "name": "txNumberInBlock", + "type": "uint16" + }, + { + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "internalType": "struct L2Message", + "name": "_message", + "type": "tuple" + }, + { + "internalType": "bytes32[]", + "name": "_proof", + "type": "bytes32[]" + } + ], + "name": "proveL2MessageInclusion", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_contractL2", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_l2Value", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": 
"bytes[]", + "name": "_factoryDeps", + "type": "bytes[]" + }, + { + "internalType": "address", + "name": "_refundRecipient", + "type": "address" + } + ], + "name": "requestL2Transaction", + "outputs": [ + { + "internalType": "bytes32", + "name": "canonicalTxHash", + "type": "bytes32" + } + ], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newLastBlock", + "type": "uint256" + } + ], + "name": "revertBlocks", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_upgradeProposalHash", + "type": "bytes32" + } + ], + "name": "securityCouncilUpgradeApprove", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_txId", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2Value", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "address", + "name": "_contractAddressL2", + "type": "address" + }, + { + "internalType": "bytes", + "name": "_calldata", + "type": "bytes" + }, + { + "internalType": "uint256", + "name": "_l2GasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_l2GasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "bytes[]", + "name": "_factoryDeps", + "type": "bytes[]" + }, + { + "internalType": "uint256", + "name": "_toMint", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_refundRecipient", + "type": "address" + } + ], + "name": "serializeL2Transaction", + "outputs": [ + { + "components": [ + { + "internalType": "uint256", + "name": "txType", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "from", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "to", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": 
"gasLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "gasPerPubdataByteLimit", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "maxPriorityFeePerGas", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "paymaster", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "nonce", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "value", + "type": "uint256" + }, + { + "internalType": "uint256[4]", + "name": "reserved", + "type": "uint256[4]" + }, + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "signature", + "type": "bytes" + }, + { + "internalType": "uint256[]", + "name": "factoryDeps", + "type": "uint256[]" + }, + { + "internalType": "bytes", + "name": "paymasterInput", + "type": "bytes" + }, + { + "internalType": "bytes", + "name": "reservedDynamic", + "type": "bytes" + } + ], + "internalType": "struct IMailbox.L2CanonicalTransaction", + "name": "", + "type": "tuple" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2BootloaderBytecodeHash", + "type": "bytes32" + } + ], + "name": "setL2BootloaderBytecodeHash", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_l2DefaultAccountBytecodeHash", + "type": "bytes32" + } + ], + "name": "setL2DefaultAccountBytecodeHash", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_newPendingGovernor", + "type": "address" + } + ], + "name": "setPendingGovernor", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bool", + "name": "_zkPorterIsAvailable", + "type": "bool" + } 
+ ], + "name": "setPorterAvailability", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_newPriorityTxMaxGasLimit", + "type": "uint256" + } + ], + "name": "setPriorityTxMaxGasLimit", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_validator", + "type": "address" + }, + { + "internalType": "bool", + "name": "_active", + "type": "bool" + } + ], + "name": "setValidator", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "contract Verifier", + "name": "_newVerifier", + "type": "address" + } + ], + "name": "setVerifier", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "internalType": "bytes32", + "name": "recursionNodeLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionLeafLevelVkHash", + "type": "bytes32" + }, + { + "internalType": "bytes32", + "name": "recursionCircuitsSetVksHash", + "type": "bytes32" + } + ], + "internalType": "struct VerifierParams", + "name": "_newVerifierParams", + "type": "tuple" + } + ], + "name": "setVerifierParams", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_blockNumber", + "type": "uint256" + } + ], + "name": "storedBlockHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "unfreezeDiamond", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "components": [ + { + "components": [ + { + "internalType": "address", + "name": "facet", + "type": "address" + }, + { + "internalType": "enum Diamond.Action", + 
"name": "action", + "type": "uint8" + }, + { + "internalType": "bool", + "name": "isFreezable", + "type": "bool" + }, + { + "internalType": "bytes4[]", + "name": "selectors", + "type": "bytes4[]" + } + ], + "internalType": "struct Diamond.FacetCut[]", + "name": "facetCuts", + "type": "tuple[]" + }, + { + "internalType": "address", + "name": "initAddress", + "type": "address" + }, + { + "internalType": "bytes", + "name": "initCalldata", + "type": "bytes" + } + ], + "internalType": "struct Diamond.DiamondCutData", + "name": "_diamondCut", + "type": "tuple" + }, + { + "internalType": "uint256", + "name": "_proposalId", + "type": "uint256" + }, + { + "internalType": "bytes32", + "name": "_salt", + "type": "bytes32" + } + ], + "name": "upgradeProposalHash", + "outputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "stateMutability": "pure", + "type": "function" + } + ] +} diff --git a/sdk/zksync-web3.js/abi/update-abi.sh b/sdk/zksync-web3.js/abi/update-abi.sh new file mode 100755 index 000000000000..c43a800a7834 --- /dev/null +++ b/sdk/zksync-web3.js/abi/update-abi.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +cd `dirname $0` + +OPEN_ZEPPELIN_CONTRACTS=$ZKSYNC_HOME/contracts/ethereum/artifacts/@openzeppelin/contracts +ETHEREUM_CONTRACTS=$ZKSYNC_HOME/contracts/ethereum/artifacts/cache/solpp-generated-contracts +ZKSYNC_CONTRACTS=$ZKSYNC_HOME/contracts/zksync/artifacts-zk/cache-zk/solpp-generated-contracts +SYSTEM_CONTRACTS=$ZKSYNC_HOME/etc/system-contracts/artifacts-zk/cache-zk/solpp-generated-contracts + +cat $OPEN_ZEPPELIN_CONTRACTS/token/ERC20/extensions/IERC20Metadata.sol/IERC20Metadata.json | jq '{ abi: .abi}' > IERC20.json + +cat $ETHEREUM_CONTRACTS/bridge/interfaces/IL1Bridge.sol/IL1Bridge.json | jq '{ abi: .abi}' > IL1Bridge.json +cat $ETHEREUM_CONTRACTS/zksync/interfaces/IZkSync.sol/IZkSync.json | jq '{ abi: .abi}' > IZkSync.json +cat $ETHEREUM_CONTRACTS/common/interfaces/IAllowList.sol/IAllowList.json | jq '{ abi: .abi}' > 
IAllowList.json + +cat $ZKSYNC_CONTRACTS/bridge/interfaces/IL2Bridge.sol/IL2Bridge.json | jq '{ abi: .abi}' > IL2Bridge.json +cat $ZKSYNC_CONTRACTS/interfaces/IPaymasterFlow.sol/IPaymasterFlow.json | jq '{ abi: .abi}' > IPaymasterFlow.json + +cat $SYSTEM_CONTRACTS/interfaces/IL1Messenger.sol/IL1Messenger.json | jq '{ abi: .abi}' > IL1Messenger.json +cat $SYSTEM_CONTRACTS/ContractDeployer.sol/ContractDeployer.json | jq '{ abi: .abi}' > ContractDeployer.json diff --git a/sdk/zksync-web3.js/package.json b/sdk/zksync-web3.js/package.json new file mode 100644 index 000000000000..1e366498f318 --- /dev/null +++ b/sdk/zksync-web3.js/package.json @@ -0,0 +1,32 @@ +{ + "name": "zksync-web3", + "version": "0.13.0", + "main": "build/src/index.js", + "types": "build/src/index.d.ts", + "files": [ + "build/", + "abi/", + "src/", + "typechain/" + ], + "license": "MIT", + "dependencies": {}, + "devDependencies": { + "@types/chai": "^4.2.19", + "@types/mocha": "^8.2.2", + "@types/node": "^16.0.0", + "chai": "^4.3.4", + "ethers": "~5.7.0", + "mocha": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "^4.3.5" + }, + "peerDependencies": { + "ethers": "^5.7.0" + }, + "scripts": { + "tests": "mocha -r ts-node/register tests/**/*.test.ts", + "build": "tsc && cp -f typechain/*.d.ts build/typechain", + "watch": "tsc --watch" + } +} diff --git a/sdk/zksync-web3.js/src/adapters.ts b/sdk/zksync-web3.js/src/adapters.ts new file mode 100644 index 000000000000..f92ef7df1ef1 --- /dev/null +++ b/sdk/zksync-web3.js/src/adapters.ts @@ -0,0 +1,440 @@ +import { BigNumber, BigNumberish, ethers, BytesLike } from 'ethers'; +import { Provider } from './provider'; +import { + RECOMMENDED_GAS_LIMIT, + isETH, + ETH_ADDRESS, + checkBaseCost, + undoL1ToL2Alias, + layer1TxDefaults, + DEFAULT_GAS_PER_PUBDATA_LIMIT, + L1_MESSENGER_ADDRESS, + BOOTLOADER_FORMAL_ADDRESS, + RECOMMENDED_DEPOSIT_L2_GAS_LIMIT, + DEPOSIT_GAS_PER_PUBDATA_LIMIT +} from './utils'; +import { IZkSyncFactory, IL1BridgeFactory, IL2BridgeFactory, 
IERC20MetadataFactory } from '../typechain'; +import { Address, PriorityOpResponse, BlockTag, Eip712Meta, TransactionResponse, BalancesMap } from './types'; + +type Constructor = new (...args: any[]) => T; + +interface TxSender { + sendTransaction(tx: ethers.providers.TransactionRequest): Promise; + getAddress(): Promise
; +} + +export function AdapterL1>(Base: TBase) { + return class Adapter extends Base { + _providerL2(): Provider { + throw new Error('Must be implemented by the derived class!'); + } + _providerL1(): ethers.providers.Provider { + throw new Error('Must be implemented by the derived class!'); + } + _signerL1(): ethers.Signer { + throw new Error('Must be implemented by the derived class!'); + } + + async getMainContract() { + const address = await this._providerL2().getMainContractAddress(); + return IZkSyncFactory.connect(address, this._signerL1()); + } + + async getL1BridgeContracts() { + const addresses = await this._providerL2().getDefaultBridgeAddresses(); + return { + erc20: IL1BridgeFactory.connect(addresses.erc20L1, this._signerL1()) + }; + } + + async getBalanceL1(token?: Address, blockTag?: ethers.providers.BlockTag): Promise { + token ??= ETH_ADDRESS; + if (isETH(token)) { + return await this._providerL1().getBalance(await this.getAddress(), blockTag); + } else { + const erc20contract = IERC20MetadataFactory.connect(token, this._providerL1()); + return await erc20contract.balanceOf(await this.getAddress()); + } + } + + async l2TokenAddress(token: Address) { + if (token == ETH_ADDRESS) { + return ETH_ADDRESS; + } else { + const erc20Bridge = (await this.getL1BridgeContracts()).erc20; + return await erc20Bridge.l2TokenAddress(token); + } + } + + async approveERC20( + token: Address, + amount: BigNumberish, + overrides?: ethers.Overrides & { bridgeAddress?: Address } + ): Promise { + if (isETH(token)) { + throw new Error("ETH token can't be approved. 
The address of the token does not exist on L1."); + } + + let bridgeAddress = overrides?.bridgeAddress; + const erc20contract = IERC20MetadataFactory.connect(token, this._signerL1()); + + if (bridgeAddress == null) { + bridgeAddress = (await this._providerL2().getDefaultBridgeAddresses()).erc20L1; + } else { + delete overrides.bridgeAddress; + } + + let gasLimit: BigNumberish; + if (overrides?.gasLimit) { + gasLimit = await overrides.gasLimit; + } else { + // For some reason, gas estimation for approves may be imprecise. + // At least in the localhost scenario. + gasLimit = await erc20contract.estimateGas.approve(bridgeAddress, amount); + gasLimit = gasLimit.gt(RECOMMENDED_GAS_LIMIT.ERC20_APPROVE) + ? gasLimit + : RECOMMENDED_GAS_LIMIT.ERC20_APPROVE; + } + + return await erc20contract.approve(bridgeAddress, amount, { + gasLimit, + ...overrides + }); + } + + async getBaseCost(params: { + gasLimit: BigNumberish; + gasPerPubdataByte?: BigNumberish; + gasPrice?: BigNumberish; + }): Promise { + const zksyncContract = await this.getMainContract(); + const parameters = { ...layer1TxDefaults(), ...params }; + parameters.gasPrice ??= await this._providerL1().getGasPrice(); + parameters.gasPerPubdataByte ??= DEPOSIT_GAS_PER_PUBDATA_LIMIT; + + return BigNumber.from( + await zksyncContract.l2TransactionBaseCost( + parameters.gasPrice, + parameters.gasLimit, + parameters.gasPerPubdataByte + ) + ); + } + + async deposit(transaction: { + token: Address; + amount: BigNumberish; + to?: Address; + operatorTip?: BigNumberish; + bridgeAddress?: Address; + approveERC20?: boolean; + l2GasLimit?: BigNumberish; + gasPerPubdataByte?: BigNumberish; + overrides?: ethers.PayableOverrides; + approveOverrides?: ethers.Overrides; + }): Promise { + const bridgeContracts = await this.getL1BridgeContracts(); + if (transaction.bridgeAddress) { + bridgeContracts.erc20.attach(transaction.bridgeAddress); + } + + const { ...tx } = transaction; + tx.to ??= await this.getAddress(); + tx.operatorTip ??= 
BigNumber.from(0); + tx.overrides ??= {}; + tx.gasPerPubdataByte ??= DEPOSIT_GAS_PER_PUBDATA_LIMIT; + tx.l2GasLimit ??= BigNumber.from(RECOMMENDED_DEPOSIT_L2_GAS_LIMIT); + + const { to, token, amount, operatorTip, overrides } = tx; + overrides.gasPrice ??= await this._providerL1().getGasPrice(); + overrides.gasLimit ??= BigNumber.from(RECOMMENDED_GAS_LIMIT.DEPOSIT); + + const baseCost = BigNumber.from(0); + + const args: [Address, Address, BigNumberish, BigNumberish, BigNumberish] = [ + to, + token, + amount, + tx.l2GasLimit, + tx.gasPerPubdataByte + ]; + + if (token == ETH_ADDRESS) { + overrides.value ??= baseCost.add(operatorTip).add(amount); + + return await this.requestExecute({ + contractAddress: to, + calldata: '0x', + l2Value: amount, + // For some reason typescript can not deduce that we've already set the + // tx.gasLimit + l2GasLimit: tx.l2GasLimit!, + ...tx + }); + } else { + overrides.value ??= baseCost.add(operatorTip); + + await checkBaseCost(baseCost, overrides.value); + + if (transaction.approveERC20) { + const approveTx = await this.approveERC20(token, amount, { + bridgeAddress: bridgeContracts.erc20.address, + ...transaction.approveOverrides + }); + overrides.nonce ??= approveTx.nonce + 1; + } + + if (overrides.gasLimit == null) { + const gasEstimate = await bridgeContracts.erc20.estimateGas + .deposit(...args, overrides) + .catch(() => BigNumber.from(0)); + const recommendedGasLimit = RECOMMENDED_GAS_LIMIT.DEPOSIT; + overrides.gasLimit = gasEstimate.gte(recommendedGasLimit) ? 
gasEstimate : recommendedGasLimit; + } + + return await this._providerL2().getPriorityOpResponse( + await bridgeContracts.erc20.deposit(...args, overrides) + ); + } + } + + async _getWithdrawalLog(withdrawalHash: BytesLike, index: number = 0) { + const hash = ethers.utils.hexlify(withdrawalHash); + const receipt = await this._providerL2().getTransactionReceipt(hash); + const log = receipt.logs.filter( + (log) => + log.address == L1_MESSENGER_ADDRESS && + log.topics[0] == ethers.utils.id('L1MessageSent(address,bytes32,bytes)') + )[index]; + + return { + log, + l1BatchTxId: receipt.l1BatchTxIndex + }; + } + + async _getWithdrawalL2ToL1Log(withdrawalHash: BytesLike, index: number = 0) { + const hash = ethers.utils.hexlify(withdrawalHash); + const receipt = await this._providerL2().getTransactionReceipt(hash); + const messages = Array.from(receipt.l2ToL1Logs.entries()).filter( + ([_, log]) => log.sender == L1_MESSENGER_ADDRESS + ); + const [l2ToL1LogIndex, l2ToL1Log] = messages[index]; + + return { + l2ToL1LogIndex, + l2ToL1Log + }; + } + + async finalizeWithdrawal(withdrawalHash: BytesLike, index: number = 0, overrides?: ethers.Overrides) { + const { log, l1BatchTxId } = await this._getWithdrawalLog(withdrawalHash, index); + const { l2ToL1LogIndex } = await this._getWithdrawalL2ToL1Log(withdrawalHash, index); + const sender = ethers.utils.hexDataSlice(log.topics[1], 12); + const proof = await this._providerL2().getLogProof(withdrawalHash, l2ToL1LogIndex); + const message = ethers.utils.defaultAbiCoder.decode(['bytes'], log.data)[0]; + + if (isETH(sender)) { + const contractAddress = await this._providerL2().getMainContractAddress(); + const zksync = IZkSyncFactory.connect(contractAddress, this._signerL1()); + + return await zksync.finalizeEthWithdrawal( + log.l1BatchNumber, + proof.id, + l1BatchTxId, + message, + proof.proof, + overrides ?? 
{} + ); + } + + const l2Bridge = IL2BridgeFactory.connect(sender, this._providerL2()); + const l1Bridge = IL1BridgeFactory.connect(await l2Bridge.l1Bridge(), this._signerL1()); + return await l1Bridge.finalizeWithdrawal( + log.l1BatchNumber, + proof.id, + l1BatchTxId, + message, + proof.proof, + overrides ?? {} + ); + } + + async isWithdrawalFinalized(withdrawalHash: BytesLike, index: number = 0) { + const { log } = await this._getWithdrawalLog(withdrawalHash, index); + const { l2ToL1LogIndex } = await this._getWithdrawalL2ToL1Log(withdrawalHash, index); + const sender = ethers.utils.hexDataSlice(log.topics[1], 12); + // `getLogProof` is called not to get proof but + // to get the index of the corresponding L2->L1 log, + // which is returned as `proof.id`. + const proof = await this._providerL2().getLogProof(withdrawalHash, l2ToL1LogIndex); + + if (isETH(sender)) { + const contractAddress = await this._providerL2().getMainContractAddress(); + const zksync = IZkSyncFactory.connect(contractAddress, this._signerL1()); + + return await zksync.isEthWithdrawalFinalized(log.l1BatchNumber, proof.id); + } + + const l2Bridge = IL2BridgeFactory.connect(sender, this._providerL2()); + const l1Bridge = IL1BridgeFactory.connect(await l2Bridge.l1Bridge(), this._providerL1()); + + return await l1Bridge.isWithdrawalFinalized(log.l1BatchNumber, proof.id); + } + + async claimFailedDeposit(depositHash: BytesLike, overrides?: ethers.Overrides) { + const receipt = await this._providerL2().getTransactionReceipt(ethers.utils.hexlify(depositHash)); + const successL2ToL1LogIndex = receipt.l2ToL1Logs.findIndex( + (l2ToL1log) => l2ToL1log.sender == BOOTLOADER_FORMAL_ADDRESS && l2ToL1log.key == depositHash + ); + const successL2ToL1Log = receipt.l2ToL1Logs[successL2ToL1LogIndex]; + if (successL2ToL1Log.value != ethers.constants.HashZero) { + throw new Error('Cannot claim successful deposit'); + } + + const tx = await this._providerL2().getTransaction(ethers.utils.hexlify(depositHash)); + + // 
Undo the aliasing, since the Mailbox contract set it as for contract address. + const l1BridgeAddress = undoL1ToL2Alias(receipt.from); + const l2BridgeAddress = receipt.to; + + const l1Bridge = IL1BridgeFactory.connect(l1BridgeAddress, this._signerL1()); + const l2Bridge = IL2BridgeFactory.connect(l2BridgeAddress, this._providerL2()); + + const calldata = l2Bridge.interface.decodeFunctionData('finalizeDeposit', tx.data); + + const proof = await this._providerL2().getLogProof(depositHash, successL2ToL1LogIndex); + return await l1Bridge.claimFailedDeposit( + calldata['_l1Sender'], + calldata['_l1Token'], + depositHash, + receipt.l1BatchNumber, + proof.id, + receipt.l1BatchTxIndex, + proof.proof, + overrides ?? {} + ); + } + + async requestExecute(transaction: { + contractAddress: Address; + calldata: BytesLike; + l2GasLimit: BigNumberish; + l2Value?: BigNumberish; + factoryDeps?: ethers.BytesLike[]; + operatorTip?: BigNumberish; + gasPerPubdataByte?: BigNumberish; + refundRecipient?: Address; + overrides?: ethers.PayableOverrides; + }): Promise { + const zksyncContract = await this.getMainContract(); + + const { ...tx } = transaction; + tx.l2Value ??= BigNumber.from(0); + tx.operatorTip ??= BigNumber.from(0); + tx.factoryDeps ??= []; + tx.overrides ??= {}; + tx.gasPerPubdataByte ??= DEPOSIT_GAS_PER_PUBDATA_LIMIT; + tx.refundRecipient ??= await this.getAddress(); + + const { + contractAddress, + l2Value, + calldata, + l2GasLimit, + factoryDeps, + operatorTip, + overrides, + gasPerPubdataByte, + refundRecipient + } = tx; + overrides.gasPrice ??= await this._providerL1().getGasPrice(); + overrides.gasLimit ??= BigNumber.from(RECOMMENDED_GAS_LIMIT.EXECUTE); + + const baseCost = await this.getBaseCost({ + gasPrice: await overrides.gasPrice, + gasPerPubdataByte, + gasLimit: l2GasLimit + }); + + overrides.value ??= baseCost.add(operatorTip).add(l2Value); + + await checkBaseCost(baseCost, overrides.value); + + return this._providerL2().getPriorityOpResponse( + await 
zksyncContract.requestL2Transaction( + contractAddress, + l2Value, + calldata, + l2GasLimit, + DEPOSIT_GAS_PER_PUBDATA_LIMIT, + factoryDeps, + refundRecipient, + overrides + ) + ); + } + }; +} + +export function AdapterL2>(Base: TBase) { + return class Adapter extends Base { + _providerL2(): Provider { + throw new Error('Must be implemented by the derived class!'); + } + _signerL2(): ethers.Signer { + throw new Error('Must be implemented by the derived class!'); + } + + async getBalance(token?: Address, blockTag: BlockTag = 'committed') { + return await this._providerL2().getBalance(await this.getAddress(), blockTag, token); + } + + async getAllBalances(): Promise { + return await this._providerL2().getAllAccountBalances(await this.getAddress()); + } + + async getL2BridgeContracts() { + const addresses = await this._providerL2().getDefaultBridgeAddresses(); + return { + erc20: IL2BridgeFactory.connect(addresses.erc20L2, this._signerL2()) + }; + } + + _fillCustomData(data: Eip712Meta): Eip712Meta { + const customData = { ...data }; + customData.gasPerPubdata ??= DEFAULT_GAS_PER_PUBDATA_LIMIT; + customData.factoryDeps ??= []; + return customData; + } + + async withdraw(transaction: { + token: Address; + amount: BigNumberish; + to?: Address; + bridgeAddress?: Address; + overrides?: ethers.Overrides; + }): Promise { + const withdrawTx = await this._providerL2().getWithdrawTx({ + from: await this.getAddress(), + ...transaction + }); + const txResponse = await this.sendTransaction(withdrawTx); + return this._providerL2()._wrapTransaction(txResponse); + } + + async transfer(transaction: { + to: Address; + amount: BigNumberish; + token?: Address; + overrides?: ethers.Overrides; + }): Promise { + const transferTx = await this._providerL2().getTransferTx({ + from: await this.getAddress(), + ...transaction + }); + const txResponse = await this.sendTransaction(transferTx); + return this._providerL2()._wrapTransaction(txResponse); + } + }; +} diff --git 
a/sdk/zksync-web3.js/src/calldata.ts b/sdk/zksync-web3.js/src/calldata.ts new file mode 100644 index 000000000000..5d12a25d08ac --- /dev/null +++ b/sdk/zksync-web3.js/src/calldata.ts @@ -0,0 +1,103 @@ +import { BigNumber, BigNumberish, ethers, utils } from 'ethers'; + +interface CallDataParams { + constructorCall?: boolean; +} + +interface CallData extends CallDataParams { + hash?: BigNumberish; + input: BigNumberish[]; +} + +const CONSTRUCTOR_DATA_OFFSET = 8; +const FIELD_SIZE = 32; + +const SELECTOR_SIZE_BYTES = 4; +const OFFSET_DATA = 1; +const OFFSET_HEADER = 0; + +function toLeBytes(x: BigNumberish): Uint8Array { + const hexString = BigNumber.from(x).toHexString(); + return utils.arrayify(hexString).reverse(); +} + +// This function parses calldata generated for a solidity contract call. +// Format is described in details here: https://docs.soliditylang.org/en/latest/abi-spec.html +// This function might incorrectly handle complex types. +export function parseCalldata(calldata: ethers.BytesLike, params?: CallDataParams): CallData { + const bytes = utils.arrayify(calldata); + + // The first four bytes of the call data for a function call specifies the function to be called. + // It is the first four bytes of the Keccak-256 hash of the signature of the function. + if (bytes.length < 4) { + throw new Error('No function selector found'); + } + + const selector = utils.hexlify(bytes.slice(0, 4)); + + // All the arguments follow the selector and are encoded as defined in the ABI spec. + // Arguments are aligned to 32 bytes each. 
+ if (bytes.length % 32 !== 4) { + throw new Error('Unsupported arguments alignment'); + } + + const input = []; + + for (let i = 4; i < bytes.length; i += 32) { + input.push(utils.hexlify(bytes.slice(i, i + 32))); + } + + return { + hash: selector, + input, + ...params + }; +} + +// Spec: https://www.notion.so/matterlabs/Contract-ABI-21cfe71b2e3346029f4b591ae33332b4 +export function calldataBytes(calldata: CallData): Uint8Array { + let buffer: Uint8Array; + let calldataSize: number; + + if (calldata.constructorCall) { + const size = (OFFSET_DATA + calldata.input.length) * FIELD_SIZE; + buffer = new Uint8Array(size); + + buffer[CONSTRUCTOR_DATA_OFFSET] |= 0b00000001; + + let calldataOffset = OFFSET_DATA * FIELD_SIZE; + calldata.input.forEach((value) => { + toLeBytes(value).forEach((byte, index) => { + buffer[index + calldataOffset] = byte; + }); + calldataOffset += FIELD_SIZE; + }); + + calldataSize = calldata.input.length * FIELD_SIZE; + } else { + const size = (OFFSET_DATA + 1 + calldata.input.length) * FIELD_SIZE; + buffer = new Uint8Array(size); + + const entryHashOffset = (OFFSET_DATA + 1) * FIELD_SIZE - SELECTOR_SIZE_BYTES; + toLeBytes(calldata.hash).forEach((byte, index) => { + buffer[index + entryHashOffset] = byte; + }); + + for (let i = 0; i < calldata.input.length; i++) { + const offset = (OFFSET_DATA + i) * FIELD_SIZE; + const argument = toLeBytes(calldata.input[i]); + + buffer.set(argument.slice(SELECTOR_SIZE_BYTES), offset); + buffer.set(argument.slice(0, SELECTOR_SIZE_BYTES), offset + 2 * FIELD_SIZE - SELECTOR_SIZE_BYTES); + } + + calldataSize = SELECTOR_SIZE_BYTES + calldata.input.length * FIELD_SIZE; + } + + const calldataSizeOffset = OFFSET_HEADER * FIELD_SIZE; + toLeBytes(calldataSize).forEach((byte, index) => { + buffer[calldataSizeOffset + index] = byte; + }); + + return buffer; +} diff --git a/sdk/zksync-web3.js/src/contract.ts b/sdk/zksync-web3.js/src/contract.ts new file mode 100644 index 000000000000..2d25274dc9fa --- /dev/null +++ 
b/sdk/zksync-web3.js/src/contract.ts @@ -0,0 +1,89 @@ +import { Wallet } from './wallet'; +import { Signer } from './signer'; +import { BytesLike, Contract, ContractInterface, ethers, utils } from 'ethers'; +import { + hashBytecode, + CONTRACT_DEPLOYER, + CONTRACT_DEPLOYER_ADDRESS, + EIP712_TX_TYPE, + getDeployedContracts, + DEFAULT_GAS_PER_PUBDATA_LIMIT +} from './utils'; +import { AccountAbstractionVersion, DeploymentType } from './types'; +export { Contract } from 'ethers'; + +export class ContractFactory extends ethers.ContractFactory { + override readonly signer: Wallet | Signer; + readonly deploymentType: DeploymentType; + + constructor( + abi: ContractInterface, + bytecode: ethers.BytesLike, + signer: Wallet | Signer, + deploymentType?: DeploymentType + ) { + super(abi, bytecode, signer); + this.deploymentType = deploymentType || 'create'; + } + + private encodeCalldata(salt: BytesLike, bytecodeHash: BytesLike, constructorCalldata: BytesLike) { + if (this.deploymentType == 'create') { + return CONTRACT_DEPLOYER.encodeFunctionData('create', [salt, bytecodeHash, constructorCalldata]); + } else if (this.deploymentType == 'createAccount') { + return CONTRACT_DEPLOYER.encodeFunctionData('createAccount', [ + salt, + bytecodeHash, + constructorCalldata, + AccountAbstractionVersion.Version1 + ]); + } else { + throw new Error(`Unsupported deployment type ${this.deploymentType}`); + } + } + + override getDeployTransaction(...args: any[]): ethers.providers.TransactionRequest { + let salt = '0x0000000000000000000000000000000000000000000000000000000000000000'; + + // The overrides will be popped out in this call: + const txRequest = super.getDeployTransaction(...args); + // Removing overrides + if (this.interface.deploy.inputs.length + 1 == args.length) { + args.pop(); + } + + // Salt argument is not used, so we provide a placeholder value. 
+ const bytecodeHash = hashBytecode(this.bytecode); + const constructorCalldata = utils.arrayify(this.interface.encodeDeploy(args)); + + const deployCalldata = this.encodeCalldata(salt, bytecodeHash, constructorCalldata); + + txRequest.type = EIP712_TX_TYPE; + txRequest.to = CONTRACT_DEPLOYER_ADDRESS; + txRequest.data = deployCalldata; + txRequest.customData ??= {}; + txRequest.customData.factoryDeps ??= []; + txRequest.customData.gasPerPubdata ??= DEFAULT_GAS_PER_PUBDATA_LIMIT; + // The number of factory deps is relatively low, so it is efficient enough. + if (!txRequest.customData.factoryDeps.includes(this.bytecode)) { + txRequest.customData.factoryDeps.push(this.bytecode); + } + + return txRequest; + } + + override async deploy(...args: Array): Promise { + const contract = await super.deploy(...args); + + const deployTxReceipt = await contract.deployTransaction.wait(); + + const deployedAddresses = getDeployedContracts(deployTxReceipt).map((info) => info.deployedAddress); + + const contractWithCorrectAddress = new ethers.Contract( + deployedAddresses[deployedAddresses.length - 1], + contract.interface, + contract.signer + ); + utils.defineReadOnly(contractWithCorrectAddress, 'deployTransaction', contract.deployTransaction); + return contractWithCorrectAddress; + } +} diff --git a/sdk/zksync-web3.js/src/index.ts b/sdk/zksync-web3.js/src/index.ts new file mode 100644 index 000000000000..600a4d21070d --- /dev/null +++ b/sdk/zksync-web3.js/src/index.ts @@ -0,0 +1,6 @@ +export * as utils from './utils'; +export * as types from './types'; +export { EIP712Signer, Signer, L1Signer } from './signer'; +export { Wallet } from './wallet'; +export { Web3Provider, Provider } from './provider'; +export { ContractFactory, Contract } from './contract'; diff --git a/sdk/zksync-web3.js/src/paymaster-utils.ts b/sdk/zksync-web3.js/src/paymaster-utils.ts new file mode 100644 index 000000000000..d95f124f4db3 --- /dev/null +++ b/sdk/zksync-web3.js/src/paymaster-utils.ts @@ -0,0 +1,31 
@@ +import { BytesLike, ethers } from 'ethers'; + +import { Address, ApprovalBasedPaymasterInput, GeneralPaymasterInput, PaymasterInput, PaymasterParams } from './types'; + +export const IPaymasterFlow = new ethers.utils.Interface(require('../../abi/IPaymasterFlow.json').abi); + +export function getApprovalBasedPaymasterInput(paymasterInput: ApprovalBasedPaymasterInput): BytesLike { + return IPaymasterFlow.encodeFunctionData('approvalBased', [ + paymasterInput.token, + paymasterInput.minimalAllowance, + paymasterInput.innerInput + ]); +} + +export function getGeneralPaymasterInput(paymasterInput: GeneralPaymasterInput): BytesLike { + return IPaymasterFlow.encodeFunctionData('general', [paymasterInput.innerInput]); +} + +export function getPaymasterParams(paymasterAddress: Address, paymasterInput: PaymasterInput): PaymasterParams { + if (paymasterInput.type == 'General') { + return { + paymaster: paymasterAddress, + paymasterInput: getGeneralPaymasterInput(paymasterInput) + }; + } else { + return { + paymaster: paymasterAddress, + paymasterInput: getApprovalBasedPaymasterInput(paymasterInput) + }; + } +} diff --git a/sdk/zksync-web3.js/src/provider.ts b/sdk/zksync-web3.js/src/provider.ts new file mode 100644 index 000000000000..2982a74e9e41 --- /dev/null +++ b/sdk/zksync-web3.js/src/provider.ts @@ -0,0 +1,608 @@ +import { ethers, BigNumber, BigNumberish, utils, providers, BytesLike, Contract } from 'ethers'; +import Formatter = providers.Formatter; +import { ExternalProvider } from '@ethersproject/providers'; +import { ConnectionInfo, poll } from '@ethersproject/web'; +import { IERC20MetadataFactory, IEthTokenFactory, IL2BridgeFactory } from '../typechain'; +import { + Address, + EventFilter, + BlockTag, + TransactionResponse, + TransactionRequest, + TransactionStatus, + Token, + PriorityOpResponse, + BalancesMap, + MessageProof, + TransactionReceipt, + Block, + BlockWithTransactions, + Log, + TransactionDetails, + BlockDetails, + ContractAccountInfo +} from 
'./types'; +import { + isETH, + getL2HashFromPriorityOp, + EIP712_TX_TYPE, + CONTRACT_DEPLOYER_ADDRESS, + CONTRACT_DEPLOYER, + ETH_ADDRESS, + parseTransaction, + sleep, + L2_ETH_TOKEN_ADDRESS +} from './utils'; +import { Signer } from './signer'; + +let defaultFormatter: Formatter = null; + +export class Provider extends ethers.providers.JsonRpcProvider { + protected contractAddresses: { + mainContract?: Address; + erc20BridgeL1?: Address; + erc20BridgeL2?: Address; + }; + + override async getTransactionReceipt(transactionHash: string | Promise): Promise { + await this.getNetwork(); + + transactionHash = await transactionHash; + + const params = { transactionHash: this.formatter.hash(transactionHash, true) }; + + return poll( + async () => { + const result = await this.perform('getTransactionReceipt', params); + + if (result == null) { + if (this._emitted['t:' + transactionHash] == null) { + return null; + } + return undefined; + } + + if (result.blockNumber == null && result.status != null && BigNumber.from(result.status).isZero()) { + // transaction is rejected in the state-keeper + return { + ...this.formatter.receipt({ + ...result, + confirmations: 1, + blockNumber: 0, + blockHash: ethers.constants.HashZero + }), + blockNumber: null, + blockHash: null, + l1BatchNumber: null, + l1BatchTxIndex: null + }; + } + + if (result.blockHash == null) { + // receipt is not ready + return undefined; + } else { + const receipt: any = this.formatter.receipt(result); + if (receipt.blockNumber == null) { + receipt.confirmations = 0; + } else if (receipt.confirmations == null) { + const blockNumber = await this._getInternalBlockNumber(100 + 2 * this.pollingInterval); + + // Add the confirmations using the fast block number (pessimistic) + let confirmations = blockNumber - receipt.blockNumber + 1; + if (confirmations <= 0) { + confirmations = 1; + } + receipt.confirmations = confirmations; + } + return receipt; + } + }, + { oncePoll: this } + ); + } + + override async 
getBlock(blockHashOrBlockTag: BlockTag | string | Promise): Promise { + return >this._getBlock(blockHashOrBlockTag, false); + } + + override async getBlockWithTransactions( + blockHashOrBlockTag: BlockTag | string | Promise + ): Promise { + return >this._getBlock(blockHashOrBlockTag, true); + } + + static override getFormatter(): Formatter { + if (defaultFormatter == null) { + defaultFormatter = new Formatter(); + const number = defaultFormatter.number.bind(defaultFormatter); + const boolean = defaultFormatter.boolean.bind(defaultFormatter); + const hash = defaultFormatter.hash.bind(defaultFormatter); + const address = defaultFormatter.address.bind(defaultFormatter); + + defaultFormatter.formats.receiptLog.l1BatchNumber = Formatter.allowNull(number); + + (defaultFormatter.formats as any).l2Tol1Log = { + blockNumber: number, + blockHash: hash, + l1BatchNumber: Formatter.allowNull(number), + transactionIndex: number, + shardId: number, + isService: boolean, + sender: address, + key: hash, + value: hash, + transactionHash: hash, + logIndex: number + }; + + defaultFormatter.formats.receipt.l1BatchNumber = Formatter.allowNull(number); + defaultFormatter.formats.receipt.l1BatchTxIndex = Formatter.allowNull(number); + defaultFormatter.formats.receipt.l2ToL1Logs = Formatter.arrayOf((value) => + Formatter.check((defaultFormatter.formats as any).l2Tol1Log, value) + ); + + defaultFormatter.formats.block.l1BatchNumber = Formatter.allowNull(number); + defaultFormatter.formats.block.l1BatchTimestamp = Formatter.allowNull(number); + defaultFormatter.formats.blockWithTransactions.l1BatchNumber = Formatter.allowNull(number); + defaultFormatter.formats.blockWithTransactions.l1BatchTimestamp = Formatter.allowNull(number); + defaultFormatter.formats.transaction.l1BatchNumber = Formatter.allowNull(number); + defaultFormatter.formats.transaction.l1BatchTxIndex = Formatter.allowNull(number); + + defaultFormatter.formats.filterLog.l1BatchNumber = Formatter.allowNull(number); + } + return 
defaultFormatter; + } + + override async getBalance(address: Address, blockTag?: BlockTag, tokenAddress?: Address) { + const tag = this.formatter.blockTag(blockTag); + if (tokenAddress == null || isETH(tokenAddress)) { + // requesting ETH balance + return await super.getBalance(address, tag); + } else { + try { + let token = IERC20MetadataFactory.connect(tokenAddress, this); + return await token.balanceOf(address, { blockTag: tag }); + } catch { + return BigNumber.from(0); + } + } + } + + async l2TokenAddress(token: Address) { + if (token == ETH_ADDRESS) { + return ETH_ADDRESS; + } else { + const erc20BridgeAddress = (await this.getDefaultBridgeAddresses()).erc20L2; + const erc20Bridge = IL2BridgeFactory.connect(erc20BridgeAddress, this); + return await erc20Bridge.l2TokenAddress(token); + } + } + + async l1TokenAddress(token: Address) { + if (token == ETH_ADDRESS) { + return ETH_ADDRESS; + } else { + const erc20BridgeAddress = (await this.getDefaultBridgeAddresses()).erc20L2; + const erc20Bridge = IL2BridgeFactory.connect(erc20BridgeAddress, this); + return await erc20Bridge.l1TokenAddress(token); + } + } + + // This function is used when formatting requests for + // eth_call and eth_estimateGas. We override it here + // because we have extra stuff to serialize (customData). + // This function is for internal use only. + static override hexlifyTransaction( + transaction: ethers.providers.TransactionRequest, + allowExtra?: Record + ) { + const result = ethers.providers.JsonRpcProvider.hexlifyTransaction(transaction, { + ...allowExtra, + customData: true, + from: true + }); + if (transaction.customData == null) { + return result; + } + result.eip712Meta = { + gasPerPubdata: utils.hexValue(transaction.customData.gasPerPubdata ?? 
0) + } as any; + transaction.type = EIP712_TX_TYPE; + if (transaction.customData.factoryDeps) { + // @ts-ignore + result.eip712Meta.factoryDeps = transaction.customData.factoryDeps.map((dep: ethers.BytesLike) => + Array.from(utils.arrayify(dep)) + ); + } + if (transaction.customData.paymasterParams) { + // @ts-ignore + result.eip712Meta.paymasterParams = { + paymaster: utils.hexlify(transaction.customData.paymasterParams.paymaster), + paymasterInput: Array.from(utils.arrayify(transaction.customData.paymasterParams.paymasterInput)) + }; + } + return result; + } + + override async estimateGas(transaction: utils.Deferrable): Promise { + await this.getNetwork(); + const params = await utils.resolveProperties({ + transaction: this._getTransactionRequest(transaction) + }); + if (transaction.customData != null) { + // @ts-ignore + params.transaction.customData = transaction.customData; + } + const result = await this.perform('estimateGas', params); + try { + return BigNumber.from(result); + } catch (error) { + throw new Error(`bad result from backend (estimateGas): ${result}`); + } + } + + override async getGasPrice(token?: Address): Promise { + const params = token ? 
[token] : []; + const price = await this.send('eth_gasPrice', params); + return BigNumber.from(price); + } + + constructor(url?: ConnectionInfo | string, network?: ethers.providers.Networkish) { + super(url, network); + this.pollingInterval = 500; + + const blockTag = this.formatter.blockTag.bind(this.formatter); + this.formatter.blockTag = (tag: any) => { + if (tag == 'committed' || tag == 'finalized') { + return tag; + } + return blockTag(tag); + }; + this.contractAddresses = {}; + this.formatter.transaction = parseTransaction; + } + + async getMessageProof( + blockNumber: ethers.BigNumberish, + sender: Address, + messageHash: BytesLike, + logIndex?: number + ): Promise { + return await this.send('zks_getL2ToL1MsgProof', [ + BigNumber.from(blockNumber).toNumber(), + sender, + ethers.utils.hexlify(messageHash), + logIndex + ]); + } + + async getLogProof(txHash: BytesLike, index?: number): Promise { + return await this.send('zks_getL2ToL1LogProof', [ethers.utils.hexlify(txHash), index]); + } + + async getL1BatchBlockRange(l1BatchNumber: number): Promise<[number, number] | null> { + const range = await this.send('zks_getL1BatchBlockRange', [l1BatchNumber]); + if (range == null) { + return null; + } + return [parseInt(range[0], 16), parseInt(range[1], 16)]; + } + + async getMainContractAddress(): Promise
{ + if (!this.contractAddresses.mainContract) { + this.contractAddresses.mainContract = await this.send('zks_getMainContract', []); + } + return this.contractAddresses.mainContract; + } + + async getTestnetPaymasterAddress(): Promise
{ + // Unlike contract's addresses, the testnet paymaster is not cached, since it can be trivially changed + // on the fly by the server and should not be relied to be constant + return await this.send('zks_getTestnetPaymaster', []); + } + + async getDefaultBridgeAddresses() { + if (!this.contractAddresses.erc20BridgeL1) { + let addresses = await this.send('zks_getBridgeContracts', []); + this.contractAddresses.erc20BridgeL1 = addresses.l1Erc20DefaultBridge; + this.contractAddresses.erc20BridgeL2 = addresses.l2Erc20DefaultBridge; + } + return { + erc20L1: this.contractAddresses.erc20BridgeL1, + erc20L2: this.contractAddresses.erc20BridgeL2 + }; + } + + async getConfirmedTokens(start: number = 0, limit: number = 255): Promise { + const tokens: Token[] = await this.send('zks_getConfirmedTokens', [start, limit]); + return tokens.map((token) => ({ address: token.l2Address, ...token })); + } + + async getTokenPrice(token: Address): Promise { + return await this.send('zks_getTokenPrice', [token]); + } + + async getAllAccountBalances(address: Address): Promise { + let balances = await this.send('zks_getAllAccountBalances', [address]); + for (let token in balances) { + balances[token] = BigNumber.from(balances[token]); + } + return balances; + } + + async l1ChainId(): Promise { + const res = await this.send('zks_L1ChainId', []); + return BigNumber.from(res).toNumber(); + } + + async getL1BatchNumber(): Promise { + const number = await this.send('zks_L1BatchNumber', []); + return BigNumber.from(number).toNumber(); + } + + async getBlockDetails(number: number): Promise { + return await this.send('zks_getBlockDetails', [number]); + } + + async getTransactionDetails(txHash: BytesLike): Promise { + return await this.send('zks_getTransactionDetails', [txHash]); + } + + async getWithdrawTx(transaction: { + token: Address; + amount: BigNumberish; + from?: Address; + to?: Address; + bridgeAddress?: Address; + overrides?: ethers.CallOverrides; + }): Promise { + const { ...tx } = 
transaction; + + if (tx.to == null && tx.from == null) { + throw new Error('withdrawal target address is undefined'); + } + + tx.to ??= tx.from; + tx.overrides ??= {}; + tx.overrides.from ??= tx.from; + + if (isETH(tx.token)) { + if (!tx.overrides.value) { + tx.overrides.value = tx.amount; + } + const passedValue = BigNumber.from(tx.overrides.value); + + if (!passedValue.eq(tx.amount)) { + // To avoid users shooting themselves into the foot, we will always use the amount to withdraw + // as the value + + throw new Error('The tx.value is not equal to the value withdrawn'); + } + + const ethL2Token = IEthTokenFactory.connect(L2_ETH_TOKEN_ADDRESS, this); + return ethL2Token.populateTransaction.withdraw(tx.to, tx.overrides); + } + + if (tx.bridgeAddress == null) { + const bridges = await this.getDefaultBridgeAddresses(); + tx.bridgeAddress = bridges.erc20L2; + } + + const bridge = IL2BridgeFactory.connect(tx.bridgeAddress!, this); + return bridge.populateTransaction.withdraw(tx.to, tx.token, tx.amount, tx.overrides); + } + + async estimateGasWithdraw(transaction: { + token: Address; + amount: BigNumberish; + from?: Address; + to?: Address; + bridgeAddress?: Address; + overrides?: ethers.CallOverrides; + }): Promise { + const withdrawTx = await this.getWithdrawTx(transaction); + return await this.estimateGas(withdrawTx); + } + + async getTransferTx(transaction: { + to: Address; + amount: BigNumberish; + from?: Address; + token?: Address; + overrides?: ethers.CallOverrides; + }): Promise { + const { ...tx } = transaction; + tx.overrides ??= {}; + tx.overrides.from ??= tx.from; + + if (tx.token == null || tx.token == ETH_ADDRESS) { + return { + ...(await ethers.utils.resolveProperties(tx.overrides)), + to: tx.to, + value: tx.amount + }; + } else { + const token = IERC20MetadataFactory.connect(tx.token, this); + return await token.populateTransaction.transfer(tx.to, tx.amount, tx.overrides); + } + } + + async estimateGasTransfer(transaction: { + to: Address; + amount: 
BigNumberish; + from?: Address; + token?: Address; + overrides?: ethers.CallOverrides; + }): Promise { + const transferTx = await this.getTransferTx(transaction); + return await this.estimateGas(transferTx); + } + + static getDefaultProvider() { + return new Provider(process.env.ZKSYNC_WEB3_API_URL || 'http://localhost:3050'); + } + + async newFilter(filter: EventFilter | Promise): Promise { + filter = await filter; + const id = await this.send('eth_newFilter', [this._prepareFilter(filter)]); + return BigNumber.from(id); + } + + async newBlockFilter(): Promise { + const id = await this.send('eth_newBlockFilter', []); + return BigNumber.from(id); + } + + async newPendingTransactionsFilter(): Promise { + const id = await this.send('eth_newPendingTransactionFilter', []); + return BigNumber.from(id); + } + + async getFilterChanges(idx: BigNumber): Promise> { + const logs = await this.send('eth_getFilterChanges', [idx.toHexString()]); + return typeof logs[0] === 'string' ? logs : this._parseLogs(logs); + } + + override async getLogs(filter: EventFilter | Promise = {}): Promise> { + filter = await filter; + const logs = await this.send('eth_getLogs', [this._prepareFilter(filter)]); + return this._parseLogs(logs); + } + + protected _parseLogs(logs: any[]): Array { + return Formatter.arrayOf(this.formatter.filterLog.bind(this.formatter))(logs); + } + + protected _prepareFilter(filter: EventFilter) { + return { + ...filter, + fromBlock: filter.fromBlock == null ? null : this.formatter.blockTag(filter.fromBlock), + toBlock: filter.fromBlock == null ? 
null : this.formatter.blockTag(filter.toBlock) + }; + } + + override _wrapTransaction(tx: ethers.Transaction, hash?: string): TransactionResponse { + const response = super._wrapTransaction(tx, hash) as TransactionResponse; + + response.waitFinalize = async () => { + const receipt = await response.wait(); + while (true) { + const block = await this.getBlock('finalized'); + if (receipt.blockNumber <= block.number) { + return await this.getTransactionReceipt(receipt.transactionHash); + } else { + await sleep(this.pollingInterval); + } + } + }; + + return response; + } + + // This is inefficient. Status should probably be indicated in the transaction receipt. + async getTransactionStatus(txHash: string) { + const tx = await this.getTransaction(txHash); + if (tx == null) { + return TransactionStatus.NotFound; + } + if (tx.blockNumber == null) { + return TransactionStatus.Processing; + } + const verifiedBlock = await this.getBlock('finalized'); + if (tx.blockNumber <= verifiedBlock.number) { + return TransactionStatus.Finalized; + } + return TransactionStatus.Committed; + } + + override async getTransaction(hash: string | Promise): Promise { + hash = await hash; + const tx = await super.getTransaction(hash); + return tx ? 
this._wrapTransaction(tx, hash) : null; + } + + override async sendTransaction(transaction: string | Promise): Promise { + return (await super.sendTransaction(transaction)) as TransactionResponse; + } + + async getL2TransactionFromPriorityOp( + l1TxResponse: ethers.providers.TransactionResponse + ): Promise { + const receipt = await l1TxResponse.wait(); + const l2Hash = getL2HashFromPriorityOp(receipt, await this.getMainContractAddress()); + + let status = null; + do { + status = await this.getTransactionStatus(l2Hash); + await sleep(this.pollingInterval); + } while (status == TransactionStatus.NotFound); + + return await this.getTransaction(l2Hash); + } + + async getPriorityOpResponse(l1TxResponse: ethers.providers.TransactionResponse): Promise { + const l2Response = { ...l1TxResponse } as PriorityOpResponse; + + l2Response.waitL1Commit = l2Response.wait; + l2Response.wait = async () => { + const l2Tx = await this.getL2TransactionFromPriorityOp(l1TxResponse); + return await l2Tx.wait(); + }; + l2Response.waitFinalize = async () => { + const l2Tx = await this.getL2TransactionFromPriorityOp(l1TxResponse); + return await l2Tx.waitFinalize(); + }; + + return l2Response; + } + + async getContractAccountInfo(address: Address): Promise { + const deployerContract = new Contract(CONTRACT_DEPLOYER_ADDRESS, CONTRACT_DEPLOYER, this); + const data = await deployerContract.getAccountInfo(address); + + return { + supportedAAVersion: data.supportedAAVersion, + nonceOrdering: data.nonceOrdering + }; + } +} + +export class Web3Provider extends Provider { + readonly provider: ExternalProvider; + + constructor(provider: ExternalProvider, network?: ethers.providers.Networkish) { + if (provider == null) { + throw new Error('missing provider'); + } + if (!provider.request) { + throw new Error('provider must implement eip-1193'); + } + + let path = provider.host || provider.path || (provider.isMetaMask ? 
'metamask' : 'eip-1193:'); + super(path, network); + this.provider = provider; + } + + override async send(method: string, params?: Array): Promise { + params ??= []; + // Metamask complains about eth_sign (and on some versions hangs) + if (method == 'eth_sign' && (this.provider.isMetaMask || this.provider.isStatus)) { + // https://github.com/ethereum/go-ethereum/wiki/Management-APIs#personal_sign + method = 'personal_sign'; + params = [params[1], params[0]]; + } + return await this.provider.request({ method, params }); + } + + override getSigner(addressOrIndex?: number | string): Signer { + return Signer.from(super.getSigner(addressOrIndex) as any); + } + + override async estimateGas(transaction: ethers.utils.Deferrable) { + const gas: BigNumber = await super.estimateGas(transaction); + const metamaskMinimum = BigNumber.from(21000); + const isEIP712 = transaction.customData != null || transaction.type == EIP712_TX_TYPE; + return gas.gt(metamaskMinimum) || isEIP712 ? gas : metamaskMinimum; + } +} diff --git a/sdk/zksync-web3.js/src/signer.ts b/sdk/zksync-web3.js/src/signer.ts new file mode 100644 index 000000000000..442cabc6a1a9 --- /dev/null +++ b/sdk/zksync-web3.js/src/signer.ts @@ -0,0 +1,170 @@ +import { ethers } from 'ethers'; +import { Provider } from './provider'; +import { serialize, EIP712_TX_TYPE, hashBytecode, DEFAULT_GAS_PER_PUBDATA_LIMIT } from './utils'; +import { BlockTag, TransactionResponse, Signature, TransactionRequest } from './types'; +import { TypedDataDomain, TypedDataSigner } from '@ethersproject/abstract-signer'; +import { _TypedDataEncoder as TypedDataEncoder } from '@ethersproject/hash'; +import { AdapterL1, AdapterL2 } from './adapters'; + +export const eip712Types = { + Transaction: [ + { name: 'txType', type: 'uint256' }, + { name: 'from', type: 'uint256' }, + { name: 'to', type: 'uint256' }, + { name: 'gasLimit', type: 'uint256' }, + { name: 'gasPerPubdataByteLimit', type: 'uint256' }, + { name: 'maxFeePerGas', type: 'uint256' }, + { 
name: 'maxPriorityFeePerGas', type: 'uint256' }, + { name: 'paymaster', type: 'uint256' }, + { name: 'nonce', type: 'uint256' }, + { name: 'value', type: 'uint256' }, + { name: 'data', type: 'bytes' }, + { name: 'factoryDeps', type: 'bytes32[]' }, + { name: 'paymasterInput', type: 'bytes' } + ] +}; + +export class EIP712Signer { + private eip712Domain: Promise; + constructor(private ethSigner: ethers.Signer & TypedDataSigner, chainId: number | Promise) { + this.eip712Domain = Promise.resolve(chainId).then((chainId) => ({ + name: 'zkSync', + version: '2', + chainId + })); + } + + static getSignInput(transaction: TransactionRequest) { + const maxFeePerGas = transaction.maxFeePerGas || transaction.gasPrice; + const maxPriorityFeePerGas = transaction.maxPriorityFeePerGas || maxFeePerGas; + const gasPerPubdataByteLimit = transaction.customData?.gasPerPubdata || DEFAULT_GAS_PER_PUBDATA_LIMIT; + const signInput = { + txType: transaction.type, + from: transaction.from, + to: transaction.to, + gasLimit: transaction.gasLimit, + gasPerPubdataByteLimit: gasPerPubdataByteLimit, + maxFeePerGas, + maxPriorityFeePerGas, + paymaster: transaction.customData?.paymasterParams?.paymaster || ethers.constants.AddressZero, + nonce: transaction.nonce, + value: transaction.value, + data: transaction.data, + factoryDeps: transaction.customData?.factoryDeps?.map((dep) => hashBytecode(dep)) || [], + paymasterInput: transaction.customData?.paymasterParams?.paymasterInput || '0x' + }; + return signInput; + } + + async sign(transaction: TransactionRequest): Promise { + return await this.ethSigner._signTypedData( + await this.eip712Domain, + eip712Types, + EIP712Signer.getSignInput(transaction) + ); + } + + static getSignedDigest(transaction: TransactionRequest): ethers.BytesLike { + if (!transaction.chainId) { + throw Error("Transaction chainId isn't set"); + } + const domain = { + name: 'zkSync', + version: '2', + chainId: transaction.chainId + }; + return TypedDataEncoder.hash(domain, 
eip712Types, EIP712Signer.getSignInput(transaction)); + } +} + +// This class is to be used on the frontend, with metamask injection. +// It only contains L2 operations. For L1 operations, see L1Signer. +// Sample usage: +// const provider = new zkweb3.Web3Provider(window.ethereum); +// const signer = provider.getSigner(); +// const tx = await signer.sendTransaction({ ... }); +export class Signer extends AdapterL2(ethers.providers.JsonRpcSigner) { + public override provider: Provider; + public eip712: EIP712Signer; + + override _signerL2() { + return this; + } + + override _providerL2() { + return this.provider; + } + + static from(signer: ethers.providers.JsonRpcSigner & { provider: Provider }): Signer { + const newSigner: Signer = Object.setPrototypeOf(signer, Signer.prototype); + // @ts-ignore + newSigner.eip712 = new EIP712Signer(newSigner, newSigner.getChainId()); + return newSigner; + } + + // an alias with a better name + async getNonce(blockTag?: BlockTag) { + return await this.getTransactionCount(blockTag); + } + + override async sendTransaction(transaction: TransactionRequest): Promise { + if (transaction.customData == null && transaction.type == null) { + // use legacy txs by default + transaction.type = 0; + } + if (transaction.customData == null && transaction.type != EIP712_TX_TYPE) { + return (await super.sendTransaction(transaction)) as TransactionResponse; + } else { + const address = await this.getAddress(); + transaction.from ??= address; + if (transaction.from.toLowerCase() != address.toLowerCase()) { + throw new Error('Transaction `from` address mismatch'); + } + transaction.type = EIP712_TX_TYPE; + transaction.value ??= 0; + transaction.data ??= '0x'; + transaction.nonce ??= await this.getNonce(); + transaction.customData = this._fillCustomData(transaction.customData); + transaction.gasPrice ??= await this.provider.getGasPrice(); + transaction.gasLimit ??= await this.provider.estimateGas(transaction); + transaction.chainId ??= (await 
this.provider.getNetwork()).chainId; + transaction.customData.customSignature = await this.eip712.sign(transaction); + + const txBytes = serialize(transaction); + return await this.provider.sendTransaction(txBytes); + } + } +} + +// This class is to be used on the frontend with metamask injection. +// It only contains L1 operations. For L2 operations, see Signer. +// Sample usage: +// const provider = new ethers.Web3Provider(window.ethereum); +// const zksyncProvider = new zkweb3.Provider(''); +// const signer = zkweb3.L1Signer.from(provider.getSigner(), zksyncProvider); +// const tx = await signer.deposit({ ... }); +export class L1Signer extends AdapterL1(ethers.providers.JsonRpcSigner) { + public providerL2: Provider; + override _providerL2() { + return this.providerL2; + } + + override _providerL1() { + return this.provider; + } + + override _signerL1() { + return this; + } + + static from(signer: ethers.providers.JsonRpcSigner, zksyncProvider: Provider): L1Signer { + const newSigner: L1Signer = Object.setPrototypeOf(signer, L1Signer.prototype); + newSigner.providerL2 = zksyncProvider; + return newSigner; + } + + connectToL2(provider: Provider): this { + this.providerL2 = provider; + return this; + } +} diff --git a/sdk/zksync-web3.js/src/types.ts b/sdk/zksync-web3.js/src/types.ts new file mode 100644 index 000000000000..8ae6ad9784e1 --- /dev/null +++ b/sdk/zksync-web3.js/src/types.ts @@ -0,0 +1,201 @@ +import { BytesLike, BigNumberish, providers, BigNumber } from 'ethers'; +import { BlockWithTransactions as EthersBlockWithTransactions } from '@ethersproject/abstract-provider'; + +// 0x-prefixed, hex encoded, ethereum account address +export type Address = string; +// 0x-prefixed, hex encoded, ECDSA signature. 
+export type Signature = string; + +// Ethereum network +export enum Network { + Mainnet = 1, + Ropsten = 3, + Rinkeby = 4, + Goerli = 5, + Localhost = 9 +} + +export enum PriorityQueueType { + Deque = 0, + HeapBuffer = 1, + Heap = 2 +} + +export enum PriorityOpTree { + Full = 0, + Rollup = 1 +} + +export enum TransactionStatus { + NotFound = 'not-found', + Processing = 'processing', + Committed = 'committed', + Finalized = 'finalized' +} + +export type PaymasterParams = { + paymaster: Address; + paymasterInput: BytesLike; +}; + +export type Eip712Meta = { + gasPerPubdata?: BigNumberish; + factoryDeps?: BytesLike[]; + customSignature?: BytesLike; + paymasterParams?: PaymasterParams; +}; + +// prettier-ignore +export type BlockTag = + | number + | string // hex number + | 'committed' + | 'finalized' + | 'latest' + | 'earliest' + | 'pending'; + +export type DeploymentType = 'create' | 'createAccount'; + +export interface Token { + l1Address: Address; + l2Address: Address; + /** @deprecated This field is here for backward compatibility - please use l2Address field instead */ + address: Address; + name: string; + symbol: string; + decimals: number; +} + +export interface MessageProof { + id: number; + proof: string[]; + root: string; +} + +export interface EventFilter { + topics?: Array | null>; + address?: Address | Array
; + fromBlock?: BlockTag; + toBlock?: BlockTag; + blockHash?: string; +} + +export interface TransactionResponse extends providers.TransactionResponse { + l1BatchNumber: number; + l1BatchTxIndex: number; + waitFinalize(): Promise; +} + +export interface TransactionReceipt extends providers.TransactionReceipt { + l1BatchNumber: number; + l1BatchTxIndex: number; + logs: Array; + l2ToL1Logs: Array; +} + +export interface Block extends providers.Block { + l1BatchNumber: number; + l1BatchTimestamp: number; +} + +export interface BlockWithTransactions extends EthersBlockWithTransactions { + l1BatchNumber: number; + l1BatchTimestamp: number; + transactions: Array; +} + +export interface Log extends providers.Log { + l1BatchNumber: number; +} + +export interface L2ToL1Log { + blockNumber: number; + blockHash: string; + l1BatchNumber: number; + transactionIndex: number; + shardId: number; + isService: boolean; + sender: string; + key: string; + value: string; + transactionHash: string; + logIndex: number; +} + +export type TransactionRequest = providers.TransactionRequest & { + customData?: Eip712Meta; +}; + +export interface PriorityOpResponse extends TransactionResponse { + waitL1Commit(confirmation?: number): Promise; +} + +export type BalancesMap = { [key: string]: BigNumber }; + +export interface DeploymentInfo { + sender: Address; + bytecodeHash: string; + deployedAddress: Address; +} + +export interface ApprovalBasedPaymasterInput { + type: 'ApprovalBased'; + token: Address; + minimalAllowance: BigNumber; + innerInput: BytesLike; +} + +export interface GeneralPaymasterInput { + type: 'General'; + innerInput: BytesLike; +} + +export interface EthereumSignature { + v: number; + r: BytesLike; + s: BytesLike; +} + +export type PaymasterInput = ApprovalBasedPaymasterInput | GeneralPaymasterInput; + +export enum AccountAbstractionVersion { + None = 0, + Version1 = 1 +} + +export enum AccountNonceOrdering { + Sequential = 0, + Arbitrary = 1 +} + +export interface 
ContractAccountInfo { + supportedAAVersion: AccountAbstractionVersion; + nonceOrdering: AccountNonceOrdering; +} + +export interface BlockDetails { + number: number; + timestamp: number; + l1TxCount: number; + l2TxCount: number; + rootHash?: string; + status: string; + commitTxHash?: string; + committedAt?: Date; + proveTxHash?: string; + provenAt?: Date; + executeTxHash?: string; + executedAt?: Date; +} + +export interface TransactionDetails { + isL1Originated: boolean; + status: string; + fee: BigNumberish; + initiatorAddress: Address; + receivedAt: Date; + ethCommitTxHash?: string; + ethProveTxHash?: string; + ethExecuteTxHash?: string; +} diff --git a/sdk/zksync-web3.js/src/utils.ts b/sdk/zksync-web3.js/src/utils.ts new file mode 100644 index 000000000000..b857ae9ba2b0 --- /dev/null +++ b/sdk/zksync-web3.js/src/utils.ts @@ -0,0 +1,463 @@ +import { utils, ethers, BigNumber, BigNumberish, BytesLike } from 'ethers'; +import { SignatureLike } from '@ethersproject/bytes'; +import { + Address, + Eip712Meta, + PriorityQueueType, + PriorityOpTree, + DeploymentInfo, + PaymasterParams, + EthereumSignature +} from './types'; +import { TypedDataDomain, TypedDataField } from '@ethersproject/abstract-signer'; +import { Provider } from './provider'; +import { EIP712Signer } from './signer'; + +export * from './paymaster-utils'; + +export const ETH_ADDRESS = '0x0000000000000000000000000000000000000000'; + +export const ZKSYNC_MAIN_ABI = new utils.Interface(require('../../abi/IZkSync.json').abi); +export const CONTRACT_DEPLOYER = new utils.Interface(require('../../abi/ContractDeployer.json').abi); +export const L1_MESSENGER = new utils.Interface(require('../../abi/IL1Messenger.json').abi); +export const IERC20 = new utils.Interface(require('../../abi/IERC20.json').abi); +export const IERC1271 = new utils.Interface(require('../../abi/IERC1271.json').abi); +export const L1_BRIDGE_ABI = new utils.Interface(require('../../abi/IL1Bridge.json').abi); +export const L2_BRIDGE_ABI = new 
utils.Interface(require('../../abi/IL2Bridge.json').abi); + +export const BOOTLOADER_FORMAL_ADDRESS = '0x0000000000000000000000000000000000008001'; +export const CONTRACT_DEPLOYER_ADDRESS = '0x0000000000000000000000000000000000008006'; +export const L1_MESSENGER_ADDRESS = '0x0000000000000000000000000000000000008008'; +export const L2_ETH_TOKEN_ADDRESS = '0x000000000000000000000000000000000000800a'; + +export const L1_TO_L2_ALIAS_OFFSET = '0x1111000000000000000000000000000000001111'; + +export const EIP1271_MAGIC_VALUE = '0x1626ba7e'; + +export const EIP712_TX_TYPE = 0x71; +export const PRIORITY_OPERATION_L2_TX_TYPE = 0xff; + +export const MAX_BYTECODE_LEN_BYTES = ((1 << 16) - 1) * 32; + +// The large L2 gas per pubdata to sign. This gas is enough to ensure that +// any reasonable limit will be accepted. Note, that the operator is NOT required to +// use the honest value of gas per pubdata and it can use any value up to the one signed by the user. +// In the future releases, we will provide a way to estimate the current gasPerPubdata. +export const DEFAULT_GAS_PER_PUBDATA_LIMIT = 50000; + +// It is possible to provide practically any gasPerPubdataByte for L1->L2 transactions, since +// the cost per gas will be adjusted respectively. We will use 800 as an relatively optimal value for now. +export const DEPOSIT_GAS_PER_PUBDATA_LIMIT = 800; + +// The recommended L2 gas limit for a deposit. 
+export const RECOMMENDED_DEPOSIT_L2_GAS_LIMIT = 10000000; + +export const RECOMMENDED_GAS_LIMIT = { + DEPOSIT: 600_000, + EXECUTE: 620_000, + ERC20_APPROVE: 50_000 +}; + +export function isETH(token: Address) { + return token.toLowerCase() == ETH_ADDRESS || token.toLowerCase() == L2_ETH_TOKEN_ADDRESS; +} + +export function sleep(millis: number) { + return new Promise((resolve) => setTimeout(resolve, millis)); +} + +export function layer1TxDefaults() { + return { + queueType: PriorityQueueType.Deque, + opTree: PriorityOpTree.Full + }; +} + +export function getHashedL2ToL1Msg(sender: Address, msg: BytesLike, txNumberInBlock: number) { + const encodedMsg = new Uint8Array([ + 0, // l2ShardId + 1, // isService + ...ethers.utils.zeroPad(ethers.utils.hexlify(txNumberInBlock), 2), + ...ethers.utils.arrayify(L1_MESSENGER_ADDRESS), + ...ethers.utils.zeroPad(sender, 32), + ...ethers.utils.arrayify(ethers.utils.keccak256(msg)) + ]); + + return ethers.utils.keccak256(encodedMsg); +} + +export function getDeployedContracts(receipt: ethers.providers.TransactionReceipt): DeploymentInfo[] { + const addressBytesLen = 40; + const deployedContracts = receipt.logs + .filter( + (log) => + log.topics[0] == utils.id('ContractDeployed(address,bytes32,address)') && + log.address == CONTRACT_DEPLOYER_ADDRESS + ) + // Take the last topic (deployed contract address as U256) and extract address from it (U160). 
+ .map((log) => { + const sender = `0x${log.topics[1].slice(log.topics[1].length - addressBytesLen)}`; + const bytesCodehash = log.topics[2]; + const address = `0x${log.topics[3].slice(log.topics[3].length - addressBytesLen)}`; + return { + sender: utils.getAddress(sender), + bytecodeHash: bytesCodehash, + deployedAddress: utils.getAddress(address) + }; + }); + + return deployedContracts; +} + +export function create2Address(sender: Address, bytecodeHash: BytesLike, salt: BytesLike, input: BytesLike) { + const prefix = ethers.utils.keccak256(ethers.utils.toUtf8Bytes('zksyncCreate2')); + const inputHash = ethers.utils.keccak256(input); + const addressBytes = ethers.utils + .keccak256(ethers.utils.concat([prefix, ethers.utils.zeroPad(sender, 32), salt, bytecodeHash, inputHash])) + .slice(26); + return ethers.utils.getAddress(addressBytes); +} + +export function createAddress(sender: Address, senderNonce: BigNumberish) { + const prefix = ethers.utils.keccak256(ethers.utils.toUtf8Bytes('zksyncCreate')); + const addressBytes = ethers.utils + .keccak256( + ethers.utils.concat([ + prefix, + ethers.utils.zeroPad(sender, 32), + ethers.utils.zeroPad(ethers.utils.hexlify(senderNonce), 32) + ]) + ) + .slice(26); + + return ethers.utils.getAddress(addressBytes); +} + +export async function checkBaseCost( + baseCost: ethers.BigNumber, + value: ethers.BigNumberish | Promise +) { + if (baseCost.gt(await value)) { + throw new Error( + `The base cost of performing the priority operation is higher than the provided value parameter ` + + `for the transaction: baseCost: ${baseCost}, provided value: ${value}` + ); + } +} + +export function serialize(transaction: ethers.providers.TransactionRequest, signature?: SignatureLike) { + if (transaction.customData == null && transaction.type != EIP712_TX_TYPE) { + return utils.serializeTransaction(transaction as ethers.PopulatedTransaction, signature); + } + if (!transaction.chainId) { + throw Error("Transaction chainId isn't set"); + } + + 
function formatNumber(value: BigNumberish, name: string): Uint8Array { + const result = utils.stripZeros(BigNumber.from(value).toHexString()); + if (result.length > 32) { + throw new Error('invalid length for ' + name); + } + return result; + } + + if (!transaction.from) { + throw new Error('Explicitly providing `from` field is reqiured for EIP712 transactions'); + } + const from = transaction.from; + + const meta: Eip712Meta = transaction.customData; + + let maxFeePerGas = transaction.maxFeePerGas || transaction.gasPrice || 0; + let maxPriorityFeePerGas = transaction.maxPriorityFeePerGas || maxFeePerGas; + + const fields: any[] = [ + formatNumber(transaction.nonce || 0, 'nonce'), + formatNumber(maxPriorityFeePerGas, 'maxPriorityFeePerGas'), + formatNumber(maxFeePerGas, 'maxFeePerGas'), + formatNumber(transaction.gasLimit || 0, 'gasLimit'), + transaction.to != null ? utils.getAddress(transaction.to) : '0x', + formatNumber(transaction.value || 0, 'value'), + transaction.data || '0x' + ]; + + if (signature) { + const sig = utils.splitSignature(signature); + fields.push(formatNumber(sig.recoveryParam, 'recoveryParam')); + fields.push(utils.stripZeros(sig.r)); + fields.push(utils.stripZeros(sig.s)); + } else { + fields.push(formatNumber(transaction.chainId, 'chainId')); + fields.push('0x'); + fields.push('0x'); + } + fields.push(formatNumber(transaction.chainId, 'chainId')); + fields.push(utils.getAddress(from)); + + // Add meta + fields.push(formatNumber(meta.gasPerPubdata || DEFAULT_GAS_PER_PUBDATA_LIMIT, 'gasPerPubdata')); + fields.push((meta.factoryDeps ?? 
[]).map((dep) => utils.hexlify(dep))); + + if (meta.customSignature && ethers.utils.arrayify(meta.customSignature).length == 0) { + throw new Error('Empty signatures are not supported'); + } + fields.push(meta.customSignature || '0x'); + + if (meta.paymasterParams) { + fields.push([meta.paymasterParams.paymaster, ethers.utils.hexlify(meta.paymasterParams.paymasterInput)]); + } else { + fields.push([]); + } + + return utils.hexConcat([[EIP712_TX_TYPE], utils.RLP.encode(fields)]); +} + +export function hashBytecode(bytecode: ethers.BytesLike): Uint8Array { + // For getting the consistent length we first convert the bytecode to UInt8Array + const bytecodeAsArray = ethers.utils.arrayify(bytecode); + + if (bytecodeAsArray.length % 32 != 0) { + throw new Error('The bytecode length in bytes must be divisible by 32'); + } + + if (bytecodeAsArray.length > MAX_BYTECODE_LEN_BYTES) { + throw new Error(`Bytecode can not be longer than ${MAX_BYTECODE_LEN_BYTES} bytes`); + } + + const hashStr = ethers.utils.sha256(bytecodeAsArray); + const hash = ethers.utils.arrayify(hashStr); + + // Note that the length of the bytecode + // should be provided in 32-byte words. + const bytecodeLengthInWords = bytecodeAsArray.length / 32; + if (bytecodeLengthInWords % 2 == 0) { + throw new Error('Bytecode length in 32-byte words must be odd'); + } + + const bytecodeLength = ethers.utils.arrayify(bytecodeLengthInWords); + + // The bytecode should always take the first 2 bytes of the bytecode hash, + // so we pad it from the left in case the length is smaller than 2 bytes. 
+ const bytecodeLengthPadded = ethers.utils.zeroPad(bytecodeLength, 2); + + const codeHashVersion = new Uint8Array([1, 0]); + hash.set(codeHashVersion, 0); + hash.set(bytecodeLengthPadded, 2); + + return hash; +} + +export function parseTransaction(payload: ethers.BytesLike): ethers.Transaction { + function handleAddress(value: string): string { + if (value === '0x') { + return null; + } + return utils.getAddress(value); + } + + function handleNumber(value: string): BigNumber { + if (value === '0x') { + return BigNumber.from(0); + } + return BigNumber.from(value); + } + + function arrayToPaymasterParams(arr: string[]): PaymasterParams | undefined { + if (arr.length == 0) { + return undefined; + } + if (arr.length != 2) { + throw new Error(`Invalid paymaster parameters, expected to have length of 2, found ${arr.length}`); + } + + return { + paymaster: utils.getAddress(arr[0]), + paymasterInput: utils.arrayify(arr[1]) + }; + } + + const bytes = utils.arrayify(payload); + if (bytes[0] != EIP712_TX_TYPE) { + return utils.parseTransaction(bytes); + } + + const raw = utils.RLP.decode(bytes.slice(1)); + const transaction: any = { + type: EIP712_TX_TYPE, + nonce: handleNumber(raw[0]).toNumber(), + maxPriorityFeePerGas: handleNumber(raw[1]), + maxFeePerGas: handleNumber(raw[2]), + gasLimit: handleNumber(raw[3]), + to: handleAddress(raw[4]), + value: handleNumber(raw[5]), + data: raw[6], + chainId: handleNumber(raw[10]), + from: handleAddress(raw[11]), + customData: { + gasPerPubdata: handleNumber(raw[12]), + factoryDeps: raw[13], + customSignature: raw[14], + paymasterParams: arrayToPaymasterParams(raw[15]) + } + }; + + const ethSignature = { + v: handleNumber(raw[7]).toNumber(), + r: raw[8], + s: raw[9] + }; + + if ( + (utils.hexlify(ethSignature.r) == '0x' || utils.hexlify(ethSignature.s) == '0x') && + !transaction.customData.customSignature + ) { + return transaction; + } + + if (ethSignature.v !== 0 && ethSignature.v !== 1 && !transaction.customData.customSignature) { + 
throw new Error('Failed to parse signature'); + } + + if (!transaction.customData.customSignature) { + transaction.v = ethSignature.v; + transaction.s = ethSignature.s; + transaction.r = ethSignature.r; + } + + transaction.hash = eip712TxHash(transaction, ethSignature); + + return transaction; +} + +function getSignature(transaction: any, ethSignature?: EthereumSignature): Uint8Array { + if (transaction?.customData?.customSignature && transaction.customData.customSignature.length) { + return ethers.utils.arrayify(transaction.customData.customSignature); + } + + if (!ethSignature) { + throw new Error('No signature provided'); + } + + const r = ethers.utils.zeroPad(ethers.utils.arrayify(ethSignature.r), 32); + const s = ethers.utils.zeroPad(ethers.utils.arrayify(ethSignature.s), 32); + const v = ethSignature.v; + + return new Uint8Array([...r, ...s, v]); +} + +function eip712TxHash(transaction: any, ethSignature?: EthereumSignature) { + const signedDigest = EIP712Signer.getSignedDigest(transaction); + const hashedSignature = ethers.utils.keccak256(getSignature(transaction, ethSignature)); + + return ethers.utils.keccak256(ethers.utils.hexConcat([signedDigest, hashedSignature])); +} + +export function getL2HashFromPriorityOp( + txReceipt: ethers.providers.TransactionReceipt, + zkSyncAddress: Address +): string { + let txHash: string = null; + for (const log of txReceipt.logs) { + if (log.address.toLowerCase() != zkSyncAddress.toLowerCase()) { + continue; + } + + try { + const priorityQueueLog = ZKSYNC_MAIN_ABI.parseLog(log); + if (priorityQueueLog && priorityQueueLog.args.txHash != null) { + txHash = priorityQueueLog.args.txHash; + } + } catch {} + } + if (!txHash) { + throw new Error('Failed to parse tx logs'); + } + + return txHash; +} + +export function applyL1ToL2Alias(address: string): string { + return ethers.utils.hexlify(ethers.BigNumber.from(address).add(L1_TO_L2_ALIAS_OFFSET)); +} + +export function undoL1ToL2Alias(address: string): string { + return 
ethers.utils.hexlify(ethers.BigNumber.from(address).sub(L1_TO_L2_ALIAS_OFFSET)); +} + +// The method with similar functionality is already available in ethers.js, +// the only difference is that we provide additional `try { } catch { }` +// for error-resilience. +// +// It will also pave the road for allowing future EIP-1271 signature verification, by +// letting our SDK have functionality to verify signatures. +function isECDSASignatureCorrect(address: string, msgHash: string, signature: SignatureLike): boolean { + try { + return address == ethers.utils.recoverAddress(msgHash, signature); + } catch { + // In case ECDSA signature verification has thrown an error, + // we simply consider the signature as incorrect. + return false; + } +} + +async function isEIP1271SignatureCorrect( + provider: Provider, + address: string, + msgHash: string, + signature: SignatureLike +): Promise { + const accountContract = new ethers.Contract(address, IERC1271, provider); + + // This line may throw an exception if the contract does not implement the EIP1271 correctly. + // But it may also throw an exception in case the internet connection is lost. + // It is the caller's responsibility to handle the exception. + const result = await accountContract.isValidSignature(msgHash, signature); + + return result == EIP1271_MAGIC_VALUE; +} + +async function isSignatureCorrect( + provider: Provider, + address: string, + msgHash: string, + signature: SignatureLike +): Promise { + let isContractAccount = false; + + const code = await provider.getCode(address); + isContractAccount = ethers.utils.arrayify(code).length != 0; + + if (!isContractAccount) { + return isECDSASignatureCorrect(address, msgHash, signature); + } else { + return await isEIP1271SignatureCorrect(provider, address, msgHash, signature); + } +} + +// Returns `true` or `false` depending on whether or not the account abstraction's +// signature is correct. Note, that while currently it does not do any `async` actions. 
+// in the future it will. That's why the `Promise` is returned. +export async function isMessageSignatureCorrect( + provider: Provider, + address: string, + message: ethers.Bytes | string, + signature: SignatureLike +): Promise { + const msgHash = ethers.utils.hashMessage(message); + return await isSignatureCorrect(provider, address, msgHash, signature); +} + +// Returns `true` or `false` depending on whether or not the account abstraction's +// EIP712 signature is correct. Note, that while currently it does not do any `async` actions. +// in the future it will. That's why the `Promise` is returned. +export async function isTypedDataSignatureCorrect( + provider: Provider, + address: string, + domain: TypedDataDomain, + types: Record>, + value: Record, + signature: SignatureLike +): Promise { + const msgHash = ethers.utils._TypedDataEncoder.hash(domain, types, value); + return await isSignatureCorrect(provider, address, msgHash, signature); +} diff --git a/sdk/zksync-web3.js/src/wallet.ts b/sdk/zksync-web3.js/src/wallet.ts new file mode 100644 index 000000000000..fb6d3c75414d --- /dev/null +++ b/sdk/zksync-web3.js/src/wallet.ts @@ -0,0 +1,128 @@ +import { EIP712Signer } from './signer'; +import { Provider } from './provider'; +import { serialize, EIP712_TX_TYPE } from './utils'; +import { ethers, utils } from 'ethers'; +import { BlockTag, TransactionResponse, TransactionRequest } from './types'; +import { ProgressCallback } from '@ethersproject/json-wallets'; +import { AdapterL1, AdapterL2 } from './adapters'; + +export class Wallet extends AdapterL2(AdapterL1(ethers.Wallet)) { + override readonly provider: Provider; + providerL1?: ethers.providers.Provider; + public eip712: EIP712Signer; + + override _providerL1() { + if (this.providerL1 == null) { + throw new Error('L1 provider missing: use `connectToL1` to specify'); + } + return this.providerL1; + } + + override _providerL2() { + return this.provider; + } + + override _signerL1() { + return this.ethWallet(); + 
} + + override _signerL2() { + return this; + } + + ethWallet() { + return new ethers.Wallet(this._signingKey(), this._providerL1()); + } + + // an alias with a better name + async getNonce(blockTag?: BlockTag) { + return await this.getTransactionCount(blockTag); + } + + override connect(provider: Provider) { + return new Wallet(this._signingKey(), provider, this.providerL1); + } + + connectToL1(provider: ethers.providers.Provider) { + return new Wallet(this._signingKey(), this.provider, provider); + } + + static override fromMnemonic(mnemonic: string, path?: string, wordlist?: ethers.Wordlist) { + const wallet = super.fromMnemonic(mnemonic, path, wordlist); + return new Wallet(wallet._signingKey()); + } + + static override async fromEncryptedJson( + json: string, + password?: string | ethers.Bytes, + callback?: ProgressCallback + ) { + const wallet = await super.fromEncryptedJson(json, password, callback); + return new Wallet(wallet._signingKey()); + } + + static override fromEncryptedJsonSync(json: string, password?: string | ethers.Bytes) { + const wallet = super.fromEncryptedJsonSync(json, password); + return new Wallet(wallet._signingKey()); + } + + static override createRandom(options?: any) { + const wallet = super.createRandom(options); + return new Wallet(wallet._signingKey()); + } + + constructor( + privateKey: ethers.BytesLike | utils.SigningKey, + providerL2?: Provider, + providerL1?: ethers.providers.Provider + ) { + super(privateKey, providerL2); + if (this.provider != null) { + const chainId = this.getChainId(); + // @ts-ignore + this.eip712 = new EIP712Signer(this, chainId); + } + this.providerL1 = providerL1; + } + + override async populateTransaction(transaction: TransactionRequest): Promise { + if (transaction.type == null && transaction.customData == null) { + // use legacy txs by default + transaction.type = 0; + } + transaction = await super.populateTransaction(transaction); + if (transaction.customData == null && transaction.type != 
EIP712_TX_TYPE) { + return transaction; + } + + transaction.type = EIP712_TX_TYPE; + transaction.value ??= 0; + transaction.data ??= '0x'; + transaction.customData = this._fillCustomData(transaction.customData); + transaction.gasPrice = await this.provider.getGasPrice(); + return transaction; + } + + override async signTransaction(transaction: TransactionRequest): Promise { + if (transaction.customData == null && transaction.type != EIP712_TX_TYPE) { + if (transaction.type == 2 && transaction.maxFeePerGas == null) { + transaction.maxFeePerGas = await this.provider.getGasPrice(); + } + return await super.signTransaction(transaction); + } else { + transaction.from ??= this.address; + if (transaction.from.toLowerCase() != this.address.toLowerCase()) { + throw new Error('Transaction `from` address mismatch'); + } + transaction.customData.customSignature = await this.eip712.sign(transaction); + + return serialize(transaction); + } + } + + override async sendTransaction(transaction: ethers.providers.TransactionRequest): Promise { + // Typescript isn't smart enough to recognise that wallet.sendTransaction + // calls provider.sendTransaction which returns our extended type and not ethers' one. 
+ return (await super.sendTransaction(transaction)) as TransactionResponse; + } +} diff --git a/sdk/zksync-web3.js/tests/main.test.ts b/sdk/zksync-web3.js/tests/main.test.ts new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/zksync-web3.js/tsconfig.json b/sdk/zksync-web3.js/tsconfig.json new file mode 100644 index 000000000000..322b2cd47e0c --- /dev/null +++ b/sdk/zksync-web3.js/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es2019", + + "outDir": "./build", + "esModuleInterop": true, + "declaration": true, + + "preserveSymlinks": true, + "preserveWatchOutput": true, + + "noImplicitOverride": true + }, + "files": [ + "./src/index.ts" + ] +} diff --git a/sdk/zksync-web3.js/typechain/IAllowList.d.ts b/sdk/zksync-web3.js/typechain/IAllowList.d.ts new file mode 100644 index 000000000000..44150193a204 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IAllowList.d.ts @@ -0,0 +1,767 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IAllowListInterface extends ethers.utils.Interface { + functions: { + "acceptOwner()": FunctionFragment; + "canCall(address,address,bytes4)": FunctionFragment; + "hasSpecialAccessToCall(address,address,bytes4)": FunctionFragment; + "isAccessPublic(address)": FunctionFragment; + "owner()": FunctionFragment; + "pendingOwner()": FunctionFragment; + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])": FunctionFragment; + "setBatchPublicAccess(address[],bool[])": FunctionFragment; + "setPendingOwner(address)": FunctionFragment; + "setPermissionToCall(address,address,bytes4,bool)": FunctionFragment; + "setPublicAccess(address,bool)": FunctionFragment; + }; + + encodeFunctionData( + functionFragment: "acceptOwner", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "canCall", + values: [string, string, BytesLike] + ): string; + encodeFunctionData( + functionFragment: "hasSpecialAccessToCall", + values: [string, string, BytesLike] + ): string; + encodeFunctionData( + functionFragment: "isAccessPublic", + values: [string] + ): string; + encodeFunctionData(functionFragment: "owner", values?: undefined): string; + encodeFunctionData( + functionFragment: "pendingOwner", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "setBatchPermissionToCall", + values: [string[], string[], BytesLike[], boolean[]] + ): string; + encodeFunctionData( + functionFragment: "setBatchPublicAccess", + values: [string[], boolean[]] + ): string; + 
encodeFunctionData( + functionFragment: "setPendingOwner", + values: [string] + ): string; + encodeFunctionData( + functionFragment: "setPermissionToCall", + values: [string, string, BytesLike, boolean] + ): string; + encodeFunctionData( + functionFragment: "setPublicAccess", + values: [string, boolean] + ): string; + + decodeFunctionResult( + functionFragment: "acceptOwner", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "canCall", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "hasSpecialAccessToCall", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isAccessPublic", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "owner", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "pendingOwner", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setBatchPermissionToCall", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setBatchPublicAccess", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPendingOwner", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPermissionToCall", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPublicAccess", + data: BytesLike + ): Result; + + events: { + "NewOwner(address)": EventFragment; + "NewPendingOwner(address,address)": EventFragment; + "UpdateCallPermission(address,address,bytes4,bool)": EventFragment; + "UpdatePublicAccess(address,bool)": EventFragment; + }; + + getEvent(nameOrSignatureOrTopic: "NewOwner"): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewPendingOwner"): EventFragment; + getEvent(nameOrSignatureOrTopic: "UpdateCallPermission"): EventFragment; + getEvent(nameOrSignatureOrTopic: "UpdatePublicAccess"): EventFragment; +} + +export class IAllowList extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): 
this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IAllowListInterface; + + functions: { + acceptOwner(overrides?: Overrides): Promise; + + "acceptOwner()"(overrides?: Overrides): Promise; + + canCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "canCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + hasSpecialAccessToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "hasSpecialAccessToCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + isAccessPublic( + _target: string, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "isAccessPublic(address)"( + _target: string, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + owner(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "owner()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + pendingOwner(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "pendingOwner()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + setBatchPermissionToCall( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])"( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: 
Overrides + ): Promise; + + setBatchPublicAccess( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPublicAccess(address[],bool[])"( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setPendingOwner( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + "setPendingOwner(address)"( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + setPermissionToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPermissionToCall(address,address,bytes4,bool)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + setPublicAccess( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPublicAccess(address,bool)"( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + }; + + acceptOwner(overrides?: Overrides): Promise; + + "acceptOwner()"(overrides?: Overrides): Promise; + + canCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "canCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + hasSpecialAccessToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "hasSpecialAccessToCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + isAccessPublic(_target: string, overrides?: CallOverrides): Promise; + + "isAccessPublic(address)"( + _target: string, + overrides?: CallOverrides + ): Promise; + + owner(overrides?: CallOverrides): Promise; + + "owner()"(overrides?: CallOverrides): Promise; + + pendingOwner(overrides?: CallOverrides): Promise; + + 
"pendingOwner()"(overrides?: CallOverrides): Promise; + + setBatchPermissionToCall( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])"( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setBatchPublicAccess( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPublicAccess(address[],bool[])"( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setPendingOwner( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + "setPendingOwner(address)"( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + setPermissionToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPermissionToCall(address,address,bytes4,bool)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + setPublicAccess( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPublicAccess(address,bool)"( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + + callStatic: { + acceptOwner(overrides?: CallOverrides): Promise; + + "acceptOwner()"(overrides?: CallOverrides): Promise; + + canCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "canCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + hasSpecialAccessToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "hasSpecialAccessToCall(address,address,bytes4)"( + _caller: string, + _target: 
string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + isAccessPublic( + _target: string, + overrides?: CallOverrides + ): Promise; + + "isAccessPublic(address)"( + _target: string, + overrides?: CallOverrides + ): Promise; + + owner(overrides?: CallOverrides): Promise; + + "owner()"(overrides?: CallOverrides): Promise; + + pendingOwner(overrides?: CallOverrides): Promise; + + "pendingOwner()"(overrides?: CallOverrides): Promise; + + setBatchPermissionToCall( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: CallOverrides + ): Promise; + + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])"( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: CallOverrides + ): Promise; + + setBatchPublicAccess( + _targets: string[], + _enables: boolean[], + overrides?: CallOverrides + ): Promise; + + "setBatchPublicAccess(address[],bool[])"( + _targets: string[], + _enables: boolean[], + overrides?: CallOverrides + ): Promise; + + setPendingOwner( + _newPendingOwner: string, + overrides?: CallOverrides + ): Promise; + + "setPendingOwner(address)"( + _newPendingOwner: string, + overrides?: CallOverrides + ): Promise; + + setPermissionToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: CallOverrides + ): Promise; + + "setPermissionToCall(address,address,bytes4,bool)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: CallOverrides + ): Promise; + + setPublicAccess( + _target: string, + _enable: boolean, + overrides?: CallOverrides + ): Promise; + + "setPublicAccess(address,bool)"( + _target: string, + _enable: boolean, + overrides?: CallOverrides + ): Promise; + }; + + filters: { + NewOwner(newOwner: string | null): EventFilter; + + NewPendingOwner( + oldPendingOwner: string | null, + newPendingOwner: string | null + ): 
EventFilter; + + UpdateCallPermission( + caller: string | null, + target: string | null, + functionSig: BytesLike | null, + status: null + ): EventFilter; + + UpdatePublicAccess(target: string | null, newStatus: null): EventFilter; + }; + + estimateGas: { + acceptOwner(overrides?: Overrides): Promise; + + "acceptOwner()"(overrides?: Overrides): Promise; + + canCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "canCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + hasSpecialAccessToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "hasSpecialAccessToCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + isAccessPublic( + _target: string, + overrides?: CallOverrides + ): Promise; + + "isAccessPublic(address)"( + _target: string, + overrides?: CallOverrides + ): Promise; + + owner(overrides?: CallOverrides): Promise; + + "owner()"(overrides?: CallOverrides): Promise; + + pendingOwner(overrides?: CallOverrides): Promise; + + "pendingOwner()"(overrides?: CallOverrides): Promise; + + setBatchPermissionToCall( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])"( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setBatchPublicAccess( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPublicAccess(address[],bool[])"( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setPendingOwner( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + 
"setPendingOwner(address)"( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + setPermissionToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPermissionToCall(address,address,bytes4,bool)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + setPublicAccess( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPublicAccess(address,bool)"( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + }; + + populateTransaction: { + acceptOwner(overrides?: Overrides): Promise; + + "acceptOwner()"(overrides?: Overrides): Promise; + + canCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "canCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + hasSpecialAccessToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + "hasSpecialAccessToCall(address,address,bytes4)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + overrides?: CallOverrides + ): Promise; + + isAccessPublic( + _target: string, + overrides?: CallOverrides + ): Promise; + + "isAccessPublic(address)"( + _target: string, + overrides?: CallOverrides + ): Promise; + + owner(overrides?: CallOverrides): Promise; + + "owner()"(overrides?: CallOverrides): Promise; + + pendingOwner(overrides?: CallOverrides): Promise; + + "pendingOwner()"(overrides?: CallOverrides): Promise; + + setBatchPermissionToCall( + _callers: string[], + _targets: string[], + _functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPermissionToCall(address[],address[],bytes4[],bool[])"( + _callers: string[], + _targets: string[], + 
_functionSigs: BytesLike[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setBatchPublicAccess( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + "setBatchPublicAccess(address[],bool[])"( + _targets: string[], + _enables: boolean[], + overrides?: Overrides + ): Promise; + + setPendingOwner( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + "setPendingOwner(address)"( + _newPendingOwner: string, + overrides?: Overrides + ): Promise; + + setPermissionToCall( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPermissionToCall(address,address,bytes4,bool)"( + _caller: string, + _target: string, + _functionSig: BytesLike, + _enable: boolean, + overrides?: Overrides + ): Promise; + + setPublicAccess( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + + "setPublicAccess(address,bool)"( + _target: string, + _enable: boolean, + overrides?: Overrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IAllowListFactory.ts b/sdk/zksync-web3.js/typechain/IAllowListFactory.ts new file mode 100644 index 000000000000..67fa419cfa41 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IAllowListFactory.ts @@ -0,0 +1,317 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IAllowList } from "./IAllowList"; + +export class IAllowListFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IAllowList { + return new Contract(address, _abi, signerOrProvider) as IAllowList; + } +} + +const _abi = [ + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "newOwner", + type: "address", + }, + ], + name: "NewOwner", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "oldPendingOwner", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "newPendingOwner", + type: "address", + }, + ], + name: "NewPendingOwner", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "caller", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "target", + type: "address", + }, + { + indexed: true, + internalType: "bytes4", + name: "functionSig", + type: "bytes4", + }, + { + indexed: false, + internalType: "bool", + name: "status", + type: "bool", + }, + ], + name: "UpdateCallPermission", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "target", + type: "address", + }, + { + indexed: false, + internalType: "bool", + name: "newStatus", + type: "bool", + }, + ], + name: "UpdatePublicAccess", + type: "event", + }, + { + inputs: [], + name: "acceptOwner", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_caller", + type: "address", + }, + { + internalType: "address", + name: "_target", + type: "address", + }, + { + internalType: "bytes4", + name: "_functionSig", + type: "bytes4", + }, + ], + name: "canCall", + outputs: 
[ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_caller", + type: "address", + }, + { + internalType: "address", + name: "_target", + type: "address", + }, + { + internalType: "bytes4", + name: "_functionSig", + type: "bytes4", + }, + ], + name: "hasSpecialAccessToCall", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_target", + type: "address", + }, + ], + name: "isAccessPublic", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "owner", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "pendingOwner", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address[]", + name: "_callers", + type: "address[]", + }, + { + internalType: "address[]", + name: "_targets", + type: "address[]", + }, + { + internalType: "bytes4[]", + name: "_functionSigs", + type: "bytes4[]", + }, + { + internalType: "bool[]", + name: "_enables", + type: "bool[]", + }, + ], + name: "setBatchPermissionToCall", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address[]", + name: "_targets", + type: "address[]", + }, + { + internalType: "bool[]", + name: "_enables", + type: "bool[]", + }, + ], + name: "setBatchPublicAccess", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_newPendingOwner", + type: "address", + }, + ], + name: "setPendingOwner", + outputs: 
[], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_caller", + type: "address", + }, + { + internalType: "address", + name: "_target", + type: "address", + }, + { + internalType: "bytes4", + name: "_functionSig", + type: "bytes4", + }, + { + internalType: "bool", + name: "_enable", + type: "bool", + }, + ], + name: "setPermissionToCall", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_target", + type: "address", + }, + { + internalType: "bool", + name: "_enable", + type: "bool", + }, + ], + name: "setPublicAccess", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, +]; diff --git a/sdk/zksync-web3.js/typechain/IERC20Metadata.d.ts b/sdk/zksync-web3.js/typechain/IERC20Metadata.d.ts new file mode 100644 index 000000000000..b6c3181b26fc --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IERC20Metadata.d.ts @@ -0,0 +1,511 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IERC20MetadataInterface extends ethers.utils.Interface { + functions: { + "allowance(address,address)": FunctionFragment; + "approve(address,uint256)": FunctionFragment; + "balanceOf(address)": FunctionFragment; + "decimals()": FunctionFragment; + "name()": FunctionFragment; + "symbol()": FunctionFragment; + "totalSupply()": FunctionFragment; + "transfer(address,uint256)": FunctionFragment; + "transferFrom(address,address,uint256)": FunctionFragment; + }; + + encodeFunctionData( + functionFragment: "allowance", + values: [string, string] + ): string; + encodeFunctionData( + functionFragment: "approve", + values: [string, BigNumberish] + ): string; + encodeFunctionData(functionFragment: "balanceOf", values: [string]): string; + encodeFunctionData(functionFragment: "decimals", values?: undefined): string; + encodeFunctionData(functionFragment: "name", values?: undefined): string; + encodeFunctionData(functionFragment: "symbol", values?: undefined): string; + encodeFunctionData( + functionFragment: "totalSupply", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "transfer", + values: [string, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "transferFrom", + values: [string, string, BigNumberish] + ): string; + + decodeFunctionResult(functionFragment: "allowance", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "approve", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "balanceOf", data: BytesLike): Result; + 
decodeFunctionResult(functionFragment: "decimals", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "name", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "symbol", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "totalSupply", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "transfer", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "transferFrom", + data: BytesLike + ): Result; + + events: { + "Approval(address,address,uint256)": EventFragment; + "Transfer(address,address,uint256)": EventFragment; + }; + + getEvent(nameOrSignatureOrTopic: "Approval"): EventFragment; + getEvent(nameOrSignatureOrTopic: "Transfer"): EventFragment; +} + +export class IERC20Metadata extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IERC20MetadataInterface; + + functions: { + allowance( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + "allowance(address,address)"( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + approve( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "approve(address,uint256)"( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + balanceOf( + account: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + "balanceOf(address)"( + account: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + decimals(overrides?: 
CallOverrides): Promise<{ + 0: number; + }>; + + "decimals()"(overrides?: CallOverrides): Promise<{ + 0: number; + }>; + + name(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "name()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + symbol(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "symbol()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + totalSupply(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "totalSupply()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + transfer( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transfer(address,uint256)"( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + transferFrom( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFrom(address,address,uint256)"( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; + + allowance( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + "allowance(address,address)"( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + approve( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "approve(address,uint256)"( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + balanceOf(account: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + account: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transfer( + to: 
string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transfer(address,uint256)"( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + transferFrom( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFrom(address,address,uint256)"( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + callStatic: { + allowance( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + "allowance(address,address)"( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + approve( + spender: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "approve(address,uint256)"( + spender: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + balanceOf(account: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + account: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transfer( + to: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "transfer(address,uint256)"( + to: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + transferFrom( + from: string, + to: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "transferFrom(address,address,uint256)"( + from: string, + to: string, + amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + }; + + filters: { + Approval( + owner: string | null, + spender: string | null, + value: null + ): 
EventFilter; + + Transfer(from: string | null, to: string | null, value: null): EventFilter; + }; + + estimateGas: { + allowance( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + "allowance(address,address)"( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + approve( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "approve(address,uint256)"( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + balanceOf(account: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + account: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transfer( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transfer(address,uint256)"( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + transferFrom( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFrom(address,address,uint256)"( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; + + populateTransaction: { + allowance( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + "allowance(address,address)"( + owner: string, + spender: string, + overrides?: CallOverrides + ): Promise; + + approve( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "approve(address,uint256)"( + spender: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + balanceOf( + 
account: string, + overrides?: CallOverrides + ): Promise; + + "balanceOf(address)"( + account: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transfer( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transfer(address,uint256)"( + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + transferFrom( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFrom(address,address,uint256)"( + from: string, + to: string, + amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IERC20MetadataFactory.ts b/sdk/zksync-web3.js/typechain/IERC20MetadataFactory.ts new file mode 100644 index 000000000000..3a5f7c088174 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IERC20MetadataFactory.ts @@ -0,0 +1,242 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IERC20Metadata } from "./IERC20Metadata"; + +export class IERC20MetadataFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IERC20Metadata { + return new Contract(address, _abi, signerOrProvider) as IERC20Metadata; + } +} + +const _abi = [ + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "owner", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "spender", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "value", + type: "uint256", + }, + ], + name: "Approval", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "from", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "value", + type: "uint256", + }, + ], + name: "Transfer", + type: "event", + }, + { + inputs: [ + { + internalType: "address", + name: "owner", + type: "address", + }, + { + internalType: "address", + name: "spender", + type: "address", + }, + ], + name: "allowance", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "spender", + type: "address", + }, + { + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "approve", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "account", + type: "address", + }, + ], + name: "balanceOf", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + 
stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "decimals", + outputs: [ + { + internalType: "uint8", + name: "", + type: "uint8", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "name", + outputs: [ + { + internalType: "string", + name: "", + type: "string", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "symbol", + outputs: [ + { + internalType: "string", + name: "", + type: "string", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "totalSupply", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "to", + type: "address", + }, + { + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "transfer", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "from", + type: "address", + }, + { + internalType: "address", + name: "to", + type: "address", + }, + { + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "transferFrom", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "nonpayable", + type: "function", + }, +]; diff --git a/sdk/zksync-web3.js/typechain/IEthToken.d.ts b/sdk/zksync-web3.js/typechain/IEthToken.d.ts new file mode 100644 index 000000000000..11f00956d903 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IEthToken.d.ts @@ -0,0 +1,426 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + PayableOverrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IEthTokenInterface extends ethers.utils.Interface { + functions: { + "balanceOf(address)": FunctionFragment; + "decimals()": FunctionFragment; + "mint(address,uint256)": FunctionFragment; + "name()": FunctionFragment; + "symbol()": FunctionFragment; + "totalSupply()": FunctionFragment; + "transferFromTo(address,address,uint256)": FunctionFragment; + "withdraw(address)": FunctionFragment; + }; + + encodeFunctionData(functionFragment: "balanceOf", values: [string]): string; + encodeFunctionData(functionFragment: "decimals", values?: undefined): string; + encodeFunctionData( + functionFragment: "mint", + values: [string, BigNumberish] + ): string; + encodeFunctionData(functionFragment: "name", values?: undefined): string; + encodeFunctionData(functionFragment: "symbol", values?: undefined): string; + encodeFunctionData( + functionFragment: "totalSupply", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "transferFromTo", + values: [string, string, BigNumberish] + ): string; + encodeFunctionData(functionFragment: "withdraw", values: [string]): string; + + decodeFunctionResult(functionFragment: "balanceOf", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "decimals", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "mint", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "name", data: BytesLike): Result; + decodeFunctionResult(functionFragment: "symbol", data: BytesLike): Result; + 
decodeFunctionResult( + functionFragment: "totalSupply", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "transferFromTo", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "withdraw", data: BytesLike): Result; + + events: { + "Mint(address,uint256)": EventFragment; + "Transfer(address,address,uint256)": EventFragment; + "Withdrawal(address,uint256)": EventFragment; + }; + + getEvent(nameOrSignatureOrTopic: "Mint"): EventFragment; + getEvent(nameOrSignatureOrTopic: "Transfer"): EventFragment; + getEvent(nameOrSignatureOrTopic: "Withdrawal"): EventFragment; +} + +export class IEthToken extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IEthTokenInterface; + + functions: { + balanceOf( + arg0: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + "balanceOf(address)"( + arg0: string, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + decimals(overrides?: CallOverrides): Promise<{ + 0: number; + }>; + + "decimals()"(overrides?: CallOverrides): Promise<{ + 0: number; + }>; + + mint( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "mint(address,uint256)"( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + name(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "name()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + symbol(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "symbol()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + 
totalSupply(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "totalSupply()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + transferFromTo( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFromTo(address,address,uint256)"( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + withdraw( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + + "withdraw(address)"( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + }; + + balanceOf(arg0: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + arg0: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + mint( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "mint(address,uint256)"( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transferFromTo( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFromTo(address,address,uint256)"( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + withdraw( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + + "withdraw(address)"( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + + callStatic: { + balanceOf(arg0: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + arg0: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; 
+ + "decimals()"(overrides?: CallOverrides): Promise; + + mint( + _account: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "mint(address,uint256)"( + _account: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transferFromTo( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "transferFromTo(address,address,uint256)"( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + withdraw(_l1Receiver: string, overrides?: CallOverrides): Promise; + + "withdraw(address)"( + _l1Receiver: string, + overrides?: CallOverrides + ): Promise; + }; + + filters: { + Mint(account: string | null, amount: null): EventFilter; + + Transfer(from: string | null, to: string | null, value: null): EventFilter; + + Withdrawal(_l1Receiver: string | null, _amount: null): EventFilter; + }; + + estimateGas: { + balanceOf(arg0: string, overrides?: CallOverrides): Promise; + + "balanceOf(address)"( + arg0: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + mint( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "mint(address,uint256)"( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: 
CallOverrides): Promise; + + transferFromTo( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFromTo(address,address,uint256)"( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + withdraw( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + + "withdraw(address)"( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + }; + + populateTransaction: { + balanceOf( + arg0: string, + overrides?: CallOverrides + ): Promise; + + "balanceOf(address)"( + arg0: string, + overrides?: CallOverrides + ): Promise; + + decimals(overrides?: CallOverrides): Promise; + + "decimals()"(overrides?: CallOverrides): Promise; + + mint( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "mint(address,uint256)"( + _account: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + name(overrides?: CallOverrides): Promise; + + "name()"(overrides?: CallOverrides): Promise; + + symbol(overrides?: CallOverrides): Promise; + + "symbol()"(overrides?: CallOverrides): Promise; + + totalSupply(overrides?: CallOverrides): Promise; + + "totalSupply()"(overrides?: CallOverrides): Promise; + + transferFromTo( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "transferFromTo(address,address,uint256)"( + _from: string, + _to: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + withdraw( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + + "withdraw(address)"( + _l1Receiver: string, + overrides?: PayableOverrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IEthTokenFactory.ts b/sdk/zksync-web3.js/typechain/IEthTokenFactory.ts new file mode 100644 index 000000000000..c90aeafe57e9 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IEthTokenFactory.ts @@ -0,0 +1,208 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IEthToken } from "./IEthToken"; + +export class IEthTokenFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IEthToken { + return new Contract(address, _abi, signerOrProvider) as IEthToken; + } +} + +const _abi = [ + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "account", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "Mint", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "from", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "value", + type: "uint256", + }, + ], + name: "Transfer", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "_l1Receiver", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + ], + name: "Withdrawal", + type: "event", + }, + { + inputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + name: "balanceOf", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "decimals", + outputs: [ + { + internalType: "uint8", + name: "", + type: "uint8", + }, + ], + stateMutability: "pure", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_account", + type: "address", + }, + { + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + ], + name: "mint", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [], + name: 
"name", + outputs: [ + { + internalType: "string", + name: "", + type: "string", + }, + ], + stateMutability: "pure", + type: "function", + }, + { + inputs: [], + name: "symbol", + outputs: [ + { + internalType: "string", + name: "", + type: "string", + }, + ], + stateMutability: "pure", + type: "function", + }, + { + inputs: [], + name: "totalSupply", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_from", + type: "address", + }, + { + internalType: "address", + name: "_to", + type: "address", + }, + { + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + ], + name: "transferFromTo", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l1Receiver", + type: "address", + }, + ], + name: "withdraw", + outputs: [], + stateMutability: "payable", + type: "function", + }, +]; diff --git a/sdk/zksync-web3.js/typechain/IL1Bridge.d.ts b/sdk/zksync-web3.js/typechain/IL1Bridge.d.ts new file mode 100644 index 000000000000..778df3998e34 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IL1Bridge.d.ts @@ -0,0 +1,537 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + PayableOverrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IL1BridgeInterface extends ethers.utils.Interface { + functions: { + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])": FunctionFragment; + "deposit(address,address,uint256,uint256,uint256)": FunctionFragment; + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])": FunctionFragment; + "isWithdrawalFinalized(uint256,uint256)": FunctionFragment; + "l2TokenAddress(address)": FunctionFragment; + }; + + encodeFunctionData( + functionFragment: "claimFailedDeposit", + values: [ + string, + string, + BytesLike, + BigNumberish, + BigNumberish, + BigNumberish, + BytesLike[] + ] + ): string; + encodeFunctionData( + functionFragment: "deposit", + values: [string, string, BigNumberish, BigNumberish, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "finalizeWithdrawal", + values: [BigNumberish, BigNumberish, BigNumberish, BytesLike, BytesLike[]] + ): string; + encodeFunctionData( + functionFragment: "isWithdrawalFinalized", + values: [BigNumberish, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "l2TokenAddress", + values: [string] + ): string; + + decodeFunctionResult( + functionFragment: "claimFailedDeposit", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "deposit", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "finalizeWithdrawal", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isWithdrawalFinalized", + data: BytesLike + ): 
Result; + decodeFunctionResult( + functionFragment: "l2TokenAddress", + data: BytesLike + ): Result; + + events: { + "ClaimedFailedDeposit(address,address,uint256)": EventFragment; + "DepositInitiated(address,address,address,uint256)": EventFragment; + "WithdrawalFinalized(address,address,uint256)": EventFragment; + }; + + getEvent(nameOrSignatureOrTopic: "ClaimedFailedDeposit"): EventFragment; + getEvent(nameOrSignatureOrTopic: "DepositInitiated"): EventFragment; + getEvent(nameOrSignatureOrTopic: "WithdrawalFinalized"): EventFragment; +} + +export class IL1Bridge extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IL1BridgeInterface; + + functions: { + claimFailedDeposit( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])"( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + deposit( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + "deposit(address,address,uint256,uint256,uint256)"( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + 
_l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + finalizeWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + isWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "isWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + }; + + claimFailedDeposit( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])"( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + deposit( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + "deposit(address,address,uint256,uint256,uint256)"( + _l2Receiver: string, + 
_l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + finalizeWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + isWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress(_l1Token: string, overrides?: CallOverrides): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + callStatic: { + claimFailedDeposit( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])"( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + deposit( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "deposit(address,address,uint256,uint256,uint256)"( + _l2Receiver: string, + _l1Token: string, + _amount: 
BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: CallOverrides + ): Promise; + + finalizeWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + isWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + }; + + filters: { + ClaimedFailedDeposit( + to: string | null, + l1Token: string | null, + amount: null + ): EventFilter; + + DepositInitiated( + from: string | null, + to: string | null, + l1Token: string | null, + amount: null + ): EventFilter; + + WithdrawalFinalized( + to: string | null, + l1Token: string | null, + amount: null + ): EventFilter; + }; + + estimateGas: { + claimFailedDeposit( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])"( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + 
overrides?: Overrides + ): Promise; + + deposit( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + "deposit(address,address,uint256,uint256,uint256)"( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + finalizeWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + isWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + }; + + populateTransaction: { + claimFailedDeposit( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "claimFailedDeposit(address,address,bytes32,uint256,uint256,uint16,bytes32[])"( + _depositSender: string, + _l1Token: string, + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + overrides?: 
Overrides + ): Promise; + + deposit( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + "deposit(address,address,uint256,uint256,uint256)"( + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _l2TxGasLimit: BigNumberish, + _l2TxGasPerPubdataByte: BigNumberish, + overrides?: PayableOverrides + ): Promise; + + finalizeWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + isWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IL1BridgeFactory.ts b/sdk/zksync-web3.js/typechain/IL1BridgeFactory.ts new file mode 100644 index 000000000000..4b6b8dd20ceb --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IL1BridgeFactory.ts @@ -0,0 +1,259 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IL1Bridge } from "./IL1Bridge"; + +export class IL1BridgeFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IL1Bridge { + return new Contract(address, _abi, signerOrProvider) as IL1Bridge; + } +} + +const _abi = [ + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "l1Token", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "ClaimedFailedDeposit", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "from", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "l1Token", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "DepositInitiated", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "l1Token", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "WithdrawalFinalized", + type: "event", + }, + { + inputs: [ + { + internalType: "address", + name: "_depositSender", + type: "address", + }, + { + internalType: "address", + name: "_l1Token", + type: "address", + }, + { + internalType: "bytes32", + name: "_l2TxHash", + type: "bytes32", + }, + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, 
+ { + internalType: "uint16", + name: "_l2TxNumberInBlock", + type: "uint16", + }, + { + internalType: "bytes32[]", + name: "_merkleProof", + type: "bytes32[]", + }, + ], + name: "claimFailedDeposit", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l2Receiver", + type: "address", + }, + { + internalType: "address", + name: "_l1Token", + type: "address", + }, + { + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2TxGasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2TxGasPerPubdataByte", + type: "uint256", + }, + ], + name: "deposit", + outputs: [ + { + internalType: "bytes32", + name: "txHash", + type: "bytes32", + }, + ], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, + { + internalType: "uint16", + name: "_l2TxNumberInBlock", + type: "uint16", + }, + { + internalType: "bytes", + name: "_message", + type: "bytes", + }, + { + internalType: "bytes32[]", + name: "_merkleProof", + type: "bytes32[]", + }, + ], + name: "finalizeWithdrawal", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, + ], + name: "isWithdrawalFinalized", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l1Token", + type: "address", + }, + ], + name: "l2TokenAddress", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, +]; diff 
--git a/sdk/zksync-web3.js/typechain/IL2Bridge.d.ts b/sdk/zksync-web3.js/typechain/IL2Bridge.d.ts new file mode 100644 index 000000000000..e7742f25c5f6 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IL2Bridge.d.ts @@ -0,0 +1,376 @@ +/* Autogenerated file. Do not edit manually. */ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IL2BridgeInterface extends ethers.utils.Interface { + functions: { + "finalizeDeposit(address,address,address,uint256,bytes)": FunctionFragment; + "l1Bridge()": FunctionFragment; + "l1TokenAddress(address)": FunctionFragment; + "l2TokenAddress(address)": FunctionFragment; + "withdraw(address,address,uint256)": FunctionFragment; + }; + + encodeFunctionData( + functionFragment: "finalizeDeposit", + values: [string, string, string, BigNumberish, BytesLike] + ): string; + encodeFunctionData(functionFragment: "l1Bridge", values?: undefined): string; + encodeFunctionData( + functionFragment: "l1TokenAddress", + values: [string] + ): string; + encodeFunctionData( + functionFragment: "l2TokenAddress", + values: [string] + ): string; + encodeFunctionData( + functionFragment: "withdraw", + values: [string, string, BigNumberish] + ): string; + + decodeFunctionResult( + functionFragment: "finalizeDeposit", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "l1Bridge", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "l1TokenAddress", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "l2TokenAddress", + data: BytesLike + ): Result; + 
decodeFunctionResult(functionFragment: "withdraw", data: BytesLike): Result; + + events: {}; +} + +export class IL2Bridge extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IL2BridgeInterface; + + functions: { + finalizeDeposit( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + "finalizeDeposit(address,address,address,uint256,bytes)"( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + l1Bridge(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "l1Bridge()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + l1TokenAddress( + _l2Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + "l1TokenAddress(address)"( + _l2Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + withdraw( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "withdraw(address,address,uint256)"( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; + + finalizeDeposit( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + 
overrides?: Overrides + ): Promise; + + "finalizeDeposit(address,address,address,uint256,bytes)"( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + l1Bridge(overrides?: CallOverrides): Promise; + + "l1Bridge()"(overrides?: CallOverrides): Promise; + + l1TokenAddress(_l2Token: string, overrides?: CallOverrides): Promise; + + "l1TokenAddress(address)"( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress(_l1Token: string, overrides?: CallOverrides): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + withdraw( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "withdraw(address,address,uint256)"( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + callStatic: { + finalizeDeposit( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: CallOverrides + ): Promise; + + "finalizeDeposit(address,address,address,uint256,bytes)"( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: CallOverrides + ): Promise; + + l1Bridge(overrides?: CallOverrides): Promise; + + "l1Bridge()"(overrides?: CallOverrides): Promise; + + l1TokenAddress( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + "l1TokenAddress(address)"( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + withdraw( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "withdraw(address,address,uint256)"( + _l1Receiver: string, + 
_l2Token: string, + _amount: BigNumberish, + overrides?: CallOverrides + ): Promise; + }; + + filters: {}; + + estimateGas: { + finalizeDeposit( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + "finalizeDeposit(address,address,address,uint256,bytes)"( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + l1Bridge(overrides?: CallOverrides): Promise; + + "l1Bridge()"(overrides?: CallOverrides): Promise; + + l1TokenAddress( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + "l1TokenAddress(address)"( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + "l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + withdraw( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "withdraw(address,address,uint256)"( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; + + populateTransaction: { + finalizeDeposit( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + "finalizeDeposit(address,address,address,uint256,bytes)"( + _l1Sender: string, + _l2Receiver: string, + _l1Token: string, + _amount: BigNumberish, + _data: BytesLike, + overrides?: Overrides + ): Promise; + + l1Bridge(overrides?: CallOverrides): Promise; + + "l1Bridge()"(overrides?: CallOverrides): Promise; + + l1TokenAddress( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + "l1TokenAddress(address)"( + _l2Token: string, + overrides?: CallOverrides + ): Promise; + + l2TokenAddress( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + 
"l2TokenAddress(address)"( + _l1Token: string, + overrides?: CallOverrides + ): Promise; + + withdraw( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + + "withdraw(address,address,uint256)"( + _l1Receiver: string, + _l2Token: string, + _amount: BigNumberish, + overrides?: Overrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IL2BridgeFactory.ts b/sdk/zksync-web3.js/typechain/IL2BridgeFactory.ts new file mode 100644 index 000000000000..94ffa3561842 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IL2BridgeFactory.ts @@ -0,0 +1,127 @@ +/* Autogenerated file. Do not edit manually. */ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IL2Bridge } from "./IL2Bridge"; + +export class IL2BridgeFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IL2Bridge { + return new Contract(address, _abi, signerOrProvider) as IL2Bridge; + } +} + +const _abi = [ + { + inputs: [ + { + internalType: "address", + name: "_l1Sender", + type: "address", + }, + { + internalType: "address", + name: "_l2Receiver", + type: "address", + }, + { + internalType: "address", + name: "_l1Token", + type: "address", + }, + { + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + { + internalType: "bytes", + name: "_data", + type: "bytes", + }, + ], + name: "finalizeDeposit", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [], + name: "l1Bridge", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l2Token", + type: "address", + }, + ], + name: "l1TokenAddress", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: 
"function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l1Token", + type: "address", + }, + ], + name: "l2TokenAddress", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_l1Receiver", + type: "address", + }, + { + internalType: "address", + name: "_l2Token", + type: "address", + }, + { + internalType: "uint256", + name: "_amount", + type: "uint256", + }, + ], + name: "withdraw", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, +]; diff --git a/sdk/zksync-web3.js/typechain/IZkSync.d.ts b/sdk/zksync-web3.js/typechain/IZkSync.d.ts new file mode 100644 index 000000000000..803c97f47e71 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IZkSync.d.ts @@ -0,0 +1,4973 @@ +/* Autogenerated file. Do not edit manually. */ +/* tslint:disable */ +/* eslint-disable */ + +import { + ethers, + EventFilter, + Signer, + BigNumber, + BigNumberish, + PopulatedTransaction, +} from "ethers"; +import { + Contract, + ContractTransaction, + Overrides, + PayableOverrides, + CallOverrides, +} from "@ethersproject/contracts"; +import { BytesLike } from "@ethersproject/bytes"; +import { Listener, Provider } from "@ethersproject/providers"; +import { FunctionFragment, EventFragment, Result } from "@ethersproject/abi"; + +interface IZkSyncInterface extends ethers.utils.Interface { + functions: { + "acceptGovernor()": FunctionFragment; + "cancelUpgradeProposal(bytes32)": FunctionFragment; + "commitBlocks(tuple,tuple[])": FunctionFragment; + "executeBlocks(tuple[])": FunctionFragment; + "executeUpgrade(tuple,bytes32)": FunctionFragment; + "facetAddress(bytes4)": FunctionFragment; + "facetAddresses()": FunctionFragment; + "facetFunctionSelectors(address)": FunctionFragment; + "facets()": FunctionFragment; + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])": FunctionFragment; + "freezeDiamond()": 
FunctionFragment; + "getCurrentProposalId()": FunctionFragment; + "getFirstUnprocessedPriorityTx()": FunctionFragment; + "getGovernor()": FunctionFragment; + "getL2BootloaderBytecodeHash()": FunctionFragment; + "getL2DefaultAccountBytecodeHash()": FunctionFragment; + "getPendingGovernor()": FunctionFragment; + "getPriorityQueueSize()": FunctionFragment; + "getPriorityTxMaxGasLimit()": FunctionFragment; + "getProposedUpgradeHash()": FunctionFragment; + "getProposedUpgradeTimestamp()": FunctionFragment; + "getSecurityCouncil()": FunctionFragment; + "getTotalBlocksCommitted()": FunctionFragment; + "getTotalBlocksExecuted()": FunctionFragment; + "getTotalBlocksVerified()": FunctionFragment; + "getTotalPriorityTxs()": FunctionFragment; + "getUpgradeProposalState()": FunctionFragment; + "getVerifier()": FunctionFragment; + "getVerifierParams()": FunctionFragment; + "isApprovedBySecurityCouncil()": FunctionFragment; + "isDiamondStorageFrozen()": FunctionFragment; + "isEthWithdrawalFinalized(uint256,uint256)": FunctionFragment; + "isFacetFreezable(address)": FunctionFragment; + "isFunctionFreezable(bytes4)": FunctionFragment; + "isValidator(address)": FunctionFragment; + "l2LogsRootHash(uint256)": FunctionFragment; + "l2TransactionBaseCost(uint256,uint256,uint256)": FunctionFragment; + "priorityQueueFrontOperation()": FunctionFragment; + "proposeShadowUpgrade(bytes32,uint40)": FunctionFragment; + "proposeTransparentUpgrade(tuple,uint40)": FunctionFragment; + "proveBlocks(tuple,tuple[],tuple)": FunctionFragment; + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)": FunctionFragment; + "proveL2LogInclusion(uint256,uint256,tuple,bytes32[])": FunctionFragment; + "proveL2MessageInclusion(uint256,uint256,tuple,bytes32[])": FunctionFragment; + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)": FunctionFragment; + "revertBlocks(uint256)": FunctionFragment; + "securityCouncilUpgradeApprove(bytes32)": FunctionFragment; + 
"serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)": FunctionFragment; + "setL2BootloaderBytecodeHash(bytes32)": FunctionFragment; + "setL2DefaultAccountBytecodeHash(bytes32)": FunctionFragment; + "setPendingGovernor(address)": FunctionFragment; + "setPorterAvailability(bool)": FunctionFragment; + "setPriorityTxMaxGasLimit(uint256)": FunctionFragment; + "setValidator(address,bool)": FunctionFragment; + "setVerifier(address)": FunctionFragment; + "setVerifierParams(tuple)": FunctionFragment; + "storedBlockHash(uint256)": FunctionFragment; + "unfreezeDiamond()": FunctionFragment; + "upgradeProposalHash(tuple,uint256,bytes32)": FunctionFragment; + }; + + encodeFunctionData( + functionFragment: "acceptGovernor", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "cancelUpgradeProposal", + values: [BytesLike] + ): string; + encodeFunctionData( + functionFragment: "commitBlocks", + values: [ + { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[] + ] + ): string; + encodeFunctionData( + functionFragment: "executeBlocks", + values: [ + { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[] + ] + ): 
string; + encodeFunctionData( + functionFragment: "executeUpgrade", + values: [ + { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + BytesLike + ] + ): string; + encodeFunctionData( + functionFragment: "facetAddress", + values: [BytesLike] + ): string; + encodeFunctionData( + functionFragment: "facetAddresses", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "facetFunctionSelectors", + values: [string] + ): string; + encodeFunctionData(functionFragment: "facets", values?: undefined): string; + encodeFunctionData( + functionFragment: "finalizeEthWithdrawal", + values: [BigNumberish, BigNumberish, BigNumberish, BytesLike, BytesLike[]] + ): string; + encodeFunctionData( + functionFragment: "freezeDiamond", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getCurrentProposalId", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getFirstUnprocessedPriorityTx", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getGovernor", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getL2BootloaderBytecodeHash", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getL2DefaultAccountBytecodeHash", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getPendingGovernor", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getPriorityQueueSize", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getPriorityTxMaxGasLimit", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getProposedUpgradeHash", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getProposedUpgradeTimestamp", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getSecurityCouncil", + values?: 
undefined + ): string; + encodeFunctionData( + functionFragment: "getTotalBlocksCommitted", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getTotalBlocksExecuted", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getTotalBlocksVerified", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getTotalPriorityTxs", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getUpgradeProposalState", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getVerifier", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "getVerifierParams", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "isApprovedBySecurityCouncil", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "isDiamondStorageFrozen", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "isEthWithdrawalFinalized", + values: [BigNumberish, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "isFacetFreezable", + values: [string] + ): string; + encodeFunctionData( + functionFragment: "isFunctionFreezable", + values: [BytesLike] + ): string; + encodeFunctionData(functionFragment: "isValidator", values: [string]): string; + encodeFunctionData( + functionFragment: "l2LogsRootHash", + values: [BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "l2TransactionBaseCost", + values: [BigNumberish, BigNumberish, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "priorityQueueFrontOperation", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "proposeShadowUpgrade", + values: [BytesLike, BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "proposeTransparentUpgrade", + values: [ + { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: 
string; + initCalldata: BytesLike; + }, + BigNumberish + ] + ): string; + encodeFunctionData( + functionFragment: "proveBlocks", + values: [ + { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + } + ] + ): string; + encodeFunctionData( + functionFragment: "proveL1ToL2TransactionStatus", + values: [ + BytesLike, + BigNumberish, + BigNumberish, + BigNumberish, + BytesLike[], + BigNumberish + ] + ): string; + encodeFunctionData( + functionFragment: "proveL2LogInclusion", + values: [ + BigNumberish, + BigNumberish, + { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + BytesLike[] + ] + ): string; + encodeFunctionData( + functionFragment: "proveL2MessageInclusion", + values: [ + BigNumberish, + BigNumberish, + { txNumberInBlock: BigNumberish; sender: string; data: BytesLike }, + BytesLike[] + ] + ): string; + encodeFunctionData( + functionFragment: "requestL2Transaction", + values: [ + string, + BigNumberish, + BytesLike, + BigNumberish, + BigNumberish, + BytesLike[], + string + ] + ): string; + encodeFunctionData( + functionFragment: "revertBlocks", + values: [BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "securityCouncilUpgradeApprove", + values: [BytesLike] + ): string; + encodeFunctionData( + functionFragment: "serializeL2Transaction", + values: [ + BigNumberish, + BigNumberish, + string, + string, + 
BytesLike, + BigNumberish, + BigNumberish, + BytesLike[], + BigNumberish, + string + ] + ): string; + encodeFunctionData( + functionFragment: "setL2BootloaderBytecodeHash", + values: [BytesLike] + ): string; + encodeFunctionData( + functionFragment: "setL2DefaultAccountBytecodeHash", + values: [BytesLike] + ): string; + encodeFunctionData( + functionFragment: "setPendingGovernor", + values: [string] + ): string; + encodeFunctionData( + functionFragment: "setPorterAvailability", + values: [boolean] + ): string; + encodeFunctionData( + functionFragment: "setPriorityTxMaxGasLimit", + values: [BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "setValidator", + values: [string, boolean] + ): string; + encodeFunctionData(functionFragment: "setVerifier", values: [string]): string; + encodeFunctionData( + functionFragment: "setVerifierParams", + values: [ + { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + } + ] + ): string; + encodeFunctionData( + functionFragment: "storedBlockHash", + values: [BigNumberish] + ): string; + encodeFunctionData( + functionFragment: "unfreezeDiamond", + values?: undefined + ): string; + encodeFunctionData( + functionFragment: "upgradeProposalHash", + values: [ + { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + BigNumberish, + BytesLike + ] + ): string; + + decodeFunctionResult( + functionFragment: "acceptGovernor", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "cancelUpgradeProposal", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "commitBlocks", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "executeBlocks", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "executeUpgrade", + data: BytesLike + ): Result; + 
decodeFunctionResult( + functionFragment: "facetAddress", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "facetAddresses", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "facetFunctionSelectors", + data: BytesLike + ): Result; + decodeFunctionResult(functionFragment: "facets", data: BytesLike): Result; + decodeFunctionResult( + functionFragment: "finalizeEthWithdrawal", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "freezeDiamond", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getCurrentProposalId", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getFirstUnprocessedPriorityTx", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getGovernor", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getL2BootloaderBytecodeHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getL2DefaultAccountBytecodeHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getPendingGovernor", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getPriorityQueueSize", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getPriorityTxMaxGasLimit", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getProposedUpgradeHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getProposedUpgradeTimestamp", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getSecurityCouncil", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getTotalBlocksCommitted", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getTotalBlocksExecuted", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getTotalBlocksVerified", + data: BytesLike + ): Result; + decodeFunctionResult( + 
functionFragment: "getTotalPriorityTxs", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getUpgradeProposalState", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getVerifier", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "getVerifierParams", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isApprovedBySecurityCouncil", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isDiamondStorageFrozen", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isEthWithdrawalFinalized", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isFacetFreezable", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isFunctionFreezable", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "isValidator", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "l2LogsRootHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "l2TransactionBaseCost", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "priorityQueueFrontOperation", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proposeShadowUpgrade", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proposeTransparentUpgrade", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proveBlocks", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proveL1ToL2TransactionStatus", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proveL2LogInclusion", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "proveL2MessageInclusion", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "requestL2Transaction", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: 
"revertBlocks", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "securityCouncilUpgradeApprove", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "serializeL2Transaction", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setL2BootloaderBytecodeHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setL2DefaultAccountBytecodeHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPendingGovernor", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPorterAvailability", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setPriorityTxMaxGasLimit", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setValidator", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setVerifier", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "setVerifierParams", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "storedBlockHash", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "unfreezeDiamond", + data: BytesLike + ): Result; + decodeFunctionResult( + functionFragment: "upgradeProposalHash", + data: BytesLike + ): Result; + + events: { + "BlockCommit(uint256,bytes32,bytes32)": EventFragment; + "BlockExecution(uint256,bytes32,bytes32)": EventFragment; + "BlocksRevert(uint256,uint256,uint256)": EventFragment; + "BlocksVerification(uint256,uint256)": EventFragment; + "CancelUpgradeProposal(uint256,bytes32)": EventFragment; + "EthWithdrawalFinalized(address,uint256)": EventFragment; + "ExecuteUpgrade(uint256,bytes32,bytes32)": EventFragment; + "Freeze()": EventFragment; + "IsPorterAvailableStatusUpdate(bool)": EventFragment; + "NewGovernor(address,address)": EventFragment; + "NewL2BootloaderBytecodeHash(bytes32,bytes32)": EventFragment; + 
"NewL2DefaultAccountBytecodeHash(bytes32,bytes32)": EventFragment; + "NewPendingGovernor(address,address)": EventFragment; + "NewPriorityRequest(uint256,bytes32,uint64,tuple,bytes[])": EventFragment; + "NewPriorityTxMaxGasLimit(uint256,uint256)": EventFragment; + "NewVerifier(address,address)": EventFragment; + "NewVerifierParams(tuple,tuple)": EventFragment; + "ProposeShadowUpgrade(uint256,bytes32)": EventFragment; + "ProposeTransparentUpgrade(tuple,uint256,bytes32)": EventFragment; + "SecurityCouncilUpgradeApprove(uint256,bytes32)": EventFragment; + "Unfreeze()": EventFragment; + "ValidatorStatusUpdate(address,bool)": EventFragment; + }; + + getEvent(nameOrSignatureOrTopic: "BlockCommit"): EventFragment; + getEvent(nameOrSignatureOrTopic: "BlockExecution"): EventFragment; + getEvent(nameOrSignatureOrTopic: "BlocksRevert"): EventFragment; + getEvent(nameOrSignatureOrTopic: "BlocksVerification"): EventFragment; + getEvent(nameOrSignatureOrTopic: "CancelUpgradeProposal"): EventFragment; + getEvent(nameOrSignatureOrTopic: "EthWithdrawalFinalized"): EventFragment; + getEvent(nameOrSignatureOrTopic: "ExecuteUpgrade"): EventFragment; + getEvent(nameOrSignatureOrTopic: "Freeze"): EventFragment; + getEvent( + nameOrSignatureOrTopic: "IsPorterAvailableStatusUpdate" + ): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewGovernor"): EventFragment; + getEvent( + nameOrSignatureOrTopic: "NewL2BootloaderBytecodeHash" + ): EventFragment; + getEvent( + nameOrSignatureOrTopic: "NewL2DefaultAccountBytecodeHash" + ): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewPendingGovernor"): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewPriorityRequest"): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewPriorityTxMaxGasLimit"): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewVerifier"): EventFragment; + getEvent(nameOrSignatureOrTopic: "NewVerifierParams"): EventFragment; + getEvent(nameOrSignatureOrTopic: "ProposeShadowUpgrade"): EventFragment; + 
getEvent(nameOrSignatureOrTopic: "ProposeTransparentUpgrade"): EventFragment; + getEvent( + nameOrSignatureOrTopic: "SecurityCouncilUpgradeApprove" + ): EventFragment; + getEvent(nameOrSignatureOrTopic: "Unfreeze"): EventFragment; + getEvent(nameOrSignatureOrTopic: "ValidatorStatusUpdate"): EventFragment; +} + +export class IZkSync extends Contract { + connect(signerOrProvider: Signer | Provider | string): this; + attach(addressOrName: string): this; + deployed(): Promise; + + on(event: EventFilter | string, listener: Listener): this; + once(event: EventFilter | string, listener: Listener): this; + addListener(eventName: EventFilter | string, listener: Listener): this; + removeAllListeners(eventName: EventFilter | string): this; + removeListener(eventName: any, listener: Listener): this; + + interface: IZkSyncInterface; + + functions: { + acceptGovernor(overrides?: Overrides): Promise; + + "acceptGovernor()"(overrides?: Overrides): Promise; + + cancelUpgradeProposal( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "cancelUpgradeProposal(bytes32)"( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + commitBlocks( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + 
"commitBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[])"( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + executeBlocks( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + "executeBlocks(tuple[])"( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + executeUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + "executeUpgrade((tuple[],address,bytes),bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + 
initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + facetAddress( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise<{ + facet: string; + 0: string; + }>; + + "facetAddress(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise<{ + facet: string; + 0: string; + }>; + + facetAddresses(overrides?: CallOverrides): Promise<{ + facets: string[]; + 0: string[]; + }>; + + "facetAddresses()"(overrides?: CallOverrides): Promise<{ + facets: string[]; + 0: string[]; + }>; + + facetFunctionSelectors( + _facet: string, + overrides?: CallOverrides + ): Promise<{ + 0: string[]; + }>; + + "facetFunctionSelectors(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise<{ + 0: string[]; + }>; + + facets(overrides?: CallOverrides): Promise<{ + 0: { addr: string; selectors: string[]; 0: string; 1: string[] }[]; + }>; + + "facets()"(overrides?: CallOverrides): Promise<{ + 0: { addr: string; selectors: string[]; 0: string; 1: string[] }[]; + }>; + + finalizeEthWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + freezeDiamond(overrides?: Overrides): Promise; + + "freezeDiamond()"(overrides?: Overrides): Promise; + + getCurrentProposalId(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getCurrentProposalId()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getFirstUnprocessedPriorityTx(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getFirstUnprocessedPriorityTx()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + 
getGovernor(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getGovernor()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getL2BootloaderBytecodeHash(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getL2BootloaderBytecodeHash()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getL2DefaultAccountBytecodeHash(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getL2DefaultAccountBytecodeHash()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getPendingGovernor(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getPendingGovernor()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getPriorityQueueSize(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getPriorityQueueSize()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getPriorityTxMaxGasLimit(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getPriorityTxMaxGasLimit()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getProposedUpgradeHash(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getProposedUpgradeHash()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getProposedUpgradeTimestamp(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getProposedUpgradeTimestamp()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getSecurityCouncil(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getSecurityCouncil()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getTotalBlocksCommitted(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getTotalBlocksCommitted()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getTotalBlocksExecuted(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getTotalBlocksExecuted()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getTotalBlocksVerified(overrides?: CallOverrides): Promise<{ + 0: 
BigNumber; + }>; + + "getTotalBlocksVerified()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getTotalPriorityTxs(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + "getTotalPriorityTxs()"(overrides?: CallOverrides): Promise<{ + 0: BigNumber; + }>; + + getUpgradeProposalState(overrides?: CallOverrides): Promise<{ + 0: number; + }>; + + "getUpgradeProposalState()"(overrides?: CallOverrides): Promise<{ + 0: number; + }>; + + getVerifier(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + "getVerifier()"(overrides?: CallOverrides): Promise<{ + 0: string; + }>; + + getVerifierParams(overrides?: CallOverrides): Promise<{ + 0: { + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }; + }>; + + "getVerifierParams()"(overrides?: CallOverrides): Promise<{ + 0: { + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }; + }>; + + isApprovedBySecurityCouncil(overrides?: CallOverrides): Promise<{ + 0: boolean; + }>; + + "isApprovedBySecurityCouncil()"(overrides?: CallOverrides): Promise<{ + 0: boolean; + }>; + + isDiamondStorageFrozen(overrides?: CallOverrides): Promise<{ + 0: boolean; + }>; + + "isDiamondStorageFrozen()"(overrides?: CallOverrides): Promise<{ + 0: boolean; + }>; + + isEthWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "isEthWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + isFacetFreezable( + _facet: string, + overrides?: CallOverrides + ): Promise<{ + isFreezable: boolean; + 0: boolean; + }>; + + "isFacetFreezable(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise<{ + isFreezable: 
boolean; + 0: boolean; + }>; + + isFunctionFreezable( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "isFunctionFreezable(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + isValidator( + _address: string, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "isValidator(address)"( + _address: string, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + l2LogsRootHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + hash: string; + 0: string; + }>; + + "l2LogsRootHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + hash: string; + 0: string; + }>; + + l2TransactionBaseCost( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + "l2TransactionBaseCost(uint256,uint256,uint256)"( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: BigNumber; + }>; + + priorityQueueFrontOperation(overrides?: CallOverrides): Promise<{ + 0: { + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }; + }>; + + "priorityQueueFrontOperation()"(overrides?: CallOverrides): Promise<{ + 0: { + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }; + }>; + + proposeShadowUpgrade( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeShadowUpgrade(bytes32,uint40)"( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proposeTransparentUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + 
}[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeTransparentUpgrade((tuple[],address,bytes),uint40)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proveBlocks( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + "proveBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[],(uint256[],uint256[]))"( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + proveL1ToL2TransactionStatus( + _l2TxHash: 
BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)"( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + proveL2LogInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "proveL2LogInclusion(uint256,uint256,(uint8,bool,uint16,address,bytes32,bytes32),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + proveL2MessageInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + "proveL2MessageInclusion(uint256,uint256,(uint16,address,bytes),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise<{ + 0: boolean; + }>; + + requestL2Transaction( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, 
+ _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)"( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + revertBlocks( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + "revertBlocks(uint256)"( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + securityCouncilUpgradeApprove( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + "securityCouncilUpgradeApprove(bytes32)"( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + serializeL2Transaction( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + 0: { + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }; + }>; + + 
"serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)"( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + 0: { + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }; + }>; + + setL2BootloaderBytecodeHash( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2BootloaderBytecodeHash(bytes32)"( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setL2DefaultAccountBytecodeHash( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2DefaultAccountBytecodeHash(bytes32)"( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setPendingGovernor( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + "setPendingGovernor(address)"( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + setPorterAvailability( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + "setPorterAvailability(bool)"( + _zkPorterIsAvailable: boolean, + 
overrides?: Overrides + ): Promise; + + setPriorityTxMaxGasLimit( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + "setPriorityTxMaxGasLimit(uint256)"( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + setValidator( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + "setValidator(address,bool)"( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + setVerifier( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + "setVerifier(address)"( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + setVerifierParams( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + "setVerifierParams((bytes32,bytes32,bytes32))"( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + storedBlockHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + "storedBlockHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + unfreezeDiamond(overrides?: Overrides): Promise; + + "unfreezeDiamond()"(overrides?: Overrides): Promise; + + upgradeProposalHash( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + + "upgradeProposalHash((tuple[],address,bytes),uint256,bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + 
initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise<{ + 0: string; + }>; + }; + + acceptGovernor(overrides?: Overrides): Promise; + + "acceptGovernor()"(overrides?: Overrides): Promise; + + cancelUpgradeProposal( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "cancelUpgradeProposal(bytes32)"( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + commitBlocks( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + "commitBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[])"( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + 
factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + executeBlocks( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + "executeBlocks(tuple[])"( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + executeUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + "executeUpgrade((tuple[],address,bytes),bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + facetAddress( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "facetAddress(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + facetAddresses(overrides?: CallOverrides): Promise; + + "facetAddresses()"(overrides?: CallOverrides): Promise; + + facetFunctionSelectors( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "facetFunctionSelectors(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + facets( + overrides?: CallOverrides + ): Promise<{ addr: string; selectors: string[]; 0: string; 1: string[] }[]>; + + "facets()"( + overrides?: CallOverrides + ): Promise<{ addr: string; 
selectors: string[]; 0: string; 1: string[] }[]>; + + finalizeEthWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + freezeDiamond(overrides?: Overrides): Promise; + + "freezeDiamond()"(overrides?: Overrides): Promise; + + getCurrentProposalId(overrides?: CallOverrides): Promise; + + "getCurrentProposalId()"(overrides?: CallOverrides): Promise; + + getFirstUnprocessedPriorityTx(overrides?: CallOverrides): Promise; + + "getFirstUnprocessedPriorityTx()"( + overrides?: CallOverrides + ): Promise; + + getGovernor(overrides?: CallOverrides): Promise; + + "getGovernor()"(overrides?: CallOverrides): Promise; + + getL2BootloaderBytecodeHash(overrides?: CallOverrides): Promise; + + "getL2BootloaderBytecodeHash()"(overrides?: CallOverrides): Promise; + + getL2DefaultAccountBytecodeHash(overrides?: CallOverrides): Promise; + + "getL2DefaultAccountBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getPendingGovernor(overrides?: CallOverrides): Promise; + + "getPendingGovernor()"(overrides?: CallOverrides): Promise; + + getPriorityQueueSize(overrides?: CallOverrides): Promise; + + "getPriorityQueueSize()"(overrides?: CallOverrides): Promise; + + getPriorityTxMaxGasLimit(overrides?: CallOverrides): Promise; + + "getPriorityTxMaxGasLimit()"(overrides?: CallOverrides): Promise; + + getProposedUpgradeHash(overrides?: CallOverrides): Promise; + + "getProposedUpgradeHash()"(overrides?: CallOverrides): Promise; + + getProposedUpgradeTimestamp(overrides?: CallOverrides): Promise; + + "getProposedUpgradeTimestamp()"( + overrides?: CallOverrides + ): Promise; + + 
getSecurityCouncil(overrides?: CallOverrides): Promise; + + "getSecurityCouncil()"(overrides?: CallOverrides): Promise; + + getTotalBlocksCommitted(overrides?: CallOverrides): Promise; + + "getTotalBlocksCommitted()"(overrides?: CallOverrides): Promise; + + getTotalBlocksExecuted(overrides?: CallOverrides): Promise; + + "getTotalBlocksExecuted()"(overrides?: CallOverrides): Promise; + + getTotalBlocksVerified(overrides?: CallOverrides): Promise; + + "getTotalBlocksVerified()"(overrides?: CallOverrides): Promise; + + getTotalPriorityTxs(overrides?: CallOverrides): Promise; + + "getTotalPriorityTxs()"(overrides?: CallOverrides): Promise; + + getUpgradeProposalState(overrides?: CallOverrides): Promise; + + "getUpgradeProposalState()"(overrides?: CallOverrides): Promise; + + getVerifier(overrides?: CallOverrides): Promise; + + "getVerifier()"(overrides?: CallOverrides): Promise; + + getVerifierParams( + overrides?: CallOverrides + ): Promise<{ + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }>; + + "getVerifierParams()"( + overrides?: CallOverrides + ): Promise<{ + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }>; + + isApprovedBySecurityCouncil(overrides?: CallOverrides): Promise; + + "isApprovedBySecurityCouncil()"(overrides?: CallOverrides): Promise; + + isDiamondStorageFrozen(overrides?: CallOverrides): Promise; + + "isDiamondStorageFrozen()"(overrides?: CallOverrides): Promise; + + isEthWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isEthWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + isFacetFreezable(_facet: string, overrides?: CallOverrides): Promise; + + 
"isFacetFreezable(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + isFunctionFreezable( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "isFunctionFreezable(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + isValidator(_address: string, overrides?: CallOverrides): Promise; + + "isValidator(address)"( + _address: string, + overrides?: CallOverrides + ): Promise; + + l2LogsRootHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2LogsRootHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TransactionBaseCost( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2TransactionBaseCost(uint256,uint256,uint256)"( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + priorityQueueFrontOperation( + overrides?: CallOverrides + ): Promise<{ + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }>; + + "priorityQueueFrontOperation()"( + overrides?: CallOverrides + ): Promise<{ + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }>; + + proposeShadowUpgrade( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeShadowUpgrade(bytes32,uint40)"( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proposeTransparentUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + 
"proposeTransparentUpgrade((tuple[],address,bytes),uint40)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proveBlocks( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + "proveBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[],(uint256[],uint256[]))"( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + proveL1ToL2TransactionStatus( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: 
BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)"( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + proveL2LogInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2LogInclusion(uint256,uint256,(uint8,bool,uint16,address,bytes32,bytes32),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + proveL2MessageInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2MessageInclusion(uint256,uint256,(uint16,address,bytes),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + requestL2Transaction( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)"( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: 
BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + revertBlocks( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + "revertBlocks(uint256)"( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + securityCouncilUpgradeApprove( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + "securityCouncilUpgradeApprove(bytes32)"( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + serializeL2Transaction( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }>; + + "serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)"( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: 
BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }>; + + setL2BootloaderBytecodeHash( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2BootloaderBytecodeHash(bytes32)"( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setL2DefaultAccountBytecodeHash( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2DefaultAccountBytecodeHash(bytes32)"( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setPendingGovernor( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + "setPendingGovernor(address)"( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + setPorterAvailability( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + "setPorterAvailability(bool)"( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + setPriorityTxMaxGasLimit( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + "setPriorityTxMaxGasLimit(uint256)"( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + setValidator( + _validator: string, + _active: boolean, + overrides?: Overrides + 
): Promise; + + "setValidator(address,bool)"( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + setVerifier( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + "setVerifier(address)"( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + setVerifierParams( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + "setVerifierParams((bytes32,bytes32,bytes32))"( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + storedBlockHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "storedBlockHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + unfreezeDiamond(overrides?: Overrides): Promise; + + "unfreezeDiamond()"(overrides?: Overrides): Promise; + + upgradeProposalHash( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + + "upgradeProposalHash((tuple[],address,bytes),uint256,bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + + callStatic: { + acceptGovernor(overrides?: CallOverrides): Promise; + + "acceptGovernor()"(overrides?: CallOverrides): Promise; + + cancelUpgradeProposal( + _proposedUpgradeHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + "cancelUpgradeProposal(bytes32)"( + _proposedUpgradeHash: 
BytesLike, + overrides?: CallOverrides + ): Promise; + + commitBlocks( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: CallOverrides + ): Promise; + + "commitBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[])"( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: CallOverrides + ): Promise; + + executeBlocks( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: CallOverrides + ): Promise; + + 
"executeBlocks(tuple[])"( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: CallOverrides + ): Promise; + + executeUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: CallOverrides + ): Promise; + + "executeUpgrade((tuple[],address,bytes),bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: CallOverrides + ): Promise; + + facetAddress( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "facetAddress(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + facetAddresses(overrides?: CallOverrides): Promise; + + "facetAddresses()"(overrides?: CallOverrides): Promise; + + facetFunctionSelectors( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "facetFunctionSelectors(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + facets( + overrides?: CallOverrides + ): Promise<{ addr: string; selectors: string[]; 0: string; 1: string[] }[]>; + + "facets()"( + overrides?: CallOverrides + ): Promise<{ addr: string; selectors: string[]; 0: string; 1: string[] }[]>; + + finalizeEthWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: 
BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + freezeDiamond(overrides?: CallOverrides): Promise; + + "freezeDiamond()"(overrides?: CallOverrides): Promise; + + getCurrentProposalId(overrides?: CallOverrides): Promise; + + "getCurrentProposalId()"(overrides?: CallOverrides): Promise; + + getFirstUnprocessedPriorityTx( + overrides?: CallOverrides + ): Promise; + + "getFirstUnprocessedPriorityTx()"( + overrides?: CallOverrides + ): Promise; + + getGovernor(overrides?: CallOverrides): Promise; + + "getGovernor()"(overrides?: CallOverrides): Promise; + + getL2BootloaderBytecodeHash(overrides?: CallOverrides): Promise; + + "getL2BootloaderBytecodeHash()"(overrides?: CallOverrides): Promise; + + getL2DefaultAccountBytecodeHash(overrides?: CallOverrides): Promise; + + "getL2DefaultAccountBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getPendingGovernor(overrides?: CallOverrides): Promise; + + "getPendingGovernor()"(overrides?: CallOverrides): Promise; + + getPriorityQueueSize(overrides?: CallOverrides): Promise; + + "getPriorityQueueSize()"(overrides?: CallOverrides): Promise; + + getPriorityTxMaxGasLimit(overrides?: CallOverrides): Promise; + + "getPriorityTxMaxGasLimit()"( + overrides?: CallOverrides + ): Promise; + + getProposedUpgradeHash(overrides?: CallOverrides): Promise; + + "getProposedUpgradeHash()"(overrides?: CallOverrides): Promise; + + getProposedUpgradeTimestamp(overrides?: CallOverrides): Promise; + + "getProposedUpgradeTimestamp()"( + overrides?: CallOverrides + ): Promise; + + getSecurityCouncil(overrides?: CallOverrides): Promise; + + "getSecurityCouncil()"(overrides?: CallOverrides): Promise; + + getTotalBlocksCommitted(overrides?: CallOverrides): Promise; + + "getTotalBlocksCommitted()"(overrides?: CallOverrides): Promise; + + getTotalBlocksExecuted(overrides?: CallOverrides): Promise; + + "getTotalBlocksExecuted()"(overrides?: 
CallOverrides): Promise; + + getTotalBlocksVerified(overrides?: CallOverrides): Promise; + + "getTotalBlocksVerified()"(overrides?: CallOverrides): Promise; + + getTotalPriorityTxs(overrides?: CallOverrides): Promise; + + "getTotalPriorityTxs()"(overrides?: CallOverrides): Promise; + + getUpgradeProposalState(overrides?: CallOverrides): Promise; + + "getUpgradeProposalState()"(overrides?: CallOverrides): Promise; + + getVerifier(overrides?: CallOverrides): Promise; + + "getVerifier()"(overrides?: CallOverrides): Promise; + + getVerifierParams( + overrides?: CallOverrides + ): Promise<{ + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }>; + + "getVerifierParams()"( + overrides?: CallOverrides + ): Promise<{ + recursionNodeLevelVkHash: string; + recursionLeafLevelVkHash: string; + recursionCircuitsSetVksHash: string; + 0: string; + 1: string; + 2: string; + }>; + + isApprovedBySecurityCouncil(overrides?: CallOverrides): Promise; + + "isApprovedBySecurityCouncil()"( + overrides?: CallOverrides + ): Promise; + + isDiamondStorageFrozen(overrides?: CallOverrides): Promise; + + "isDiamondStorageFrozen()"(overrides?: CallOverrides): Promise; + + isEthWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isEthWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + isFacetFreezable( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "isFacetFreezable(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + isFunctionFreezable( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "isFunctionFreezable(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + isValidator(_address: string, overrides?: CallOverrides): Promise; + + 
"isValidator(address)"( + _address: string, + overrides?: CallOverrides + ): Promise; + + l2LogsRootHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2LogsRootHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TransactionBaseCost( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2TransactionBaseCost(uint256,uint256,uint256)"( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + priorityQueueFrontOperation( + overrides?: CallOverrides + ): Promise<{ + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }>; + + "priorityQueueFrontOperation()"( + overrides?: CallOverrides + ): Promise<{ + canonicalTxHash: string; + expirationTimestamp: BigNumber; + layer2Tip: BigNumber; + 0: string; + 1: BigNumber; + 2: BigNumber; + }>; + + proposeShadowUpgrade( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proposeShadowUpgrade(bytes32,uint40)"( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: CallOverrides + ): Promise; + + proposeTransparentUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proposeTransparentUpgrade((tuple[],address,bytes),uint40)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: CallOverrides + ): Promise; + + proveBlocks( + _prevBlock: { + blockNumber: 
BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: CallOverrides + ): Promise; + + "proveBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[],(uint256[],uint256[]))"( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: CallOverrides + ): Promise; + + proveL1ToL2TransactionStatus( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)"( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: 
CallOverrides + ): Promise; + + proveL2LogInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2LogInclusion(uint256,uint256,(uint8,bool,uint16,address,bytes32,bytes32),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + proveL2MessageInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2MessageInclusion(uint256,uint256,(uint16,address,bytes),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + requestL2Transaction( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)"( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + revertBlocks( + _newLastBlock: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "revertBlocks(uint256)"( + _newLastBlock: BigNumberish, + overrides?: CallOverrides + ): 
Promise; + + securityCouncilUpgradeApprove( + _upgradeProposalHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + "securityCouncilUpgradeApprove(bytes32)"( + _upgradeProposalHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + serializeL2Transaction( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }>; + + "serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)"( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise<{ + txType: BigNumber; + from: BigNumber; + to: BigNumber; + gasLimit: BigNumber; + gasPerPubdataByteLimit: BigNumber; + maxFeePerGas: BigNumber; + maxPriorityFeePerGas: BigNumber; + paymaster: BigNumber; + nonce: BigNumber; + value: BigNumber; + reserved: [BigNumber, 
BigNumber, BigNumber, BigNumber]; + data: string; + signature: string; + factoryDeps: BigNumber[]; + paymasterInput: string; + reservedDynamic: string; + 0: BigNumber; + 1: BigNumber; + 2: BigNumber; + 3: BigNumber; + 4: BigNumber; + 5: BigNumber; + 6: BigNumber; + 7: BigNumber; + 8: BigNumber; + 9: BigNumber; + 10: [BigNumber, BigNumber, BigNumber, BigNumber]; + 11: string; + 12: string; + 13: BigNumber[]; + 14: string; + 15: string; + }>; + + setL2BootloaderBytecodeHash( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + "setL2BootloaderBytecodeHash(bytes32)"( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + setL2DefaultAccountBytecodeHash( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + "setL2DefaultAccountBytecodeHash(bytes32)"( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: CallOverrides + ): Promise; + + setPendingGovernor( + _newPendingGovernor: string, + overrides?: CallOverrides + ): Promise; + + "setPendingGovernor(address)"( + _newPendingGovernor: string, + overrides?: CallOverrides + ): Promise; + + setPorterAvailability( + _zkPorterIsAvailable: boolean, + overrides?: CallOverrides + ): Promise; + + "setPorterAvailability(bool)"( + _zkPorterIsAvailable: boolean, + overrides?: CallOverrides + ): Promise; + + setPriorityTxMaxGasLimit( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "setPriorityTxMaxGasLimit(uint256)"( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + setValidator( + _validator: string, + _active: boolean, + overrides?: CallOverrides + ): Promise; + + "setValidator(address,bool)"( + _validator: string, + _active: boolean, + overrides?: CallOverrides + ): Promise; + + setVerifier(_newVerifier: string, overrides?: CallOverrides): Promise; + + "setVerifier(address)"( + _newVerifier: string, + overrides?: CallOverrides + ): Promise; + + 
setVerifierParams( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: CallOverrides + ): Promise; + + "setVerifierParams((bytes32,bytes32,bytes32))"( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: CallOverrides + ): Promise; + + storedBlockHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "storedBlockHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + unfreezeDiamond(overrides?: CallOverrides): Promise; + + "unfreezeDiamond()"(overrides?: CallOverrides): Promise; + + upgradeProposalHash( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + + "upgradeProposalHash((tuple[],address,bytes),uint256,bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + }; + + filters: { + BlockCommit( + blockNumber: BigNumberish | null, + blockHash: BytesLike | null, + commitment: BytesLike | null + ): EventFilter; + + BlockExecution( + blockNumber: BigNumberish | null, + blockHash: BytesLike | null, + commitment: BytesLike | null + ): EventFilter; + + BlocksRevert( + totalBlocksCommitted: null, + totalBlocksVerified: null, + totalBlocksExecuted: null + ): EventFilter; + + BlocksVerification( + previousLastVerifiedBlock: BigNumberish | null, + currentLastVerifiedBlock: BigNumberish | null + ): EventFilter; + + CancelUpgradeProposal( + 
proposalId: BigNumberish | null, + proposalHash: BytesLike | null + ): EventFilter; + + EthWithdrawalFinalized(to: string | null, amount: null): EventFilter; + + ExecuteUpgrade( + proposalId: BigNumberish | null, + proposalHash: BytesLike | null, + proposalSalt: null + ): EventFilter; + + Freeze(): EventFilter; + + IsPorterAvailableStatusUpdate(isPorterAvailable: null): EventFilter; + + NewGovernor( + oldGovernor: string | null, + newGovernor: string | null + ): EventFilter; + + NewL2BootloaderBytecodeHash( + previousBytecodeHash: BytesLike | null, + newBytecodeHash: BytesLike | null + ): EventFilter; + + NewL2DefaultAccountBytecodeHash( + previousBytecodeHash: BytesLike | null, + newBytecodeHash: BytesLike | null + ): EventFilter; + + NewPendingGovernor( + oldPendingGovernor: string | null, + newPendingGovernor: string | null + ): EventFilter; + + NewPriorityRequest( + txId: null, + txHash: null, + expirationTimestamp: null, + transaction: null, + factoryDeps: null + ): EventFilter; + + NewPriorityTxMaxGasLimit( + oldPriorityTxMaxGasLimit: null, + newPriorityTxMaxGasLimit: null + ): EventFilter; + + NewVerifier( + oldVerifier: string | null, + newVerifier: string | null + ): EventFilter; + + NewVerifierParams( + oldVerifierParams: null, + newVerifierParams: null + ): EventFilter; + + ProposeShadowUpgrade( + proposalId: BigNumberish | null, + proposalHash: BytesLike | null + ): EventFilter; + + ProposeTransparentUpgrade( + diamondCut: null, + proposalId: BigNumberish | null, + proposalSalt: null + ): EventFilter; + + SecurityCouncilUpgradeApprove( + proposalId: BigNumberish | null, + proposalHash: BytesLike | null + ): EventFilter; + + Unfreeze(): EventFilter; + + ValidatorStatusUpdate( + validatorAddress: string | null, + isActive: null + ): EventFilter; + }; + + estimateGas: { + acceptGovernor(overrides?: Overrides): Promise; + + "acceptGovernor()"(overrides?: Overrides): Promise; + + cancelUpgradeProposal( + _proposedUpgradeHash: BytesLike, + overrides?: 
Overrides + ): Promise; + + "cancelUpgradeProposal(bytes32)"( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + commitBlocks( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + "commitBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[])"( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + executeBlocks( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + 
commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + "executeBlocks(tuple[])"( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + executeUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + "executeUpgrade((tuple[],address,bytes),bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + facetAddress( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "facetAddress(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + facetAddresses(overrides?: CallOverrides): Promise; + + "facetAddresses()"(overrides?: CallOverrides): Promise; + + facetFunctionSelectors( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "facetFunctionSelectors(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + facets(overrides?: CallOverrides): Promise; + + "facets()"(overrides?: CallOverrides): Promise; + + finalizeEthWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: 
BytesLike[], + overrides?: Overrides + ): Promise; + + freezeDiamond(overrides?: Overrides): Promise; + + "freezeDiamond()"(overrides?: Overrides): Promise; + + getCurrentProposalId(overrides?: CallOverrides): Promise; + + "getCurrentProposalId()"(overrides?: CallOverrides): Promise; + + getFirstUnprocessedPriorityTx( + overrides?: CallOverrides + ): Promise; + + "getFirstUnprocessedPriorityTx()"( + overrides?: CallOverrides + ): Promise; + + getGovernor(overrides?: CallOverrides): Promise; + + "getGovernor()"(overrides?: CallOverrides): Promise; + + getL2BootloaderBytecodeHash(overrides?: CallOverrides): Promise; + + "getL2BootloaderBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getL2DefaultAccountBytecodeHash( + overrides?: CallOverrides + ): Promise; + + "getL2DefaultAccountBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getPendingGovernor(overrides?: CallOverrides): Promise; + + "getPendingGovernor()"(overrides?: CallOverrides): Promise; + + getPriorityQueueSize(overrides?: CallOverrides): Promise; + + "getPriorityQueueSize()"(overrides?: CallOverrides): Promise; + + getPriorityTxMaxGasLimit(overrides?: CallOverrides): Promise; + + "getPriorityTxMaxGasLimit()"( + overrides?: CallOverrides + ): Promise; + + getProposedUpgradeHash(overrides?: CallOverrides): Promise; + + "getProposedUpgradeHash()"(overrides?: CallOverrides): Promise; + + getProposedUpgradeTimestamp(overrides?: CallOverrides): Promise; + + "getProposedUpgradeTimestamp()"( + overrides?: CallOverrides + ): Promise; + + getSecurityCouncil(overrides?: CallOverrides): Promise; + + "getSecurityCouncil()"(overrides?: CallOverrides): Promise; + + getTotalBlocksCommitted(overrides?: CallOverrides): Promise; + + "getTotalBlocksCommitted()"(overrides?: CallOverrides): Promise; + + getTotalBlocksExecuted(overrides?: CallOverrides): Promise; + + "getTotalBlocksExecuted()"(overrides?: CallOverrides): Promise; + + getTotalBlocksVerified(overrides?: CallOverrides): Promise; + + 
"getTotalBlocksVerified()"(overrides?: CallOverrides): Promise; + + getTotalPriorityTxs(overrides?: CallOverrides): Promise; + + "getTotalPriorityTxs()"(overrides?: CallOverrides): Promise; + + getUpgradeProposalState(overrides?: CallOverrides): Promise; + + "getUpgradeProposalState()"(overrides?: CallOverrides): Promise; + + getVerifier(overrides?: CallOverrides): Promise; + + "getVerifier()"(overrides?: CallOverrides): Promise; + + getVerifierParams(overrides?: CallOverrides): Promise; + + "getVerifierParams()"(overrides?: CallOverrides): Promise; + + isApprovedBySecurityCouncil(overrides?: CallOverrides): Promise; + + "isApprovedBySecurityCouncil()"( + overrides?: CallOverrides + ): Promise; + + isDiamondStorageFrozen(overrides?: CallOverrides): Promise; + + "isDiamondStorageFrozen()"(overrides?: CallOverrides): Promise; + + isEthWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isEthWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + isFacetFreezable( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "isFacetFreezable(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + isFunctionFreezable( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "isFunctionFreezable(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + isValidator( + _address: string, + overrides?: CallOverrides + ): Promise; + + "isValidator(address)"( + _address: string, + overrides?: CallOverrides + ): Promise; + + l2LogsRootHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2LogsRootHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TransactionBaseCost( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: 
CallOverrides + ): Promise; + + "l2TransactionBaseCost(uint256,uint256,uint256)"( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + priorityQueueFrontOperation(overrides?: CallOverrides): Promise; + + "priorityQueueFrontOperation()"( + overrides?: CallOverrides + ): Promise; + + proposeShadowUpgrade( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeShadowUpgrade(bytes32,uint40)"( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proposeTransparentUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeTransparentUpgrade((tuple[],address,bytes),uint40)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proveBlocks( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + 
"proveBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[],(uint256[],uint256[]))"( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + proveL1ToL2TransactionStatus( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)"( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + proveL2LogInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2LogInclusion(uint256,uint256,(uint8,bool,uint16,address,bytes32,bytes32),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: 
CallOverrides + ): Promise; + + proveL2MessageInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2MessageInclusion(uint256,uint256,(uint16,address,bytes),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + requestL2Transaction( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)"( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + revertBlocks( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + "revertBlocks(uint256)"( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + securityCouncilUpgradeApprove( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + "securityCouncilUpgradeApprove(bytes32)"( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + serializeL2Transaction( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + 
"serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)"( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + setL2BootloaderBytecodeHash( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2BootloaderBytecodeHash(bytes32)"( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setL2DefaultAccountBytecodeHash( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2DefaultAccountBytecodeHash(bytes32)"( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setPendingGovernor( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + "setPendingGovernor(address)"( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + setPorterAvailability( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + "setPorterAvailability(bool)"( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + setPriorityTxMaxGasLimit( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + "setPriorityTxMaxGasLimit(uint256)"( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + setValidator( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + "setValidator(address,bool)"( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + setVerifier( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + "setVerifier(address)"( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + setVerifierParams( + _newVerifierParams: { + recursionNodeLevelVkHash: 
BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + "setVerifierParams((bytes32,bytes32,bytes32))"( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + storedBlockHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "storedBlockHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + unfreezeDiamond(overrides?: Overrides): Promise; + + "unfreezeDiamond()"(overrides?: Overrides): Promise; + + upgradeProposalHash( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + + "upgradeProposalHash((tuple[],address,bytes),uint256,bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + }; + + populateTransaction: { + acceptGovernor(overrides?: Overrides): Promise; + + "acceptGovernor()"(overrides?: Overrides): Promise; + + cancelUpgradeProposal( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "cancelUpgradeProposal(bytes32)"( + _proposedUpgradeHash: BytesLike, + overrides?: Overrides + ): Promise; + + commitBlocks( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + 
blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + "commitBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[])"( + _lastCommittedBlockData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _newBlocksData: { + blockNumber: BigNumberish; + timestamp: BigNumberish; + indexRepeatedStorageChanges: BigNumberish; + newStateRoot: BytesLike; + numberOfLayer1Txs: BigNumberish; + l2LogsTreeRoot: BytesLike; + priorityOperationsHash: BytesLike; + initialStorageChanges: BytesLike; + repeatedStorageChanges: BytesLike; + l2Logs: BytesLike; + l2ArbitraryLengthMessages: BytesLike[]; + factoryDeps: BytesLike[]; + }[], + overrides?: Overrides + ): Promise; + + executeBlocks( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + "executeBlocks(tuple[])"( + _blocksData: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + overrides?: Overrides + ): Promise; + + executeUpgrade( + _diamondCut: { + facetCuts: { 
+ facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + "executeUpgrade((tuple[],address,bytes),bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalSalt: BytesLike, + overrides?: Overrides + ): Promise; + + facetAddress( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "facetAddress(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + facetAddresses(overrides?: CallOverrides): Promise; + + "facetAddresses()"( + overrides?: CallOverrides + ): Promise; + + facetFunctionSelectors( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "facetFunctionSelectors(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + facets(overrides?: CallOverrides): Promise; + + "facets()"(overrides?: CallOverrides): Promise; + + finalizeEthWithdrawal( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + "finalizeEthWithdrawal(uint256,uint256,uint16,bytes,bytes32[])"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _message: BytesLike, + _merkleProof: BytesLike[], + overrides?: Overrides + ): Promise; + + freezeDiamond(overrides?: Overrides): Promise; + + "freezeDiamond()"(overrides?: Overrides): Promise; + + getCurrentProposalId( + overrides?: CallOverrides + ): Promise; + + "getCurrentProposalId()"( + overrides?: CallOverrides + ): Promise; + + getFirstUnprocessedPriorityTx( + overrides?: CallOverrides + ): Promise; + + "getFirstUnprocessedPriorityTx()"( + overrides?: CallOverrides + ): Promise; + + 
getGovernor(overrides?: CallOverrides): Promise; + + "getGovernor()"(overrides?: CallOverrides): Promise; + + getL2BootloaderBytecodeHash( + overrides?: CallOverrides + ): Promise; + + "getL2BootloaderBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getL2DefaultAccountBytecodeHash( + overrides?: CallOverrides + ): Promise; + + "getL2DefaultAccountBytecodeHash()"( + overrides?: CallOverrides + ): Promise; + + getPendingGovernor( + overrides?: CallOverrides + ): Promise; + + "getPendingGovernor()"( + overrides?: CallOverrides + ): Promise; + + getPriorityQueueSize( + overrides?: CallOverrides + ): Promise; + + "getPriorityQueueSize()"( + overrides?: CallOverrides + ): Promise; + + getPriorityTxMaxGasLimit( + overrides?: CallOverrides + ): Promise; + + "getPriorityTxMaxGasLimit()"( + overrides?: CallOverrides + ): Promise; + + getProposedUpgradeHash( + overrides?: CallOverrides + ): Promise; + + "getProposedUpgradeHash()"( + overrides?: CallOverrides + ): Promise; + + getProposedUpgradeTimestamp( + overrides?: CallOverrides + ): Promise; + + "getProposedUpgradeTimestamp()"( + overrides?: CallOverrides + ): Promise; + + getSecurityCouncil( + overrides?: CallOverrides + ): Promise; + + "getSecurityCouncil()"( + overrides?: CallOverrides + ): Promise; + + getTotalBlocksCommitted( + overrides?: CallOverrides + ): Promise; + + "getTotalBlocksCommitted()"( + overrides?: CallOverrides + ): Promise; + + getTotalBlocksExecuted( + overrides?: CallOverrides + ): Promise; + + "getTotalBlocksExecuted()"( + overrides?: CallOverrides + ): Promise; + + getTotalBlocksVerified( + overrides?: CallOverrides + ): Promise; + + "getTotalBlocksVerified()"( + overrides?: CallOverrides + ): Promise; + + getTotalPriorityTxs( + overrides?: CallOverrides + ): Promise; + + "getTotalPriorityTxs()"( + overrides?: CallOverrides + ): Promise; + + getUpgradeProposalState( + overrides?: CallOverrides + ): Promise; + + "getUpgradeProposalState()"( + overrides?: CallOverrides + ): Promise; + 
+ getVerifier(overrides?: CallOverrides): Promise; + + "getVerifier()"(overrides?: CallOverrides): Promise; + + getVerifierParams(overrides?: CallOverrides): Promise; + + "getVerifierParams()"( + overrides?: CallOverrides + ): Promise; + + isApprovedBySecurityCouncil( + overrides?: CallOverrides + ): Promise; + + "isApprovedBySecurityCouncil()"( + overrides?: CallOverrides + ): Promise; + + isDiamondStorageFrozen( + overrides?: CallOverrides + ): Promise; + + "isDiamondStorageFrozen()"( + overrides?: CallOverrides + ): Promise; + + isEthWithdrawalFinalized( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "isEthWithdrawalFinalized(uint256,uint256)"( + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + overrides?: CallOverrides + ): Promise; + + isFacetFreezable( + _facet: string, + overrides?: CallOverrides + ): Promise; + + "isFacetFreezable(address)"( + _facet: string, + overrides?: CallOverrides + ): Promise; + + isFunctionFreezable( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + "isFunctionFreezable(bytes4)"( + _selector: BytesLike, + overrides?: CallOverrides + ): Promise; + + isValidator( + _address: string, + overrides?: CallOverrides + ): Promise; + + "isValidator(address)"( + _address: string, + overrides?: CallOverrides + ): Promise; + + l2LogsRootHash( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2LogsRootHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + l2TransactionBaseCost( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "l2TransactionBaseCost(uint256,uint256,uint256)"( + _gasPrice: BigNumberish, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + overrides?: CallOverrides + ): Promise; + + priorityQueueFrontOperation( + overrides?: CallOverrides + ): Promise; + + 
"priorityQueueFrontOperation()"( + overrides?: CallOverrides + ): Promise; + + proposeShadowUpgrade( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeShadowUpgrade(bytes32,uint40)"( + _proposalHash: BytesLike, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proposeTransparentUpgrade( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + "proposeTransparentUpgrade((tuple[],address,bytes),uint40)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + overrides?: Overrides + ): Promise; + + proveBlocks( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + "proveBlocks((uint64,bytes32,uint64,uint256,bytes32,bytes32,uint256,bytes32),tuple[],(uint256[],uint256[]))"( + _prevBlock: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + 
commitment: BytesLike; + }, + _committedBlocks: { + blockNumber: BigNumberish; + blockHash: BytesLike; + indexRepeatedStorageChanges: BigNumberish; + numberOfLayer1Txs: BigNumberish; + priorityOperationsHash: BytesLike; + l2LogsTreeRoot: BytesLike; + timestamp: BigNumberish; + commitment: BytesLike; + }[], + _proof: { + recursiveAggregationInput: BigNumberish[]; + serializedProof: BigNumberish[]; + }, + overrides?: Overrides + ): Promise; + + proveL1ToL2TransactionStatus( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + "proveL1ToL2TransactionStatus(bytes32,uint256,uint256,uint16,bytes32[],uint8)"( + _l2TxHash: BytesLike, + _l2BlockNumber: BigNumberish, + _l2MessageIndex: BigNumberish, + _l2TxNumberInBlock: BigNumberish, + _merkleProof: BytesLike[], + _status: BigNumberish, + overrides?: CallOverrides + ): Promise; + + proveL2LogInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2LogInclusion(uint256,uint256,(uint8,bool,uint16,address,bytes32,bytes32),bytes32[])"( + _blockNumber: BigNumberish, + _index: BigNumberish, + _log: { + l2ShardId: BigNumberish; + isService: boolean; + txNumberInBlock: BigNumberish; + sender: string; + key: BytesLike; + value: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + proveL2MessageInclusion( + _blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + "proveL2MessageInclusion(uint256,uint256,(uint16,address,bytes),bytes32[])"( + 
_blockNumber: BigNumberish, + _index: BigNumberish, + _message: { + txNumberInBlock: BigNumberish; + sender: string; + data: BytesLike; + }, + _proof: BytesLike[], + overrides?: CallOverrides + ): Promise; + + requestL2Transaction( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + "requestL2Transaction(address,uint256,bytes,uint256,uint256,bytes[],address)"( + _contractL2: string, + _l2Value: BigNumberish, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _refundRecipient: string, + overrides?: PayableOverrides + ): Promise; + + revertBlocks( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + "revertBlocks(uint256)"( + _newLastBlock: BigNumberish, + overrides?: Overrides + ): Promise; + + securityCouncilUpgradeApprove( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + "securityCouncilUpgradeApprove(bytes32)"( + _upgradeProposalHash: BytesLike, + overrides?: Overrides + ): Promise; + + serializeL2Transaction( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + "serializeL2Transaction(uint256,uint256,address,address,bytes,uint256,uint256,bytes[],uint256,address)"( + _txId: BigNumberish, + _l2Value: BigNumberish, + _sender: string, + _contractAddressL2: string, + _calldata: BytesLike, + _gasLimit: BigNumberish, + _gasPerPubdataByteLimit: BigNumberish, + _factoryDeps: BytesLike[], + _toMint: BigNumberish, + _refundRecipient: string, + overrides?: CallOverrides + ): Promise; + + 
setL2BootloaderBytecodeHash( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2BootloaderBytecodeHash(bytes32)"( + _l2BootloaderBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setL2DefaultAccountBytecodeHash( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + "setL2DefaultAccountBytecodeHash(bytes32)"( + _l2DefaultAccountBytecodeHash: BytesLike, + overrides?: Overrides + ): Promise; + + setPendingGovernor( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + "setPendingGovernor(address)"( + _newPendingGovernor: string, + overrides?: Overrides + ): Promise; + + setPorterAvailability( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + "setPorterAvailability(bool)"( + _zkPorterIsAvailable: boolean, + overrides?: Overrides + ): Promise; + + setPriorityTxMaxGasLimit( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + "setPriorityTxMaxGasLimit(uint256)"( + _newPriorityTxMaxGasLimit: BigNumberish, + overrides?: Overrides + ): Promise; + + setValidator( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + "setValidator(address,bool)"( + _validator: string, + _active: boolean, + overrides?: Overrides + ): Promise; + + setVerifier( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + "setVerifier(address)"( + _newVerifier: string, + overrides?: Overrides + ): Promise; + + setVerifierParams( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + "setVerifierParams((bytes32,bytes32,bytes32))"( + _newVerifierParams: { + recursionNodeLevelVkHash: BytesLike; + recursionLeafLevelVkHash: BytesLike; + recursionCircuitsSetVksHash: BytesLike; + }, + overrides?: Overrides + ): Promise; + + storedBlockHash( + _blockNumber: BigNumberish, + 
overrides?: CallOverrides + ): Promise; + + "storedBlockHash(uint256)"( + _blockNumber: BigNumberish, + overrides?: CallOverrides + ): Promise; + + unfreezeDiamond(overrides?: Overrides): Promise; + + "unfreezeDiamond()"(overrides?: Overrides): Promise; + + upgradeProposalHash( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + + "upgradeProposalHash((tuple[],address,bytes),uint256,bytes32)"( + _diamondCut: { + facetCuts: { + facet: string; + action: BigNumberish; + isFreezable: boolean; + selectors: BytesLike[]; + }[]; + initAddress: string; + initCalldata: BytesLike; + }, + _proposalId: BigNumberish, + _salt: BytesLike, + overrides?: CallOverrides + ): Promise; + }; +} diff --git a/sdk/zksync-web3.js/typechain/IZkSyncFactory.ts b/sdk/zksync-web3.js/typechain/IZkSyncFactory.ts new file mode 100644 index 000000000000..3b217fb807b5 --- /dev/null +++ b/sdk/zksync-web3.js/typechain/IZkSyncFactory.ts @@ -0,0 +1,2211 @@ +/* Autogenerated file. Do not edit manually. 
*/ +/* tslint:disable */ +/* eslint-disable */ + +import { Contract, Signer } from "ethers"; +import { Provider } from "@ethersproject/providers"; + +import type { IZkSync } from "./IZkSync"; + +export class IZkSyncFactory { + static connect( + address: string, + signerOrProvider: Signer | Provider + ): IZkSync { + return new Contract(address, _abi, signerOrProvider) as IZkSync; + } +} + +const _abi = [ + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "blockNumber", + type: "uint256", + }, + { + indexed: true, + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + indexed: true, + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + name: "BlockCommit", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "blockNumber", + type: "uint256", + }, + { + indexed: true, + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + indexed: true, + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + name: "BlockExecution", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: "uint256", + name: "totalBlocksCommitted", + type: "uint256", + }, + { + indexed: false, + internalType: "uint256", + name: "totalBlocksVerified", + type: "uint256", + }, + { + indexed: false, + internalType: "uint256", + name: "totalBlocksExecuted", + type: "uint256", + }, + ], + name: "BlocksRevert", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "previousLastVerifiedBlock", + type: "uint256", + }, + { + indexed: true, + internalType: "uint256", + name: "currentLastVerifiedBlock", + type: "uint256", + }, + ], + name: "BlocksVerification", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "proposalId", + type: "uint256", + }, + { + 
indexed: true, + internalType: "bytes32", + name: "proposalHash", + type: "bytes32", + }, + ], + name: "CancelUpgradeProposal", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "to", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "amount", + type: "uint256", + }, + ], + name: "EthWithdrawalFinalized", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "proposalId", + type: "uint256", + }, + { + indexed: true, + internalType: "bytes32", + name: "proposalHash", + type: "bytes32", + }, + { + indexed: false, + internalType: "bytes32", + name: "proposalSalt", + type: "bytes32", + }, + ], + name: "ExecuteUpgrade", + type: "event", + }, + { + anonymous: false, + inputs: [], + name: "Freeze", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: "bool", + name: "isPorterAvailable", + type: "bool", + }, + ], + name: "IsPorterAvailableStatusUpdate", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "oldGovernor", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "newGovernor", + type: "address", + }, + ], + name: "NewGovernor", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "bytes32", + name: "previousBytecodeHash", + type: "bytes32", + }, + { + indexed: true, + internalType: "bytes32", + name: "newBytecodeHash", + type: "bytes32", + }, + ], + name: "NewL2BootloaderBytecodeHash", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "bytes32", + name: "previousBytecodeHash", + type: "bytes32", + }, + { + indexed: true, + internalType: "bytes32", + name: "newBytecodeHash", + type: "bytes32", + }, + ], + name: "NewL2DefaultAccountBytecodeHash", + type: "event", + }, + { + anonymous: false, + 
inputs: [ + { + indexed: true, + internalType: "address", + name: "oldPendingGovernor", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "newPendingGovernor", + type: "address", + }, + ], + name: "NewPendingGovernor", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: "uint256", + name: "txId", + type: "uint256", + }, + { + indexed: false, + internalType: "bytes32", + name: "txHash", + type: "bytes32", + }, + { + indexed: false, + internalType: "uint64", + name: "expirationTimestamp", + type: "uint64", + }, + { + components: [ + { + internalType: "uint256", + name: "txType", + type: "uint256", + }, + { + internalType: "uint256", + name: "from", + type: "uint256", + }, + { + internalType: "uint256", + name: "to", + type: "uint256", + }, + { + internalType: "uint256", + name: "gasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "gasPerPubdataByteLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "maxFeePerGas", + type: "uint256", + }, + { + internalType: "uint256", + name: "maxPriorityFeePerGas", + type: "uint256", + }, + { + internalType: "uint256", + name: "paymaster", + type: "uint256", + }, + { + internalType: "uint256", + name: "nonce", + type: "uint256", + }, + { + internalType: "uint256", + name: "value", + type: "uint256", + }, + { + internalType: "uint256[4]", + name: "reserved", + type: "uint256[4]", + }, + { + internalType: "bytes", + name: "data", + type: "bytes", + }, + { + internalType: "bytes", + name: "signature", + type: "bytes", + }, + { + internalType: "uint256[]", + name: "factoryDeps", + type: "uint256[]", + }, + { + internalType: "bytes", + name: "paymasterInput", + type: "bytes", + }, + { + internalType: "bytes", + name: "reservedDynamic", + type: "bytes", + }, + ], + indexed: false, + internalType: "struct IMailbox.L2CanonicalTransaction", + name: "transaction", + type: "tuple", + }, + { + indexed: false, + internalType: 
"bytes[]", + name: "factoryDeps", + type: "bytes[]", + }, + ], + name: "NewPriorityRequest", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: false, + internalType: "uint256", + name: "oldPriorityTxMaxGasLimit", + type: "uint256", + }, + { + indexed: false, + internalType: "uint256", + name: "newPriorityTxMaxGasLimit", + type: "uint256", + }, + ], + name: "NewPriorityTxMaxGasLimit", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "oldVerifier", + type: "address", + }, + { + indexed: true, + internalType: "address", + name: "newVerifier", + type: "address", + }, + ], + name: "NewVerifier", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + components: [ + { + internalType: "bytes32", + name: "recursionNodeLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionLeafLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionCircuitsSetVksHash", + type: "bytes32", + }, + ], + indexed: false, + internalType: "struct VerifierParams", + name: "oldVerifierParams", + type: "tuple", + }, + { + components: [ + { + internalType: "bytes32", + name: "recursionNodeLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionLeafLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionCircuitsSetVksHash", + type: "bytes32", + }, + ], + indexed: false, + internalType: "struct VerifierParams", + name: "newVerifierParams", + type: "tuple", + }, + ], + name: "NewVerifierParams", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "proposalId", + type: "uint256", + }, + { + indexed: true, + internalType: "bytes32", + name: "proposalHash", + type: "bytes32", + }, + ], + name: "ProposeShadowUpgrade", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + components: [ + { + components: [ + { + 
internalType: "address", + name: "facet", + type: "address", + }, + { + internalType: "enum Diamond.Action", + name: "action", + type: "uint8", + }, + { + internalType: "bool", + name: "isFreezable", + type: "bool", + }, + { + internalType: "bytes4[]", + name: "selectors", + type: "bytes4[]", + }, + ], + internalType: "struct Diamond.FacetCut[]", + name: "facetCuts", + type: "tuple[]", + }, + { + internalType: "address", + name: "initAddress", + type: "address", + }, + { + internalType: "bytes", + name: "initCalldata", + type: "bytes", + }, + ], + indexed: false, + internalType: "struct Diamond.DiamondCutData", + name: "diamondCut", + type: "tuple", + }, + { + indexed: true, + internalType: "uint256", + name: "proposalId", + type: "uint256", + }, + { + indexed: false, + internalType: "bytes32", + name: "proposalSalt", + type: "bytes32", + }, + ], + name: "ProposeTransparentUpgrade", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "uint256", + name: "proposalId", + type: "uint256", + }, + { + indexed: true, + internalType: "bytes32", + name: "proposalHash", + type: "bytes32", + }, + ], + name: "SecurityCouncilUpgradeApprove", + type: "event", + }, + { + anonymous: false, + inputs: [], + name: "Unfreeze", + type: "event", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "validatorAddress", + type: "address", + }, + { + indexed: false, + internalType: "bool", + name: "isActive", + type: "bool", + }, + ], + name: "ValidatorStatusUpdate", + type: "event", + }, + { + inputs: [], + name: "acceptGovernor", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_proposedUpgradeHash", + type: "bytes32", + }, + ], + name: "cancelUpgradeProposal", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + internalType: "uint64", + name: "blockNumber", + 
type: "uint64", + }, + { + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + internalType: "uint64", + name: "indexRepeatedStorageChanges", + type: "uint64", + }, + { + internalType: "uint256", + name: "numberOfLayer1Txs", + type: "uint256", + }, + { + internalType: "bytes32", + name: "priorityOperationsHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "l2LogsTreeRoot", + type: "bytes32", + }, + { + internalType: "uint256", + name: "timestamp", + type: "uint256", + }, + { + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + internalType: "struct IExecutor.StoredBlockInfo", + name: "_lastCommittedBlockData", + type: "tuple", + }, + { + components: [ + { + internalType: "uint64", + name: "blockNumber", + type: "uint64", + }, + { + internalType: "uint64", + name: "timestamp", + type: "uint64", + }, + { + internalType: "uint64", + name: "indexRepeatedStorageChanges", + type: "uint64", + }, + { + internalType: "bytes32", + name: "newStateRoot", + type: "bytes32", + }, + { + internalType: "uint256", + name: "numberOfLayer1Txs", + type: "uint256", + }, + { + internalType: "bytes32", + name: "l2LogsTreeRoot", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "priorityOperationsHash", + type: "bytes32", + }, + { + internalType: "bytes", + name: "initialStorageChanges", + type: "bytes", + }, + { + internalType: "bytes", + name: "repeatedStorageChanges", + type: "bytes", + }, + { + internalType: "bytes", + name: "l2Logs", + type: "bytes", + }, + { + internalType: "bytes[]", + name: "l2ArbitraryLengthMessages", + type: "bytes[]", + }, + { + internalType: "bytes[]", + name: "factoryDeps", + type: "bytes[]", + }, + ], + internalType: "struct IExecutor.CommitBlockInfo[]", + name: "_newBlocksData", + type: "tuple[]", + }, + ], + name: "commitBlocks", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + internalType: "uint64", + name: 
"blockNumber", + type: "uint64", + }, + { + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + internalType: "uint64", + name: "indexRepeatedStorageChanges", + type: "uint64", + }, + { + internalType: "uint256", + name: "numberOfLayer1Txs", + type: "uint256", + }, + { + internalType: "bytes32", + name: "priorityOperationsHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "l2LogsTreeRoot", + type: "bytes32", + }, + { + internalType: "uint256", + name: "timestamp", + type: "uint256", + }, + { + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + internalType: "struct IExecutor.StoredBlockInfo[]", + name: "_blocksData", + type: "tuple[]", + }, + ], + name: "executeBlocks", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + components: [ + { + internalType: "address", + name: "facet", + type: "address", + }, + { + internalType: "enum Diamond.Action", + name: "action", + type: "uint8", + }, + { + internalType: "bool", + name: "isFreezable", + type: "bool", + }, + { + internalType: "bytes4[]", + name: "selectors", + type: "bytes4[]", + }, + ], + internalType: "struct Diamond.FacetCut[]", + name: "facetCuts", + type: "tuple[]", + }, + { + internalType: "address", + name: "initAddress", + type: "address", + }, + { + internalType: "bytes", + name: "initCalldata", + type: "bytes", + }, + ], + internalType: "struct Diamond.DiamondCutData", + name: "_diamondCut", + type: "tuple", + }, + { + internalType: "bytes32", + name: "_proposalSalt", + type: "bytes32", + }, + ], + name: "executeUpgrade", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes4", + name: "_selector", + type: "bytes4", + }, + ], + name: "facetAddress", + outputs: [ + { + internalType: "address", + name: "facet", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + 
name: "facetAddresses", + outputs: [ + { + internalType: "address[]", + name: "facets", + type: "address[]", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_facet", + type: "address", + }, + ], + name: "facetFunctionSelectors", + outputs: [ + { + internalType: "bytes4[]", + name: "", + type: "bytes4[]", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "facets", + outputs: [ + { + components: [ + { + internalType: "address", + name: "addr", + type: "address", + }, + { + internalType: "bytes4[]", + name: "selectors", + type: "bytes4[]", + }, + ], + internalType: "struct IGetters.Facet[]", + name: "", + type: "tuple[]", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, + { + internalType: "uint16", + name: "_l2TxNumberInBlock", + type: "uint16", + }, + { + internalType: "bytes", + name: "_message", + type: "bytes", + }, + { + internalType: "bytes32[]", + name: "_merkleProof", + type: "bytes32[]", + }, + ], + name: "finalizeEthWithdrawal", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [], + name: "freezeDiamond", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [], + name: "getCurrentProposalId", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getFirstUnprocessedPriorityTx", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getGovernor", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + 
}, + { + inputs: [], + name: "getL2BootloaderBytecodeHash", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getL2DefaultAccountBytecodeHash", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getPendingGovernor", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getPriorityQueueSize", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getPriorityTxMaxGasLimit", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getProposedUpgradeHash", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getProposedUpgradeTimestamp", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getSecurityCouncil", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getTotalBlocksCommitted", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getTotalBlocksExecuted", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getTotalBlocksVerified", + outputs: [ + { + internalType: 
"uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getTotalPriorityTxs", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getUpgradeProposalState", + outputs: [ + { + internalType: "enum UpgradeState", + name: "", + type: "uint8", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getVerifier", + outputs: [ + { + internalType: "address", + name: "", + type: "address", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getVerifierParams", + outputs: [ + { + components: [ + { + internalType: "bytes32", + name: "recursionNodeLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionLeafLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionCircuitsSetVksHash", + type: "bytes32", + }, + ], + internalType: "struct VerifierParams", + name: "", + type: "tuple", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "isApprovedBySecurityCouncil", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "isDiamondStorageFrozen", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, + ], + name: "isEthWithdrawalFinalized", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_facet", + type: "address", + }, + ], + name: 
"isFacetFreezable", + outputs: [ + { + internalType: "bool", + name: "isFreezable", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes4", + name: "_selector", + type: "bytes4", + }, + ], + name: "isFunctionFreezable", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_address", + type: "address", + }, + ], + name: "isValidator", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_blockNumber", + type: "uint256", + }, + ], + name: "l2LogsRootHash", + outputs: [ + { + internalType: "bytes32", + name: "hash", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_gasPrice", + type: "uint256", + }, + { + internalType: "uint256", + name: "_gasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "_gasPerPubdataByteLimit", + type: "uint256", + }, + ], + name: "l2TransactionBaseCost", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "priorityQueueFrontOperation", + outputs: [ + { + components: [ + { + internalType: "bytes32", + name: "canonicalTxHash", + type: "bytes32", + }, + { + internalType: "uint64", + name: "expirationTimestamp", + type: "uint64", + }, + { + internalType: "uint192", + name: "layer2Tip", + type: "uint192", + }, + ], + internalType: "struct PriorityOperation", + name: "", + type: "tuple", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_proposalHash", + type: "bytes32", + }, + { + internalType: "uint40", + name: "_proposalId", + type: 
"uint40", + }, + ], + name: "proposeShadowUpgrade", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + components: [ + { + internalType: "address", + name: "facet", + type: "address", + }, + { + internalType: "enum Diamond.Action", + name: "action", + type: "uint8", + }, + { + internalType: "bool", + name: "isFreezable", + type: "bool", + }, + { + internalType: "bytes4[]", + name: "selectors", + type: "bytes4[]", + }, + ], + internalType: "struct Diamond.FacetCut[]", + name: "facetCuts", + type: "tuple[]", + }, + { + internalType: "address", + name: "initAddress", + type: "address", + }, + { + internalType: "bytes", + name: "initCalldata", + type: "bytes", + }, + ], + internalType: "struct Diamond.DiamondCutData", + name: "_diamondCut", + type: "tuple", + }, + { + internalType: "uint40", + name: "_proposalId", + type: "uint40", + }, + ], + name: "proposeTransparentUpgrade", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + internalType: "uint64", + name: "blockNumber", + type: "uint64", + }, + { + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + internalType: "uint64", + name: "indexRepeatedStorageChanges", + type: "uint64", + }, + { + internalType: "uint256", + name: "numberOfLayer1Txs", + type: "uint256", + }, + { + internalType: "bytes32", + name: "priorityOperationsHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "l2LogsTreeRoot", + type: "bytes32", + }, + { + internalType: "uint256", + name: "timestamp", + type: "uint256", + }, + { + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + internalType: "struct IExecutor.StoredBlockInfo", + name: "_prevBlock", + type: "tuple", + }, + { + components: [ + { + internalType: "uint64", + name: "blockNumber", + type: "uint64", + }, + { + internalType: "bytes32", + name: "blockHash", + type: "bytes32", + }, + { + 
internalType: "uint64", + name: "indexRepeatedStorageChanges", + type: "uint64", + }, + { + internalType: "uint256", + name: "numberOfLayer1Txs", + type: "uint256", + }, + { + internalType: "bytes32", + name: "priorityOperationsHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "l2LogsTreeRoot", + type: "bytes32", + }, + { + internalType: "uint256", + name: "timestamp", + type: "uint256", + }, + { + internalType: "bytes32", + name: "commitment", + type: "bytes32", + }, + ], + internalType: "struct IExecutor.StoredBlockInfo[]", + name: "_committedBlocks", + type: "tuple[]", + }, + { + components: [ + { + internalType: "uint256[]", + name: "recursiveAggregationInput", + type: "uint256[]", + }, + { + internalType: "uint256[]", + name: "serializedProof", + type: "uint256[]", + }, + ], + internalType: "struct IExecutor.ProofInput", + name: "_proof", + type: "tuple", + }, + ], + name: "proveBlocks", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_l2TxHash", + type: "bytes32", + }, + { + internalType: "uint256", + name: "_l2BlockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2MessageIndex", + type: "uint256", + }, + { + internalType: "uint16", + name: "_l2TxNumberInBlock", + type: "uint16", + }, + { + internalType: "bytes32[]", + name: "_merkleProof", + type: "bytes32[]", + }, + { + internalType: "enum TxStatus", + name: "_status", + type: "uint8", + }, + ], + name: "proveL1ToL2TransactionStatus", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_blockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_index", + type: "uint256", + }, + { + components: [ + { + internalType: "uint8", + name: "l2ShardId", + type: "uint8", + }, + { + internalType: "bool", + name: "isService", + type: "bool", 
+ }, + { + internalType: "uint16", + name: "txNumberInBlock", + type: "uint16", + }, + { + internalType: "address", + name: "sender", + type: "address", + }, + { + internalType: "bytes32", + name: "key", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "value", + type: "bytes32", + }, + ], + internalType: "struct L2Log", + name: "_log", + type: "tuple", + }, + { + internalType: "bytes32[]", + name: "_proof", + type: "bytes32[]", + }, + ], + name: "proveL2LogInclusion", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_blockNumber", + type: "uint256", + }, + { + internalType: "uint256", + name: "_index", + type: "uint256", + }, + { + components: [ + { + internalType: "uint16", + name: "txNumberInBlock", + type: "uint16", + }, + { + internalType: "address", + name: "sender", + type: "address", + }, + { + internalType: "bytes", + name: "data", + type: "bytes", + }, + ], + internalType: "struct L2Message", + name: "_message", + type: "tuple", + }, + { + internalType: "bytes32[]", + name: "_proof", + type: "bytes32[]", + }, + ], + name: "proveL2MessageInclusion", + outputs: [ + { + internalType: "bool", + name: "", + type: "bool", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_contractL2", + type: "address", + }, + { + internalType: "uint256", + name: "_l2Value", + type: "uint256", + }, + { + internalType: "bytes", + name: "_calldata", + type: "bytes", + }, + { + internalType: "uint256", + name: "_gasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "_gasPerPubdataByteLimit", + type: "uint256", + }, + { + internalType: "bytes[]", + name: "_factoryDeps", + type: "bytes[]", + }, + { + internalType: "address", + name: "_refundRecipient", + type: "address", + }, + ], + name: "requestL2Transaction", + outputs: [ + { + internalType: 
"bytes32", + name: "canonicalTxHash", + type: "bytes32", + }, + ], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_newLastBlock", + type: "uint256", + }, + ], + name: "revertBlocks", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_upgradeProposalHash", + type: "bytes32", + }, + ], + name: "securityCouncilUpgradeApprove", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_txId", + type: "uint256", + }, + { + internalType: "uint256", + name: "_l2Value", + type: "uint256", + }, + { + internalType: "address", + name: "_sender", + type: "address", + }, + { + internalType: "address", + name: "_contractAddressL2", + type: "address", + }, + { + internalType: "bytes", + name: "_calldata", + type: "bytes", + }, + { + internalType: "uint256", + name: "_gasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "_gasPerPubdataByteLimit", + type: "uint256", + }, + { + internalType: "bytes[]", + name: "_factoryDeps", + type: "bytes[]", + }, + { + internalType: "uint256", + name: "_toMint", + type: "uint256", + }, + { + internalType: "address", + name: "_refundRecipient", + type: "address", + }, + ], + name: "serializeL2Transaction", + outputs: [ + { + components: [ + { + internalType: "uint256", + name: "txType", + type: "uint256", + }, + { + internalType: "uint256", + name: "from", + type: "uint256", + }, + { + internalType: "uint256", + name: "to", + type: "uint256", + }, + { + internalType: "uint256", + name: "gasLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "gasPerPubdataByteLimit", + type: "uint256", + }, + { + internalType: "uint256", + name: "maxFeePerGas", + type: "uint256", + }, + { + internalType: "uint256", + name: "maxPriorityFeePerGas", + type: "uint256", + }, + { + internalType: "uint256", + name: 
"paymaster", + type: "uint256", + }, + { + internalType: "uint256", + name: "nonce", + type: "uint256", + }, + { + internalType: "uint256", + name: "value", + type: "uint256", + }, + { + internalType: "uint256[4]", + name: "reserved", + type: "uint256[4]", + }, + { + internalType: "bytes", + name: "data", + type: "bytes", + }, + { + internalType: "bytes", + name: "signature", + type: "bytes", + }, + { + internalType: "uint256[]", + name: "factoryDeps", + type: "uint256[]", + }, + { + internalType: "bytes", + name: "paymasterInput", + type: "bytes", + }, + { + internalType: "bytes", + name: "reservedDynamic", + type: "bytes", + }, + ], + internalType: "struct IMailbox.L2CanonicalTransaction", + name: "", + type: "tuple", + }, + ], + stateMutability: "pure", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_l2BootloaderBytecodeHash", + type: "bytes32", + }, + ], + name: "setL2BootloaderBytecodeHash", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bytes32", + name: "_l2DefaultAccountBytecodeHash", + type: "bytes32", + }, + ], + name: "setL2DefaultAccountBytecodeHash", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_newPendingGovernor", + type: "address", + }, + ], + name: "setPendingGovernor", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "bool", + name: "_zkPorterIsAvailable", + type: "bool", + }, + ], + name: "setPorterAvailability", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_newPriorityTxMaxGasLimit", + type: "uint256", + }, + ], + name: "setPriorityTxMaxGasLimit", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "address", + name: "_validator", + type: "address", + }, + { + 
internalType: "bool", + name: "_active", + type: "bool", + }, + ], + name: "setValidator", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "contract Verifier", + name: "_newVerifier", + type: "address", + }, + ], + name: "setVerifier", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + internalType: "bytes32", + name: "recursionNodeLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionLeafLevelVkHash", + type: "bytes32", + }, + { + internalType: "bytes32", + name: "recursionCircuitsSetVksHash", + type: "bytes32", + }, + ], + internalType: "struct VerifierParams", + name: "_newVerifierParams", + type: "tuple", + }, + ], + name: "setVerifierParams", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + internalType: "uint256", + name: "_blockNumber", + type: "uint256", + }, + ], + name: "storedBlockHash", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "unfreezeDiamond", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + { + inputs: [ + { + components: [ + { + components: [ + { + internalType: "address", + name: "facet", + type: "address", + }, + { + internalType: "enum Diamond.Action", + name: "action", + type: "uint8", + }, + { + internalType: "bool", + name: "isFreezable", + type: "bool", + }, + { + internalType: "bytes4[]", + name: "selectors", + type: "bytes4[]", + }, + ], + internalType: "struct Diamond.FacetCut[]", + name: "facetCuts", + type: "tuple[]", + }, + { + internalType: "address", + name: "initAddress", + type: "address", + }, + { + internalType: "bytes", + name: "initCalldata", + type: "bytes", + }, + ], + internalType: "struct Diamond.DiamondCutData", + name: "_diamondCut", + type: "tuple", + }, + { + internalType: 
"uint256", + name: "_proposalId", + type: "uint256", + }, + { + internalType: "bytes32", + name: "_salt", + type: "bytes32", + }, + ], + name: "upgradeProposalHash", + outputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + stateMutability: "pure", + type: "function", + }, +]; diff --git a/sdk/zksync-web3.js/typechain/index.ts b/sdk/zksync-web3.js/typechain/index.ts new file mode 100644 index 000000000000..1434b69d821f --- /dev/null +++ b/sdk/zksync-web3.js/typechain/index.ts @@ -0,0 +1,13 @@ +export { IZkSyncFactory } from './IZkSyncFactory'; +export type { IZkSync } from './IZkSync'; +export { IERC20MetadataFactory } from './IERC20MetadataFactory'; +export type { IERC20Metadata } from './IERC20Metadata'; +export { IL1BridgeFactory } from './IL1BridgeFactory'; +export type { IL1Bridge } from './IL1Bridge'; +export { IL2BridgeFactory } from './IL2BridgeFactory'; +export type { IL2Bridge } from './IL2Bridge'; +export { IAllowListFactory } from './IAllowListFactory'; +export type { IAllowList } from './IAllowList'; +export { IEthTokenFactory } from './IEthTokenFactory'; +export type { IEthToken } from './IEthToken'; + diff --git a/sdk/zksync-web3.js/typechain/update.sh b/sdk/zksync-web3.js/typechain/update.sh new file mode 100644 index 000000000000..848bad9d9d5f --- /dev/null +++ b/sdk/zksync-web3.js/typechain/update.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +cd `dirname $0` + +cp -f $ZKSYNC_HOME/contracts/ethereum/typechain/{IZkSync,IL2Bridge,IL1Bridge,IERC20Metadata,IAllowList}.d.ts . +cp -f $ZKSYNC_HOME/contracts/ethereum/typechain/{IZkSync,IL2Bridge,IL1Bridge,IERC20Metadata,IAllowList}Factory.ts . 
diff --git a/tarpaulin.toml b/tarpaulin.toml new file mode 100644 index 000000000000..7cc0d5606456 --- /dev/null +++ b/tarpaulin.toml @@ -0,0 +1,21 @@ +[coverage] +release = true +workspace = true +exclude = ["loadnext", "zksync_testkit", "zksync"] +# For some reason tarpaulin does not actually exclude these +# packages fully, so we specify them as files here. +exclude-files = [ + "core/lib/eth_client/src/clients/mock.rs", + "core/tests/loadnext/*", + "sdk/zksync-rs/*", +] +skip-clean = true +ignore-tests = true +# This flag is needed because somewhere in our dependency tree +# is an outdated package which breaks tarpaulin. +# `cargo update` fixed the tarpaulin error but broke something else +# in the server, so I'm just gonna leave this here for now. +avoid_cfg_tarpaulin = true + +[report] +out = ["Xml"] diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000000..69efb2766e4b --- /dev/null +++ b/yarn.lock @@ -0,0 +1,12452 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" + integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.20.0": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.1.tgz#f2e6ef7790d8c8dbf03d379502dcc246dcce0b30" + integrity sha512-EWZ4mE2diW3QALKvDMiXnbZpRvlj+nayZ112nK93SnhqOtpdsbVD4W+2tEoT3YNBAG9RBR0ISY758ZkOgsn6pQ== + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.6.tgz#7122ae4f5c5a37c0946c066149abd8e75f81540f" + integrity sha512-D2Ue4KHpc6Ys2+AxpIx1BZ8+UegLLLE2p3KJEuJRKmokHOtl49jQ5ny1773KsGLZs8MQvBidAF6yWUJxRqtKtg== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.6" + "@babel/helper-compilation-targets" "^7.19.3" + "@babel/helper-module-transforms" "^7.19.6" + "@babel/helpers" "^7.19.4" + "@babel/parser" "^7.19.6" + "@babel/template" "^7.18.10" + 
"@babel/traverse" "^7.19.6" + "@babel/types" "^7.19.4" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.19.6", "@babel/generator@^7.20.1", "@babel/generator@^7.7.2": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.1.tgz#ef32ecd426222624cbd94871a7024639cf61a9fa" + integrity sha512-u1dMdBUmA7Z0rBB97xh8pIhviK7oItYOkjbsCxTWMknyvbQRBwX7/gn4JXurRdirWMFh+ZtYARqkA6ydogVZpg== + dependencies: + "@babel/types" "^7.20.0" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.19.3": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.0.tgz#6bf5374d424e1b3922822f1d9bdaa43b1a139d0a" + integrity sha512-0jp//vDGp9e8hZzBc6N/KwA5ZK3Wsm/pfm4CrY7vzegkVxc65SgSn6wYOnwHe9Js9HRQ1YTCKLGPzDtaS3RoLQ== + dependencies: + "@babel/compat-data" "^7.20.0" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + 
integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.19.6": + version "7.19.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.19.6.tgz#6c52cc3ac63b70952d33ee987cbee1c9368b533f" + integrity sha512-fCmcfQo/KYr/VXXDIyd3CBGZ6AFhPFy1TfSEJ+PilGVlQT6jcbqtHAM4C1EciRqMza7/TpOUZliuSH+U6HAhJw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.19.4" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.6" + "@babel/types" "^7.19.4" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.8.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.19.0.tgz#4796bb14961521f0f8715990bee2fb6e51ce21bf" + integrity sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw== + +"@babel/helper-simple-access@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.19.4.tgz#be553f4951ac6352df2567f7daa19a0ee15668e7" + integrity 
sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg== + dependencies: + "@babel/types" "^7.19.4" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" + integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helpers@^7.19.4": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.20.1.tgz#2ab7a0fcb0a03b5bf76629196ed63c2d7311f4c9" + integrity sha512-J77mUVaDTUJFZ5BpP6mMn6OIl3rEWymk2ZxDBQJUG3P+PbmyMcF3bYWvz0ma69Af1oobDqT/iAsvzhB58xhQUg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.0" + +"@babel/highlight@^7.10.4", "@babel/highlight@^7.18.6": + version 
"7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.19.6", "@babel/parser@^7.20.1", "@babel/parser@^7.7.0": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.1.tgz#3e045a92f7b4623cafc2425eddcb8cf2e54f9cc5" + integrity sha512-hp0AYxaZJhxULfM1zyp7Wgr+pSUKBcP3M+PHnSzWGdXOzg/kHWIgiUWARvubhUKGOEw3xqY4x+lyZ9ytBVcELw== + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.7.2": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + 
resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.7.2": + version 
"7.20.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz#4e9a0cfc769c85689b77a2e642d24e9f697fc8c7" + integrity sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.6", "@babel/traverse@^7.20.1", "@babel/traverse@^7.7.0", "@babel/traverse@^7.7.2": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.1.tgz#9b15ccbf882f6d107eeeecf263fbcdd208777ec8" + integrity sha512-d3tN8fkVJwFLkHkBN479SOsw4DMZnz8cdbL/gvuDuzy3TS6Nfw80HuQqhw1pITbIruHyh7d1fMA47kWzmcUEGA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.20.1" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.20.1" + "@babel/types" "^7.20.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.19.4", "@babel/types@^7.20.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.7.0": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.0.tgz#52c94cf8a7e24e89d2a194c25c35b17a64871479" + integrity sha512-Jlgt3H0TajCW164wkTOTzHkZb075tMQMULzrLUoUeKmO7eFL96GgDxf7/Axhc5CAuKE3KFyVW1p6ysKsi2oXAg== + dependencies: + "@babel/helper-string-parser" "^7.19.4" + 
"@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@balena/dockerignore@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d" + integrity sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q== + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@colors/colors@1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" + integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== + +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + +"@ensdomains/ens@^0.4.4": + version "0.4.5" + resolved "https://registry.yarnpkg.com/@ensdomains/ens/-/ens-0.4.5.tgz#e0aebc005afdc066447c6e22feb4eda89a5edbfc" + integrity sha512-JSvpj1iNMFjK6K+uVl4unqMoa9rf5jopb8cya5UGBWz23Nw8hSNT7efgUx4BTlAPAgpNlEioUfeTyQ6J9ZvTVw== + dependencies: + bluebird "^3.5.2" + eth-ens-namehash "^2.0.8" + solc "^0.4.20" + testrpc "0.0.1" + web3-utils "^1.0.0-beta.31" + +"@ensdomains/resolver@^0.2.4": + version "0.2.4" + resolved "https://registry.yarnpkg.com/@ensdomains/resolver/-/resolver-0.2.4.tgz#c10fe28bf5efbf49bff4666d909aed0265efbc89" + integrity sha512-bvaTH34PMCbv6anRa9I/0zjLJgY4EuznbEMgbV77JBCQ9KNC46rzi0avuxpOfu+xDjPEtSFGqVEOr5GlUSGudA== + +"@eslint/eslintrc@^0.4.3": 
+ version "0.4.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" + integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^13.9.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@ethereum-waffle/chai@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/chai/-/chai-3.4.4.tgz#16c4cc877df31b035d6d92486dfdf983df9138ff" + integrity sha512-/K8czydBtXXkcM9X6q29EqEkc5dN3oYenyH2a9hF7rGAApAJUpH8QBtojxOY/xQ2up5W332jqgxwp0yPiYug1g== + dependencies: + "@ethereum-waffle/provider" "^3.4.4" + ethers "^5.5.2" + +"@ethereum-waffle/compiler@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/compiler/-/compiler-3.4.4.tgz#d568ee0f6029e68b5c645506079fbf67d0dfcf19" + integrity sha512-RUK3axJ8IkD5xpWjWoJgyHclOeEzDLQFga6gKpeGxiS/zBu+HB0W2FvsrrLalTFIaPw/CGYACRBSIxqiCqwqTQ== + dependencies: + "@resolver-engine/imports" "^0.3.3" + "@resolver-engine/imports-fs" "^0.3.3" + "@typechain/ethers-v5" "^2.0.0" + "@types/mkdirp" "^0.5.2" + "@types/node-fetch" "^2.5.5" + ethers "^5.0.1" + mkdirp "^0.5.1" + node-fetch "^2.6.1" + solc "^0.6.3" + ts-generator "^0.1.1" + typechain "^3.0.0" + +"@ethereum-waffle/ens@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/ens/-/ens-3.4.4.tgz#db97ea2c9decbb70b9205d53de2ccbd6f3182ba1" + integrity sha512-0m4NdwWxliy3heBYva1Wr4WbJKLnwXizmy5FfSSr5PMbjI7SIGCdCB59U7/ZzY773/hY3bLnzLwvG5mggVjJWg== + dependencies: + "@ensdomains/ens" "^0.4.4" + "@ensdomains/resolver" "^0.2.4" + ethers "^5.5.2" + +"@ethereum-waffle/mock-contract@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/mock-contract/-/mock-contract-3.4.4.tgz#fc6ffa18813546f4950a69f5892d4dd54b2c685a" + integrity 
sha512-Mp0iB2YNWYGUV+VMl5tjPsaXKbKo8MDH9wSJ702l9EBjdxFf/vBvnMBAC1Fub1lLtmD0JHtp1pq+mWzg/xlLnA== + dependencies: + "@ethersproject/abi" "^5.5.0" + ethers "^5.5.2" + +"@ethereum-waffle/provider@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/provider/-/provider-3.4.4.tgz#398fc1f7eb91cc2df7d011272eacba8af0c7fffb" + integrity sha512-GK8oKJAM8+PKy2nK08yDgl4A80mFuI8zBkE0C9GqTRYQqvuxIyXoLmJ5NZU9lIwyWVv5/KsoA11BgAv2jXE82g== + dependencies: + "@ethereum-waffle/ens" "^3.4.4" + ethers "^5.5.2" + ganache-core "^2.13.2" + patch-package "^6.2.2" + postinstall-postinstall "^2.1.0" + +"@ethersproject/abi@5.0.0-beta.153": + version "5.0.0-beta.153" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.0-beta.153.tgz#43a37172b33794e4562999f6e2d555b7599a8eee" + integrity sha512-aXweZ1Z7vMNzJdLpR1CZUAIgnwjrZeUSvN9syCwlBaEBUFJmFY+HHnfuTI5vIhVs/mRkfJVrbEyl51JZQqyjAg== + dependencies: + "@ethersproject/address" ">=5.0.0-beta.128" + "@ethersproject/bignumber" ">=5.0.0-beta.130" + "@ethersproject/bytes" ">=5.0.0-beta.129" + "@ethersproject/constants" ">=5.0.0-beta.128" + "@ethersproject/hash" ">=5.0.0-beta.128" + "@ethersproject/keccak256" ">=5.0.0-beta.127" + "@ethersproject/logger" ">=5.0.0-beta.129" + "@ethersproject/properties" ">=5.0.0-beta.131" + "@ethersproject/strings" ">=5.0.0-beta.130" + +"@ethersproject/abi@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.5.0.tgz#fb52820e22e50b854ff15ce1647cc508d6660613" + integrity sha512-loW7I4AohP5KycATvc0MgujU6JyCHPqHdeoo9z3Nr9xEiNioxa65ccdm1+fsoJhkuhdRtfcL8cfyGamz2AxZ5w== + dependencies: + "@ethersproject/address" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/hash" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + +"@ethersproject/abi@5.7.0", 
"@ethersproject/abi@^5.1.2", "@ethersproject/abi@^5.5.0", "@ethersproject/abi@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" + integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/abstract-provider@5.5.1": + version "5.5.1" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.5.1.tgz#2f1f6e8a3ab7d378d8ad0b5718460f85649710c5" + integrity sha512-m+MA/ful6eKbxpr99xUYeRvLkfnlqzrF8SZ46d/xFB1A7ZVknYc/sXJG0RcufF52Qn2jeFj1hhcoQ7IXjNKUqg== + dependencies: + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/networks" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + "@ethersproject/web" "^5.5.0" + +"@ethersproject/abstract-provider@5.7.0", "@ethersproject/abstract-provider@^5.5.0", "@ethersproject/abstract-provider@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" + integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + +"@ethersproject/abstract-signer@5.5.0": + version "5.5.0" 
+ resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.5.0.tgz#590ff6693370c60ae376bf1c7ada59eb2a8dd08d" + integrity sha512-lj//7r250MXVLKI7sVarXAbZXbv9P50lgmJQGr2/is82EwEb8r7HrxsmMqAjTsztMYy7ohrIhGMIml+Gx4D3mA== + dependencies: + "@ethersproject/abstract-provider" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + +"@ethersproject/abstract-signer@5.7.0", "@ethersproject/abstract-signer@^5.5.0", "@ethersproject/abstract-signer@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" + integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + +"@ethersproject/address@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.5.0.tgz#bcc6f576a553f21f3dd7ba17248f81b473c9c78f" + integrity sha512-l4Nj0eWlTUh6ro5IbPTgbpT4wRbdH5l8CQf7icF7sb/SI3Nhd9Y9HzhonTSTi6CefI0necIw7LJqQPopPLZyWw== + dependencies: + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/rlp" "^5.5.0" + +"@ethersproject/address@5.7.0", "@ethersproject/address@>=5.0.0-beta.128", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.5.0", "@ethersproject/address@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" + integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== + dependencies: + 
"@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + +"@ethersproject/base64@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.5.0.tgz#881e8544e47ed976930836986e5eb8fab259c090" + integrity sha512-tdayUKhU1ljrlHzEWbStXazDpsx4eg1dBXUSI6+mHlYklOXoXF6lZvw8tnD6oVaWfnMxAgRSKROg3cVKtCcppA== + dependencies: + "@ethersproject/bytes" "^5.5.0" + +"@ethersproject/base64@5.7.0", "@ethersproject/base64@^5.5.0", "@ethersproject/base64@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" + integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + +"@ethersproject/basex@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.5.0.tgz#e40a53ae6d6b09ab4d977bd037010d4bed21b4d3" + integrity sha512-ZIodwhHpVJ0Y3hUCfUucmxKsWQA5TMnavp5j/UOuDdzZWzJlRmuOjcTMIGgHCYuZmHt36BfiSyQPSRskPxbfaQ== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + +"@ethersproject/basex@5.7.0", "@ethersproject/basex@^5.5.0", "@ethersproject/basex@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.7.0.tgz#97034dc7e8938a8ca943ab20f8a5e492ece4020b" + integrity sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + +"@ethersproject/bignumber@5.5.0", "@ethersproject/bignumber@~5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.5.0.tgz#875b143f04a216f4f8b96245bde942d42d279527" + integrity 
sha512-6Xytlwvy6Rn3U3gKEc1vP7nR92frHkv6wtVr95LFR3jREXiCPzdWxKQ1cx4JGQBXxcguAwjA8murlYN2TSiEbg== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + bn.js "^4.11.9" + +"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.5.0", "@ethersproject/bignumber@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" + integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + bn.js "^5.2.1" + +"@ethersproject/bytes@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.5.0.tgz#cb11c526de657e7b45d2e0f0246fb3b9d29a601c" + integrity sha512-ABvc7BHWhZU9PNM/tANm/Qx4ostPGadAuQzWTr3doklZOhDlmcBqclrQe/ZXUIj3K8wC28oYeuRa+A37tX9kog== + dependencies: + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/bytes@5.7.0", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.5.0", "@ethersproject/bytes@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" + integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/constants@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.5.0.tgz#d2a2cd7d94bd1d58377d1d66c4f53c9be4d0a45e" + integrity sha512-2MsRRVChkvMWR+GyMGY4N1sAX9Mt3J9KykCsgUFd/1mwS0UH1qw+Bv9k1UJb3X3YJYFco9H20pjSlOIfCG5HYQ== + dependencies: + "@ethersproject/bignumber" "^5.5.0" + +"@ethersproject/constants@5.7.0", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.5.0", "@ethersproject/constants@^5.7.0": + version "5.7.0" + 
resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" + integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + +"@ethersproject/contracts@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.5.0.tgz#b735260d4bd61283a670a82d5275e2a38892c197" + integrity sha512-2viY7NzyvJkh+Ug17v7g3/IJC8HqZBDcOjYARZLdzRxrfGlRgmYgl6xPRKVbEzy1dWKw/iv7chDcS83pg6cLxg== + dependencies: + "@ethersproject/abi" "^5.5.0" + "@ethersproject/abstract-provider" "^5.5.0" + "@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/address" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + +"@ethersproject/contracts@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e" + integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg== + dependencies: + "@ethersproject/abi" "^5.7.0" + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + +"@ethersproject/hash@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.5.0.tgz#7cee76d08f88d1873574c849e0207dcb32380cc9" + integrity sha512-dnGVpK1WtBjmnp3mUT0PlU2MpapnwWI0PibldQEq1408tQBAbZpPidkWoVVuNMOl/lISO3+4hXZWCL3YV7qzfg== + dependencies: + 
"@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/address" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + +"@ethersproject/hash@5.7.0", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.5.0", "@ethersproject/hash@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" + integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/hdnode@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.5.0.tgz#4a04e28f41c546f7c978528ea1575206a200ddf6" + integrity sha512-mcSOo9zeUg1L0CoJH7zmxwUG5ggQHU1UrRf8jyTYy6HxdZV+r0PBoL1bxr+JHIPXRzS6u/UW4mEn43y0tmyF8Q== + dependencies: + "@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/basex" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/pbkdf2" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/sha2" "^5.5.0" + "@ethersproject/signing-key" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + "@ethersproject/wordlists" "^5.5.0" + +"@ethersproject/hdnode@5.7.0", "@ethersproject/hdnode@^5.5.0", "@ethersproject/hdnode@^5.7.0": + version "5.7.0" + resolved 
"https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.7.0.tgz#e627ddc6b466bc77aebf1a6b9e47405ca5aef9cf" + integrity sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" + +"@ethersproject/json-wallets@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.5.0.tgz#dd522d4297e15bccc8e1427d247ec8376b60e325" + integrity sha512-9lA21XQnCdcS72xlBn1jfQdj2A1VUxZzOzi9UkNdnokNKke/9Ya2xA9aIK1SC3PQyBDLt4C+dfps7ULpkvKikQ== + dependencies: + "@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/address" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/hdnode" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/pbkdf2" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/random" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + aes-js "3.0.0" + scrypt-js "3.0.1" + +"@ethersproject/json-wallets@5.7.0", "@ethersproject/json-wallets@^5.5.0", "@ethersproject/json-wallets@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.7.0.tgz#5e3355287b548c32b368d91014919ebebddd5360" + integrity sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + 
"@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + aes-js "3.0.0" + scrypt-js "3.0.1" + +"@ethersproject/keccak256@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.5.0.tgz#e4b1f9d7701da87c564ffe336f86dcee82983492" + integrity sha512-5VoFCTjo2rYbBe1l2f4mccaRFN/4VQEYFwwn04aJV2h7qf4ZvI2wFxUE1XOX+snbwCLRzIeikOqtAoPwMza9kg== + dependencies: + "@ethersproject/bytes" "^5.5.0" + js-sha3 "0.8.0" + +"@ethersproject/keccak256@5.7.0", "@ethersproject/keccak256@>=5.0.0-beta.127", "@ethersproject/keccak256@^5.5.0", "@ethersproject/keccak256@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" + integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + js-sha3 "0.8.0" + +"@ethersproject/logger@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.5.0.tgz#0c2caebeff98e10aefa5aef27d7441c7fd18cf5d" + integrity sha512-rIY/6WPm7T8n3qS2vuHTUBPdXHl+rGxWxW5okDfo9J4Z0+gRRZT0msvUdIJkE4/HS29GUMziwGaaKO2bWONBrg== + +"@ethersproject/logger@5.7.0", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.5.0", "@ethersproject/logger@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" + integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== + +"@ethersproject/networks@5.5.2": + version "5.5.2" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.5.2.tgz#784c8b1283cd2a931114ab428dae1bd00c07630b" + integrity 
sha512-NEqPxbGBfy6O3x4ZTISb90SjEDkWYDUbEeIFhJly0F7sZjoQMnj5KYzMSkMkLKZ+1fGpx00EDpHQCy6PrDupkQ== + dependencies: + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.5.0", "@ethersproject/networks@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" + integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/pbkdf2@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.5.0.tgz#e25032cdf02f31505d47afbf9c3e000d95c4a050" + integrity sha512-SaDvQFvXPnz1QGpzr6/HToLifftSXGoXrbpZ6BvoZhmx4bNLHrxDe8MZisuecyOziP1aVEwzC2Hasj+86TgWVg== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/sha2" "^5.5.0" + +"@ethersproject/pbkdf2@5.7.0", "@ethersproject/pbkdf2@^5.5.0", "@ethersproject/pbkdf2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.7.0.tgz#d2267d0a1f6e123f3771007338c47cccd83d3102" + integrity sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + +"@ethersproject/properties@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.5.0.tgz#61f00f2bb83376d2071baab02245f92070c59995" + integrity sha512-l3zRQg3JkD8EL3CPjNK5g7kMx4qSwiR60/uk5IVjd3oq1MZR5qUg40CNOoEJoX5wc3DyY5bt9EbMk86C7x0DNA== + dependencies: + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/properties@5.7.0", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.5.0", "@ethersproject/properties@^5.7.0": + version "5.7.0" + resolved 
"https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" + integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== + dependencies: + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/providers@5.5.3": + version "5.5.3" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.5.3.tgz#56c2b070542ac44eb5de2ed3cf6784acd60a3130" + integrity sha512-ZHXxXXXWHuwCQKrgdpIkbzMNJMvs+9YWemanwp1fA7XZEv7QlilseysPvQe0D7Q7DlkJX/w/bGA1MdgK2TbGvA== + dependencies: + "@ethersproject/abstract-provider" "^5.5.0" + "@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/address" "^5.5.0" + "@ethersproject/basex" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/hash" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/networks" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/random" "^5.5.0" + "@ethersproject/rlp" "^5.5.0" + "@ethersproject/sha2" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + "@ethersproject/web" "^5.5.0" + bech32 "1.1.4" + ws "7.4.6" + +"@ethersproject/providers@5.7.2": + version "5.7.2" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.2.tgz#f8b1a4f275d7ce58cf0a2eec222269a08beb18cb" + integrity sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" 
+ "@ethersproject/random" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" + bech32 "1.1.4" + ws "7.4.6" + +"@ethersproject/random@5.5.1": + version "5.5.1" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.5.1.tgz#7cdf38ea93dc0b1ed1d8e480ccdaf3535c555415" + integrity sha512-YaU2dQ7DuhL5Au7KbcQLHxcRHfgyNgvFV4sQOo0HrtW3Zkrc9ctWNz8wXQ4uCSfSDsqX2vcjhroxU5RQRV0nqA== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/random@5.7.0", "@ethersproject/random@^5.5.0", "@ethersproject/random@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.7.0.tgz#af19dcbc2484aae078bb03656ec05df66253280c" + integrity sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/rlp@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.5.0.tgz#530f4f608f9ca9d4f89c24ab95db58ab56ab99a0" + integrity sha512-hLv8XaQ8PTI9g2RHoQGf/WSxBfTB/NudRacbzdxmst5VHAqd1sMibWG7SENzT5Dj3yZ3kJYx+WiRYEcQTAkcYA== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/rlp@5.7.0", "@ethersproject/rlp@^5.5.0", "@ethersproject/rlp@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" + integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/sha2@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.5.0.tgz#a40a054c61f98fd9eee99af2c3cc6ff57ec24db7" + 
integrity sha512-B5UBoglbCiHamRVPLA110J+2uqsifpZaTmid2/7W5rbtYVz6gus6/hSDieIU/6gaKIDcOj12WnOdiymEUHIAOA== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + hash.js "1.1.7" + +"@ethersproject/sha2@5.7.0", "@ethersproject/sha2@^5.5.0", "@ethersproject/sha2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.7.0.tgz#9a5f7a7824ef784f7f7680984e593a800480c9fb" + integrity sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + hash.js "1.1.7" + +"@ethersproject/signing-key@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.5.0.tgz#2aa37169ce7e01e3e80f2c14325f624c29cedbe0" + integrity sha512-5VmseH7qjtNmDdZBswavhotYbWB0bOwKIlOTSlX14rKn5c11QmJwGt4GHeo7NrL/Ycl7uo9AHvEqs5xZgFBTng== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + bn.js "^4.11.9" + elliptic "6.5.4" + hash.js "1.1.7" + +"@ethersproject/signing-key@5.7.0", "@ethersproject/signing-key@^5.5.0", "@ethersproject/signing-key@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" + integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + bn.js "^5.2.1" + elliptic "6.5.4" + hash.js "1.1.7" + +"@ethersproject/solidity@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.5.0.tgz#2662eb3e5da471b85a20531e420054278362f93f" + integrity sha512-9NgZs9LhGMj6aCtHXhtmFQ4AN4sth5HuFXVvAQtzmm0jpSCNOTGtrHZJAeYTh7MBjRR8brylWZxBZR9zDStXbw== + dependencies: + 
"@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/sha2" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + +"@ethersproject/solidity@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8" + integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/strings@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.5.0.tgz#e6784d00ec6c57710755699003bc747e98c5d549" + integrity sha512-9fy3TtF5LrX/wTrBaT8FGE6TDJyVjOvXynXJz5MT5azq+E6D92zuKNx7i29sWW2FjVOaWjAsiZ1ZWznuduTIIQ== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/strings@5.7.0", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.5.0", "@ethersproject/strings@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" + integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/transactions@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.5.0.tgz#7e9bf72e97bcdf69db34fe0d59e2f4203c7a2908" + integrity sha512-9RZYSKX26KfzEd/1eqvv8pLauCKzDTub0Ko4LfIgaERvRuwyaNV78mJs7cpIgZaDl6RJui4o49lHwwCM0526zA== + dependencies: + 
"@ethersproject/address" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/rlp" "^5.5.0" + "@ethersproject/signing-key" "^5.5.0" + +"@ethersproject/transactions@5.7.0", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.5.0", "@ethersproject/transactions@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" + integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== + dependencies: + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + +"@ethersproject/units@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.5.0.tgz#104d02db5b5dc42cc672cc4587bafb87a95ee45e" + integrity sha512-7+DpjiZk4v6wrikj+TCyWWa9dXLNU73tSTa7n0TSJDxkYbV3Yf1eRh9ToMLlZtuctNYu9RDNNy2USq3AdqSbag== + dependencies: + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/constants" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + +"@ethersproject/units@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.7.0.tgz#637b563d7e14f42deeee39245275d477aae1d8b1" + integrity sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg== + dependencies: + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + +"@ethersproject/wallet@5.5.0": + version "5.5.0" + resolved 
"https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.5.0.tgz#322a10527a440ece593980dca6182f17d54eae75" + integrity sha512-Mlu13hIctSYaZmUOo7r2PhNSd8eaMPVXe1wxrz4w4FCE4tDYBywDH+bAR1Xz2ADyXGwqYMwstzTrtUVIsKDO0Q== + dependencies: + "@ethersproject/abstract-provider" "^5.5.0" + "@ethersproject/abstract-signer" "^5.5.0" + "@ethersproject/address" "^5.5.0" + "@ethersproject/bignumber" "^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/hash" "^5.5.0" + "@ethersproject/hdnode" "^5.5.0" + "@ethersproject/json-wallets" "^5.5.0" + "@ethersproject/keccak256" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/random" "^5.5.0" + "@ethersproject/signing-key" "^5.5.0" + "@ethersproject/transactions" "^5.5.0" + "@ethersproject/wordlists" "^5.5.0" + +"@ethersproject/wallet@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.7.0.tgz#4e5d0790d96fe21d61d38fb40324e6c7ef350b2d" + integrity sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + "@ethersproject/json-wallets" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" + +"@ethersproject/web@5.5.1": + version "5.5.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.5.1.tgz#cfcc4a074a6936c657878ac58917a61341681316" + integrity sha512-olvLvc1CB12sREc1ROPSHTdFCdvMh0J5GSJYiQg2D0hdD4QmJDy8QYDb1CvoqD/bF1c++aeKv2sR5uduuG9dQg== + dependencies: + "@ethersproject/base64" 
"^5.5.0" + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + +"@ethersproject/web@5.7.1", "@ethersproject/web@^5.5.0", "@ethersproject/web@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" + integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== + dependencies: + "@ethersproject/base64" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@ethersproject/wordlists@5.5.0": + version "5.5.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.5.0.tgz#aac74963aa43e643638e5172353d931b347d584f" + integrity sha512-bL0UTReWDiaQJJYOC9sh/XcRu/9i2jMrzf8VLRmPKx58ckSlOJiohODkECCO50dtLZHcGU6MLXQ4OOrgBwP77Q== + dependencies: + "@ethersproject/bytes" "^5.5.0" + "@ethersproject/hash" "^5.5.0" + "@ethersproject/logger" "^5.5.0" + "@ethersproject/properties" "^5.5.0" + "@ethersproject/strings" "^5.5.0" + +"@ethersproject/wordlists@5.7.0", "@ethersproject/wordlists@^5.5.0", "@ethersproject/wordlists@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.7.0.tgz#8fb2c07185d68c3e09eb3bfd6e779ba2774627f5" + integrity sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA== + dependencies: + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + +"@humanwhocodes/config-array@^0.5.0": + version "0.5.0" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9" + integrity 
sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg== + dependencies: + "@humanwhocodes/object-schema" "^1.2.0" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@iarna/toml@^2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^29.2.1": + version "29.2.1" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-29.2.1.tgz#5f2c62dcdd5ce66e94b6d6729e021758bceea090" + integrity sha512-MF8Adcw+WPLZGBiNxn76DOuczG3BhODTcMlDCA4+cFi41OkaY/lyI0XUUhi73F88Y+7IHoGmD80pN5CtxQUdSw== + dependencies: + "@jest/types" "^29.2.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^29.2.1" + jest-util "^29.2.1" + slash "^3.0.0" + +"@jest/core@^29.2.2": + version "29.2.2" + resolved 
"https://registry.yarnpkg.com/@jest/core/-/core-29.2.2.tgz#207aa8973d9de8769f9518732bc5f781efc3ffa7" + integrity sha512-susVl8o2KYLcZhhkvSB+b7xX575CX3TmSvxfeDjpRko7KmT89rHkXj6XkDkNpSeFMBzIENw5qIchO9HC9Sem+A== + dependencies: + "@jest/console" "^29.2.1" + "@jest/reporters" "^29.2.2" + "@jest/test-result" "^29.2.1" + "@jest/transform" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + ci-info "^3.2.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^29.2.0" + jest-config "^29.2.2" + jest-haste-map "^29.2.1" + jest-message-util "^29.2.1" + jest-regex-util "^29.2.0" + jest-resolve "^29.2.2" + jest-resolve-dependencies "^29.2.2" + jest-runner "^29.2.2" + jest-runtime "^29.2.2" + jest-snapshot "^29.2.2" + jest-util "^29.2.1" + jest-validate "^29.2.2" + jest-watcher "^29.2.2" + micromatch "^4.0.4" + pretty-format "^29.2.1" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-29.2.2.tgz#481e729048d42e87d04842c38aa4d09c507f53b0" + integrity sha512-OWn+Vhu0I1yxuGBJEFFekMYc8aGBGrY4rt47SOh/IFaI+D7ZHCk7pKRiSoZ2/Ml7b0Ony3ydmEHRx/tEOC7H1A== + dependencies: + "@jest/fake-timers" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/node" "*" + jest-mock "^29.2.2" + +"@jest/expect-utils@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.2.2.tgz#460a5b5a3caf84d4feb2668677393dd66ff98665" + integrity sha512-vwnVmrVhTmGgQzyvcpze08br91OL61t9O0lJMDyb6Y/D8EKQ9V7rGUb/p7PDt0GPzK0zFYqXWFo4EO2legXmkg== + dependencies: + jest-get-type "^29.2.0" + +"@jest/expect@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-29.2.2.tgz#81edbd33afbde7795ca07ff6b4753d15205032e4" + integrity sha512-zwblIZnrIVt8z/SiEeJ7Q9wKKuB+/GS4yZe9zw7gMqfGf4C5hBLGrVyxu1SzDbVSqyMSlprKl3WL1r80cBNkgg== + dependencies: + expect "^29.2.2" + jest-snapshot "^29.2.2" + 
+"@jest/fake-timers@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-29.2.2.tgz#d8332e6e3cfa99cde4bc87d04a17d6b699deb340" + integrity sha512-nqaW3y2aSyZDl7zQ7t1XogsxeavNpH6kkdq+EpXncIDvAkjvFD7hmhcIs1nWloengEWUoWqkqSA6MSbf9w6DgA== + dependencies: + "@jest/types" "^29.2.1" + "@sinonjs/fake-timers" "^9.1.2" + "@types/node" "*" + jest-message-util "^29.2.1" + jest-mock "^29.2.2" + jest-util "^29.2.1" + +"@jest/globals@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-29.2.2.tgz#205ff1e795aa774301c2c0ba0be182558471b845" + integrity sha512-/nt+5YMh65kYcfBhj38B3Hm0Trk4IsuMXNDGKE/swp36yydBWfz3OXkLqkSvoAtPW8IJMSJDFCbTM2oj5SNprw== + dependencies: + "@jest/environment" "^29.2.2" + "@jest/expect" "^29.2.2" + "@jest/types" "^29.2.1" + jest-mock "^29.2.2" + +"@jest/reporters@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-29.2.2.tgz#69b395f79c3a97ce969ce05ccf1a482e5d6de290" + integrity sha512-AzjL2rl2zJC0njIzcooBvjA4sJjvdoq98sDuuNs4aNugtLPSQ+91nysGKRF0uY1to5k0MdGMdOBggUsPqvBcpA== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^29.2.1" + "@jest/test-result" "^29.2.1" + "@jest/transform" "^29.2.2" + "@jest/types" "^29.2.1" + "@jridgewell/trace-mapping" "^0.3.15" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-message-util "^29.2.1" + jest-util "^29.2.1" + jest-worker "^29.2.1" + slash "^3.0.0" + string-length "^4.0.1" + strip-ansi "^6.0.0" + v8-to-istanbul "^9.0.1" + +"@jest/schemas@^29.0.0": + version "29.0.0" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.0.0.tgz#5f47f5994dd4ef067fb7b4188ceac45f77fe952a" + integrity 
sha512-3Ab5HgYIIAnS0HjqJHQYZS+zXc4tUmTmBH3z83ajI6afXp8X3ZtdLX+nXx+I7LNkJD7uN9LAVhgnjDgZa2z0kA== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^29.2.0": + version "29.2.0" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-29.2.0.tgz#ab3420c46d42508dcc3dc1c6deee0b613c235744" + integrity sha512-1NX9/7zzI0nqa6+kgpSdKPK+WU1p+SJk3TloWZf5MzPbxri9UEeXX5bWZAPCzbQcyuAzubcdUHA7hcNznmRqWQ== + dependencies: + "@jridgewell/trace-mapping" "^0.3.15" + callsites "^3.0.0" + graceful-fs "^4.2.9" + +"@jest/test-result@^29.2.1": + version "29.2.1" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-29.2.1.tgz#f42dbf7b9ae465d0a93eee6131473b8bb3bd2edb" + integrity sha512-lS4+H+VkhbX6z64tZP7PAUwPqhwj3kbuEHcaLuaBuB+riyaX7oa1txe0tXgrFj5hRWvZKvqO7LZDlNWeJ7VTPA== + dependencies: + "@jest/console" "^29.2.1" + "@jest/types" "^29.2.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-29.2.2.tgz#4ac7487b237e517a1f55e7866fb5553f6e0168b9" + integrity sha512-Cuc1znc1pl4v9REgmmLf0jBd3Y65UXJpioGYtMr/JNpQEIGEzkmHhy6W6DLbSsXeUA13TDzymPv0ZGZ9jH3eIw== + dependencies: + "@jest/test-result" "^29.2.1" + graceful-fs "^4.2.9" + jest-haste-map "^29.2.1" + slash "^3.0.0" + +"@jest/transform@^29.2.2": + version "29.2.2" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-29.2.2.tgz#dfc03fc092b31ffea0c55917728e75bfcf8b5de6" + integrity sha512-aPe6rrletyuEIt2axxgdtxljmzH8O/nrov4byy6pDw9S8inIrTV+2PnjyP/oFHMSynzGxJ2s6OHowBNMXp/Jzg== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^29.2.1" + "@jridgewell/trace-mapping" "^0.3.15" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.1.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.2.1" + jest-regex-util "^29.2.0" + jest-util "^29.2.1" + micromatch "^4.0.4" + pirates 
"^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^29.2.1": + version "29.2.1" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.2.1.tgz#ec9c683094d4eb754e41e2119d8bdaef01cf6da0" + integrity sha512-O/QNDQODLnINEPAI0cl9U6zUIDXEWXt6IC1o2N2QENuos7hlGUIthlKyV4p6ki3TvXFX071blj8HUhgLGquPjw== + dependencies: + "@jest/schemas" "^29.0.0" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + 
+"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.15", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.17" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" + integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + +"@matterlabs/hardhat-zksync-solc@^0.3.14-beta.3": + version "0.3.14-beta.3" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.14-beta.3.tgz#b80275666459d8a047480d223c1098075b790170" + integrity sha512-H7MqJ4QXDgCvTYPWTJGjIJ71IGShT450SiSKKS3Vz8qbJNJusv7KKDsIDe2urwCTwLasSxRXk+Z+cEf03TnR8A== + dependencies: + "@nomiclabs/hardhat-docker" "^2.0.0" + chalk "4.1.2" + dockerode "^3.3.4" + +"@metamask/eth-sig-util@^4.0.0": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@metamask/eth-sig-util/-/eth-sig-util-4.0.1.tgz#3ad61f6ea9ad73ba5b19db780d40d9aae5157088" + integrity sha512-tghyZKLHZjcdlDqCA3gNZmLeR0XvOE9U1qoQO9ohyAZT6Pya+H9vkBPcsyXytmYLNgVoin7CKCmweo/R43V+tQ== + dependencies: + ethereumjs-abi "^0.6.8" + ethereumjs-util 
"^6.2.1" + ethjs-util "^0.1.6" + tweetnacl "^1.0.3" + tweetnacl-util "^0.15.1" + +"@noble/hashes@1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.2.tgz#e9e035b9b166ca0af657a7848eb2718f0f22f183" + integrity sha512-KYRCASVTv6aeUi1tsF8/vpyR7zpfs3FUzy2Jqm+MU+LmUKhQ0y2FpfwqkCcxSg2ua4GALJd8k2R76WxwZGbQpA== + +"@noble/hashes@~1.1.1": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.3.tgz#360afc77610e0a61f3417e497dcf36862e4f8111" + integrity sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A== + +"@noble/secp256k1@1.6.3", "@noble/secp256k1@~1.6.0": + version "1.6.3" + resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.6.3.tgz#7eed12d9f4404b416999d0c87686836c4c5c9b94" + integrity sha512-T04e4iTurVy7I8Sw4+c5OSN9/RkPlo1uKxAomtxQNLq8j1uPAqnsqG1bqvY3Jv7c13gyr6dui0zmh/I3+f/JaQ== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@nomicfoundation/ethereumjs-block@^4.0.0": + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-block/-/ethereumjs-block-4.0.0.tgz#fdd5c045e7baa5169abeed0e1202bf94e4481c49" + integrity sha512-bk8uP8VuexLgyIZAHExH1QEovqx0Lzhc9Ntm63nCRKLHXIZkobaFaeCVwTESV7YkPKUk7NiK11s8ryed4CS9yA== + dependencies: + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-trie" "^5.0.0" + "@nomicfoundation/ethereumjs-tx" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + ethereum-cryptography "0.1.3" + +"@nomicfoundation/ethereumjs-blockchain@^6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-blockchain/-/ethereumjs-blockchain-6.0.0.tgz#1a8c243a46d4d3691631f139bfb3a4a157187b0c" + integrity sha512-pLFEoea6MWd81QQYSReLlLfH7N9v7lH66JC/NMPN848ySPPQA5renWnE7wPByfQFzNrPBuDDRFFULMDmj1C0xw== + dependencies: + "@nomicfoundation/ethereumjs-block" "^4.0.0" + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-ethash" "^2.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-trie" "^5.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + abstract-level "^1.0.3" + debug "^4.3.3" + ethereum-cryptography "0.1.3" + level "^8.0.0" + lru-cache "^5.1.1" + memory-level "^1.0.0" + +"@nomicfoundation/ethereumjs-common@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-common/-/ethereumjs-common-3.0.0.tgz#f6bcc7753994555e49ab3aa517fc8bcf89c280b9" + integrity sha512-WS7qSshQfxoZOpHG/XqlHEGRG1zmyjYrvmATvc4c62+gZXgre1ymYP8ZNgx/3FyZY0TWe9OjFlKOfLqmgOeYwA== + dependencies: + "@nomicfoundation/ethereumjs-util" "^8.0.0" + crc-32 "^1.2.0" + +"@nomicfoundation/ethereumjs-ethash@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-ethash/-/ethereumjs-ethash-2.0.0.tgz#11539c32fe0990e1122ff987d1b84cfa34774e81" + integrity 
sha512-WpDvnRncfDUuXdsAXlI4lXbqUDOA+adYRQaEezIkxqDkc+LDyYDbd/xairmY98GnQzo1zIqsIL6GB5MoMSJDew== + dependencies: + "@nomicfoundation/ethereumjs-block" "^4.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + abstract-level "^1.0.3" + bigint-crypto-utils "^3.0.23" + ethereum-cryptography "0.1.3" + +"@nomicfoundation/ethereumjs-evm@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-evm/-/ethereumjs-evm-1.0.0.tgz#99cd173c03b59107c156a69c5e215409098a370b" + integrity sha512-hVS6qRo3V1PLKCO210UfcEQHvlG7GqR8iFzp0yyjTg2TmJQizcChKgWo8KFsdMw6AyoLgLhHGHw4HdlP8a4i+Q== + dependencies: + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + "@types/async-eventemitter" "^0.2.1" + async-eventemitter "^0.2.4" + debug "^4.3.3" + ethereum-cryptography "0.1.3" + mcl-wasm "^0.7.1" + rustbn.js "~0.2.0" + +"@nomicfoundation/ethereumjs-rlp@^4.0.0", "@nomicfoundation/ethereumjs-rlp@^4.0.0-beta.2": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-rlp/-/ethereumjs-rlp-4.0.0.tgz#d9a9c5f0f10310c8849b6525101de455a53e771d" + integrity sha512-GaSOGk5QbUk4eBP5qFbpXoZoZUj/NrW7MRa0tKY4Ew4c2HAS0GXArEMAamtFrkazp0BO4K5p2ZCG3b2FmbShmw== + +"@nomicfoundation/ethereumjs-statemanager@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-statemanager/-/ethereumjs-statemanager-1.0.0.tgz#14a9d4e1c828230368f7ab520c144c34d8721e4b" + integrity sha512-jCtqFjcd2QejtuAMjQzbil/4NHf5aAWxUc+CvS0JclQpl+7M0bxMofR2AJdtz+P3u0ke2euhYREDiE7iSO31vQ== + dependencies: + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-trie" "^5.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + debug "^4.3.3" + ethereum-cryptography "0.1.3" + functional-red-black-tree "^1.0.1" + +"@nomicfoundation/ethereumjs-trie@^5.0.0": + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-trie/-/ethereumjs-trie-5.0.0.tgz#dcfbe3be53a94bc061c9767a396c16702bc2f5b7" + integrity sha512-LIj5XdE+s+t6WSuq/ttegJzZ1vliwg6wlb+Y9f4RlBpuK35B9K02bO7xU+E6Rgg9RGptkWd6TVLdedTI4eNc2A== + dependencies: + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + ethereum-cryptography "0.1.3" + readable-stream "^3.6.0" + +"@nomicfoundation/ethereumjs-tx@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-tx/-/ethereumjs-tx-4.0.0.tgz#59dc7452b0862b30342966f7052ab9a1f7802f52" + integrity sha512-Gg3Lir2lNUck43Kp/3x6TfBNwcWC9Z1wYue9Nz3v4xjdcv6oDW9QSMJxqsKw9QEGoBBZ+gqwpW7+F05/rs/g1w== + dependencies: + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + ethereum-cryptography "0.1.3" + +"@nomicfoundation/ethereumjs-util@^8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-util/-/ethereumjs-util-8.0.0.tgz#deb2b15d2c308a731e82977aefc4e61ca0ece6c5" + integrity sha512-2emi0NJ/HmTG+CGY58fa+DQuAoroFeSH9gKu9O6JnwTtlzJtgfTixuoOqLEgyyzZVvwfIpRueuePb8TonL1y+A== + dependencies: + "@nomicfoundation/ethereumjs-rlp" "^4.0.0-beta.2" + ethereum-cryptography "0.1.3" + +"@nomicfoundation/ethereumjs-vm@^6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-vm/-/ethereumjs-vm-6.0.0.tgz#2bb50d332bf41790b01a3767ffec3987585d1de6" + integrity sha512-JMPxvPQ3fzD063Sg3Tp+UdwUkVxMoo1uML6KSzFhMH3hoQi/LMuXBoEHAoW83/vyNS9BxEe6jm6LmT5xdeEJ6w== + dependencies: + "@nomicfoundation/ethereumjs-block" "^4.0.0" + "@nomicfoundation/ethereumjs-blockchain" "^6.0.0" + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-evm" "^1.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-statemanager" "^1.0.0" + "@nomicfoundation/ethereumjs-trie" "^5.0.0" + 
"@nomicfoundation/ethereumjs-tx" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + "@types/async-eventemitter" "^0.2.1" + async-eventemitter "^0.2.4" + debug "^4.3.3" + ethereum-cryptography "0.1.3" + functional-red-black-tree "^1.0.1" + mcl-wasm "^0.7.1" + rustbn.js "~0.2.0" + +"@nomicfoundation/solidity-analyzer-darwin-arm64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-arm64/-/solidity-analyzer-darwin-arm64-0.1.0.tgz#83a7367342bd053a76d04bbcf4f373fef07cf760" + integrity sha512-vEF3yKuuzfMHsZecHQcnkUrqm8mnTWfJeEVFHpg+cO+le96xQA4lAJYdUan8pXZohQxv1fSReQsn4QGNuBNuCw== + +"@nomicfoundation/solidity-analyzer-darwin-x64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-x64/-/solidity-analyzer-darwin-x64-0.1.0.tgz#1225f7da647ae1ad25a87125664704ecc0af6ccc" + integrity sha512-dlHeIg0pTL4dB1l9JDwbi/JG6dHQaU1xpDK+ugYO8eJ1kxx9Dh2isEUtA4d02cQAl22cjOHTvifAk96A+ItEHA== + +"@nomicfoundation/solidity-analyzer-freebsd-x64@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-freebsd-x64/-/solidity-analyzer-freebsd-x64-0.1.0.tgz#dbc052dcdfd50ae50fd5ae1788b69b4e0fa40040" + integrity sha512-WFCZYMv86WowDA4GiJKnebMQRt3kCcFqHeIomW6NMyqiKqhK1kIZCxSLDYsxqlx396kKLPN1713Q1S8tu68GKg== + +"@nomicfoundation/solidity-analyzer-linux-arm64-gnu@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-gnu/-/solidity-analyzer-linux-arm64-gnu-0.1.0.tgz#e6b2eea633995b557e74e881d2a43eab4760903d" + integrity sha512-DTw6MNQWWlCgc71Pq7CEhEqkb7fZnS7oly13pujs4cMH1sR0JzNk90Mp1zpSCsCs4oKan2ClhMlLKtNat/XRKQ== + +"@nomicfoundation/solidity-analyzer-linux-arm64-musl@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-arm64-musl/-/solidity-analyzer-linux-arm64-musl-0.1.0.tgz#af81107f5afa794f19988a368647727806e18dc4" + integrity 
sha512-wUpUnR/3GV5Da88MhrxXh/lhb9kxh9V3Jya2NpBEhKDIRCDmtXMSqPMXHZmOR9DfCwCvG6vLFPr/+YrPCnUN0w== + +"@nomicfoundation/solidity-analyzer-linux-x64-gnu@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-gnu/-/solidity-analyzer-linux-x64-gnu-0.1.0.tgz#6877e1da1a06a9f08446070ab6e0a5347109f868" + integrity sha512-lR0AxK1x/MeKQ/3Pt923kPvwigmGX3OxeU5qNtQ9pj9iucgk4PzhbS3ruUeSpYhUxG50jN4RkIGwUMoev5lguw== + +"@nomicfoundation/solidity-analyzer-linux-x64-musl@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-linux-x64-musl/-/solidity-analyzer-linux-x64-musl-0.1.0.tgz#bb6cd83a0c259eccef4183796b6329a66cf7ebd9" + integrity sha512-A1he/8gy/JeBD3FKvmI6WUJrGrI5uWJNr5Xb9WdV+DK0F8msuOqpEByLlnTdLkXMwW7nSl3awvLezOs9xBHJEg== + +"@nomicfoundation/solidity-analyzer-win32-arm64-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-arm64-msvc/-/solidity-analyzer-win32-arm64-msvc-0.1.0.tgz#9d4bca1cc9a1333fde985675083b0b7d165f6076" + integrity sha512-7x5SXZ9R9H4SluJZZP8XPN+ju7Mx+XeUMWZw7ZAqkdhP5mK19I4vz3x0zIWygmfE8RT7uQ5xMap0/9NPsO+ykw== + +"@nomicfoundation/solidity-analyzer-win32-ia32-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-ia32-msvc/-/solidity-analyzer-win32-ia32-msvc-0.1.0.tgz#0db5bfc6aa952bea4098d8d2c8947b4e5c4337ee" + integrity sha512-m7w3xf+hnE774YRXu+2mGV7RiF3QJtUoiYU61FascCkQhX3QMQavh7saH/vzb2jN5D24nT/jwvaHYX/MAM9zUw== + +"@nomicfoundation/solidity-analyzer-win32-x64-msvc@0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-win32-x64-msvc/-/solidity-analyzer-win32-x64-msvc-0.1.0.tgz#2e0f39a2924dcd77db6b419828595e984fabcb33" + integrity sha512-xCuybjY0sLJQnJhupiFAXaek2EqF0AP0eBjgzaalPXSNvCEN6ZYHvUzdA50ENDVeSYFXcUsYf3+FsD3XKaeptA== + +"@nomicfoundation/solidity-analyzer@^0.1.0": + version "0.1.0" + 
resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer/-/solidity-analyzer-0.1.0.tgz#e5ddc43ad5c0aab96e5054520d8e16212e125f50" + integrity sha512-xGWAiVCGOycvGiP/qrlf9f9eOn7fpNbyJygcB0P21a1MDuVPlKt0Srp7rvtBEutYQ48ouYnRXm33zlRnlTOPHg== + optionalDependencies: + "@nomicfoundation/solidity-analyzer-darwin-arm64" "0.1.0" + "@nomicfoundation/solidity-analyzer-darwin-x64" "0.1.0" + "@nomicfoundation/solidity-analyzer-freebsd-x64" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-arm64-gnu" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-arm64-musl" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-x64-gnu" "0.1.0" + "@nomicfoundation/solidity-analyzer-linux-x64-musl" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-arm64-msvc" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-ia32-msvc" "0.1.0" + "@nomicfoundation/solidity-analyzer-win32-x64-msvc" "0.1.0" + +"@nomiclabs/hardhat-docker@^2.0.0": + version "2.0.2" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-docker/-/hardhat-docker-2.0.2.tgz#ae964be17951275a55859ff7358e9e7c77448846" + integrity sha512-XgGEpRT3wlA1VslyB57zyAHV+oll8KnV1TjwnxxC1tpAL04/lbdwpdO5KxInVN8irMSepqFpsiSkqlcnvbE7Ng== + dependencies: + dockerode "^2.5.8" + fs-extra "^7.0.1" + node-fetch "^2.6.0" + +"@nomiclabs/hardhat-ethers@^2.0.0": + version "2.2.2" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-ethers/-/hardhat-ethers-2.2.2.tgz#812d48929c3bf8fe840ec29eab4b613693467679" + integrity sha512-NLDlDFL2us07C0jB/9wzvR0kuLivChJWCXTKcj3yqjZqMoYp7g7wwS157F70VHx/+9gHIBGzak5pKDwG8gEefA== + +"@nomiclabs/hardhat-etherscan@^2.1.0": + version "2.1.8" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-etherscan/-/hardhat-etherscan-2.1.8.tgz#e206275e96962cd15e5ba9148b44388bc922d8c2" + integrity sha512-0+rj0SsZotVOcTLyDOxnOc3Gulo8upo0rsw/h+gBPcmtj91YqYJNhdARHoBxOhhE8z+5IUQPx+Dii04lXT14PA== + dependencies: + "@ethersproject/abi" "^5.1.2" + "@ethersproject/address" "^5.0.2" + cbor "^5.0.2" + debug 
"^4.1.1" + fs-extra "^7.0.1" + node-fetch "^2.6.0" + semver "^6.3.0" + +"@nomiclabs/hardhat-solpp@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-solpp/-/hardhat-solpp-2.0.1.tgz#04039b3745b8d2b48c9b8bec6509e9785631aaba" + integrity sha512-aWYvB91GPJcnye4Ph26Jd9BfBNNisI1iRNSbHB2i09OpxucSHAPMvvqTfWDN1HE5EMjqlTJ2rQLdlDcYqQxPJw== + dependencies: + fs-extra "^7.0.1" + solpp "^0.11.5" + +"@nomiclabs/hardhat-waffle@^2.0.0": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-waffle/-/hardhat-waffle-2.0.3.tgz#9c538a09c5ed89f68f5fd2dc3f78f16ed1d6e0b1" + integrity sha512-049PHSnI1CZq6+XTbrMbMv5NaL7cednTfPenx02k3cEh8wBMLa6ys++dBETJa6JjfwgA9nBhhHQ173LJv6k2Pg== + dependencies: + "@types/sinon-chai" "^3.2.3" + "@types/web3" "1.0.19" + +"@openzeppelin/contracts-upgradeable@4.8.0": + version "4.8.0" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.8.0.tgz#26688982f46969018e3ed3199e72a07c8d114275" + integrity sha512-5GeFgqMiDlqGT8EdORadp1ntGF0qzWZLmEY7Wbp/yVhN7/B3NNzCxujuI77ktlyG81N3CUZP8cZe3ZAQ/cW10w== + +"@openzeppelin/contracts-upgradeable@^4.6.0": + version "4.8.1" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts-upgradeable/-/contracts-upgradeable-4.8.1.tgz#363f7dd08f25f8f77e16d374350c3d6b43340a7a" + integrity sha512-1wTv+20lNiC0R07jyIAbHU7TNHKRwGiTGRfiNnA8jOWjKT98g5OgLpYWOi40Vgpk8SPLA9EvfJAbAeIyVn+7Bw== + +"@openzeppelin/contracts@4.6.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.6.0.tgz#c91cf64bc27f573836dba4122758b4743418c1b3" + integrity sha512-8vi4d50NNya/bQqCmaVzvHNmwHvS0OBKb7HNtuNwEE3scXWrP31fKQoGxNMT+KbzmrNZzatE3QK5p2gFONI/hg== + +"@openzeppelin/contracts@4.8.0": + version "4.8.0" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.8.0.tgz#6854c37df205dd2c056bdfa1b853f5d732109109" + integrity 
sha512-AGuwhRRL+NaKx73WKRNzeCxOCOCxpaqF+kp8TJ89QzAipSwZy/NoflkWaL9bywXFRhIzXt8j38sfF7KBKCPWLw== + +"@openzeppelin/contracts@^4.8.0": + version "4.8.1" + resolved "https://registry.yarnpkg.com/@openzeppelin/contracts/-/contracts-4.8.1.tgz#709cfc4bbb3ca9f4460d60101f15dac6b7a2d5e4" + integrity sha512-xQ6eUZl+RDyb/FiZe1h+U7qr/f4p/SrTSQcTPH2bjur3C5DbuW/zFgCU/b1P/xcIaEqJep+9ju4xDRi3rmChdQ== + +"@resolver-engine/core@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@resolver-engine/core/-/core-0.3.3.tgz#590f77d85d45bc7ecc4e06c654f41345db6ca967" + integrity sha512-eB8nEbKDJJBi5p5SrvrvILn4a0h42bKtbCTri3ZxCGt6UvoQyp7HnGOfki944bUjBSHKK3RvgfViHn+kqdXtnQ== + dependencies: + debug "^3.1.0" + is-url "^1.2.4" + request "^2.85.0" + +"@resolver-engine/fs@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@resolver-engine/fs/-/fs-0.3.3.tgz#fbf83fa0c4f60154a82c817d2fe3f3b0c049a973" + integrity sha512-wQ9RhPUcny02Wm0IuJwYMyAG8fXVeKdmhm8xizNByD4ryZlx6PP6kRen+t/haF43cMfmaV7T3Cx6ChOdHEhFUQ== + dependencies: + "@resolver-engine/core" "^0.3.3" + debug "^3.1.0" + +"@resolver-engine/imports-fs@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@resolver-engine/imports-fs/-/imports-fs-0.3.3.tgz#4085db4b8d3c03feb7a425fbfcf5325c0d1e6c1b" + integrity sha512-7Pjg/ZAZtxpeyCFlZR5zqYkz+Wdo84ugB5LApwriT8XFeQoLwGUj4tZFFvvCuxaNCcqZzCYbonJgmGObYBzyCA== + dependencies: + "@resolver-engine/fs" "^0.3.3" + "@resolver-engine/imports" "^0.3.3" + debug "^3.1.0" + +"@resolver-engine/imports@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@resolver-engine/imports/-/imports-0.3.3.tgz#badfb513bb3ff3c1ee9fd56073e3144245588bcc" + integrity sha512-anHpS4wN4sRMwsAbMXhMfOD/y4a4Oo0Cw/5+rue7hSwGWsDOQaAU1ClK1OxjUC35/peazxEl8JaSRRS+Xb8t3Q== + dependencies: + "@resolver-engine/core" "^0.3.3" + debug "^3.1.0" + hosted-git-info "^2.6.0" + path-browserify "^1.0.0" + url "^0.11.0" + +"@scure/base@~1.1.0": + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938" + integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA== + +"@scure/bip32@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@scure/bip32/-/bip32-1.1.0.tgz#dea45875e7fbc720c2b4560325f1cf5d2246d95b" + integrity sha512-ftTW3kKX54YXLCxH6BB7oEEoJfoE2pIgw7MINKAs5PsS6nqKPuKk1haTF/EuHmYqG330t5GSrdmtRuHaY1a62Q== + dependencies: + "@noble/hashes" "~1.1.1" + "@noble/secp256k1" "~1.6.0" + "@scure/base" "~1.1.0" + +"@scure/bip39@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.1.0.tgz#92f11d095bae025f166bef3defcc5bf4945d419a" + integrity sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w== + dependencies: + "@noble/hashes" "~1.1.1" + "@scure/base" "~1.1.0" + +"@sentry/core@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-5.30.0.tgz#6b203664f69e75106ee8b5a2fe1d717379b331f3" + integrity sha512-TmfrII8w1PQZSZgPpUESqjB+jC6MvZJZdLtE/0hZ+SrnKhW3x5WlYLvTXZpcWePYBku7rl2wn1RZu6uT0qCTeg== + dependencies: + "@sentry/hub" "5.30.0" + "@sentry/minimal" "5.30.0" + "@sentry/types" "5.30.0" + "@sentry/utils" "5.30.0" + tslib "^1.9.3" + +"@sentry/hub@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-5.30.0.tgz#2453be9b9cb903404366e198bd30c7ca74cdc100" + integrity sha512-2tYrGnzb1gKz2EkMDQcfLrDTvmGcQPuWxLnJKXJvYTQDGLlEvi2tWz1VIHjunmOvJrB5aIQLhm+dcMRwFZDCqQ== + dependencies: + "@sentry/types" "5.30.0" + "@sentry/utils" "5.30.0" + tslib "^1.9.3" + +"@sentry/minimal@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-5.30.0.tgz#ce3d3a6a273428e0084adcb800bc12e72d34637b" + integrity sha512-BwWb/owZKtkDX+Sc4zCSTNcvZUq7YcH3uAVlmh/gtR9rmUvbzAA3ewLuB3myi4wWRAMEtny6+J/FN/x+2wn9Xw== + dependencies: + "@sentry/hub" "5.30.0" + "@sentry/types" 
"5.30.0" + tslib "^1.9.3" + +"@sentry/node@^5.18.1": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-5.30.0.tgz#4ca479e799b1021285d7fe12ac0858951c11cd48" + integrity sha512-Br5oyVBF0fZo6ZS9bxbJZG4ApAjRqAnqFFurMVJJdunNb80brh7a5Qva2kjhm+U6r9NJAB5OmDyPkA1Qnt+QVg== + dependencies: + "@sentry/core" "5.30.0" + "@sentry/hub" "5.30.0" + "@sentry/tracing" "5.30.0" + "@sentry/types" "5.30.0" + "@sentry/utils" "5.30.0" + cookie "^0.4.1" + https-proxy-agent "^5.0.0" + lru_map "^0.3.3" + tslib "^1.9.3" + +"@sentry/tracing@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-5.30.0.tgz#501d21f00c3f3be7f7635d8710da70d9419d4e1f" + integrity sha512-dUFowCr0AIMwiLD7Fs314Mdzcug+gBVo/+NCMyDw8tFxJkwWAKl7Qa2OZxLQ0ZHjakcj1hNKfCQJ9rhyfOl4Aw== + dependencies: + "@sentry/hub" "5.30.0" + "@sentry/minimal" "5.30.0" + "@sentry/types" "5.30.0" + "@sentry/utils" "5.30.0" + tslib "^1.9.3" + +"@sentry/types@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-5.30.0.tgz#19709bbe12a1a0115bc790b8942917da5636f402" + integrity sha512-R8xOqlSTZ+htqrfteCWU5Nk0CDN5ApUTvrlvBuiH1DyP6czDZ4ktbZB0hAgBlVcK0U+qpD3ag3Tqqpa5Q67rPw== + +"@sentry/utils@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-5.30.0.tgz#9a5bd7ccff85ccfe7856d493bffa64cabc41e980" + integrity sha512-zaYmoH0NWWtvnJjC9/CBseXMtKHm/tm40sz3YfJRxeQjyzRqNQPgivpd9R/oDJCYj999mzdW382p/qi2ypjLww== + dependencies: + "@sentry/types" "5.30.0" + tslib "^1.9.3" + +"@sinclair/typebox@^0.24.1": + version "0.24.51" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== + +"@sindresorhus/is@^0.14.0": + version "0.14.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" + 
integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== + +"@sindresorhus/is@^4.0.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" + integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== + +"@sinonjs/commons@^1.7.0": + version "1.8.4" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.4.tgz#d1f2d80f1bd0f2520873f161588bd9b7f8567120" + integrity sha512-RpmQdHVo8hCEHDVpO39zToS9jOhR6nw+/lQAzRNq9ErrGV9IeHM71XCn68svVl/euFeVW6BWX4p35gkhbOcSIQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^9.1.2": + version "9.1.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" + integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@solidity-parser/parser@^0.14.1", "@solidity-parser/parser@^0.14.2": + version "0.14.5" + resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.5.tgz#87bc3cc7b068e08195c219c91cd8ddff5ef1a804" + integrity sha512-6dKnHZn7fg/iQATVEzqyUOyEidbn05q7YA2mQ9hC0MMXhhV3/JrsxmFSYZAcr7j1yUP700LLhTruvJ3MiQmjJg== + dependencies: + antlr4ts "^0.5.0-alpha.4" + +"@szmarczak/http-timer@^1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-1.1.2.tgz#b1665e2c461a2cd92f4c1bbf50d5454de0d4b421" + integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== + dependencies: + defer-to-connect "^1.0.1" + +"@szmarczak/http-timer@^4.0.5": + version "4.0.6" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" + integrity 
sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== + dependencies: + defer-to-connect "^2.0.0" + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== + +"@typechain/ethers-v5@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@typechain/ethers-v5/-/ethers-v5-2.0.0.tgz#cd3ca1590240d587ca301f4c029b67bfccd08810" + integrity sha512-0xdCkyGOzdqh4h5JSf+zoWx85IusEjDcPIwNEHP8mrWSnCae4rvrqB+/gtpdNfX7zjlFlZiMeePn2r63EI3Lrw== + dependencies: + ethers "^5.0.2" + +"@types/argparse@^1.0.36": + version "1.0.38" + resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9" + integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA== + +"@types/async-eventemitter@^0.2.1": + version "0.2.1" + resolved 
"https://registry.yarnpkg.com/@types/async-eventemitter/-/async-eventemitter-0.2.1.tgz#f8e6280e87e8c60b2b938624b0a3530fb3e24712" + integrity sha512-M2P4Ng26QbAeITiH7w1d7OxtldgfAe0wobpyJzVK/XOb0cUGKU2R4pfAhqcJBXAe2ife5ZOhSv4wk7p+ffURtg== + +"@types/babel__core@^7.1.14": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" + integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== + dependencies: + "@babel/types" "^7.3.0" + +"@types/bn.js@*", "@types/bn.js@^5.1.0": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.1.tgz#b51e1b55920a4ca26e9285ff79936bbdec910682" + integrity 
sha512-qNrYbZqMx0uJAfKnKclPh+dTwK33KfLHYqtyODwd5HnXOjnkhc4qgn3BrK6RWyGZm5+sIFE7Q7Vz6QQtJB7w7g== + dependencies: + "@types/node" "*" + +"@types/bn.js@^4.11.3", "@types/bn.js@^4.11.5": + version "4.11.6" + resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-4.11.6.tgz#c306c70d9358aaea33cd4eda092a742b9505967c" + integrity sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg== + dependencies: + "@types/node" "*" + +"@types/cacheable-request@^6.0.1": + version "6.0.3" + resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.3.tgz#a430b3260466ca7b5ca5bfd735693b36e7a9d183" + integrity sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw== + dependencies: + "@types/http-cache-semantics" "*" + "@types/keyv" "^3.1.4" + "@types/node" "*" + "@types/responselike" "^1.0.0" + +"@types/chai-as-promised@^7.1.4": + version "7.1.5" + resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-7.1.5.tgz#6e016811f6c7a64f2eed823191c3a6955094e255" + integrity sha512-jStwss93SITGBwt/niYrkf2C+/1KTeZCZl1LaeezTlqppAKeoQC7jxyqYuP72sxBGKCIbw7oHgbYssIRzT5FCQ== + dependencies: + "@types/chai" "*" + +"@types/chai@*": + version "4.3.4" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.4.tgz#e913e8175db8307d78b4e8fa690408ba6b65dee4" + integrity sha512-KnRanxnpfpjUTqTCXslZSEdLfXExwgNxYPdiO2WGUj8+HDjFi8R3k5RVKPeSCzLjCcshCAtVO2QBbVuAV4kTnw== + +"@types/chai@^4.2.19", "@types/chai@^4.2.21": + version "4.3.3" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.3.tgz#3c90752792660c4b562ad73b3fbd68bf3bc7ae07" + integrity sha512-hC7OMnszpxhZPduX+m+nrx+uFoLkWOMiR4oa/AZF3MuSETYTZmFfJAHqZEM8MVlvfG7BEUcgvtwoCTxBp6hm3g== + +"@types/deep-extend@^0.4.31": + version "0.4.32" + resolved "https://registry.yarnpkg.com/@types/deep-extend/-/deep-extend-0.4.32.tgz#0af51fffde55cb168e8d68f8236908c2cdfe7419" + integrity 
sha512-7/pcMJr5I5OnpWTTfv0o3fJ9+f36EqoQa27/oJlbfvfZAMMrPyU5/+AUC+5OOtTEKdyoW4lAeIBYHtodtEdNUA== + +"@types/graceful-fs@^4.1.3": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/http-cache-semantics@*": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" + integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@^29.0.3": + version "29.2.1" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.2.1.tgz#31fda30bdf2861706abc5f1730be78bed54f83ee" + integrity 
sha512-nKixEdnGDqFOZkMTF74avFNr3yRqB1ZJ6sRZv5/28D5x2oLN14KApv7F9mfDT/vUic0L3tRCsh3XWpWjtJisUQ== + dependencies: + expect "^29.0.0" + pretty-format "^29.0.0" + +"@types/json-schema@^7.0.7": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/keyv@^3.1.4": + version "3.1.4" + resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.4.tgz#3ccdb1c6751b0c7e52300bcdacd5bcbf8faa75b6" + integrity sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg== + dependencies: + "@types/node" "*" + +"@types/lru-cache@^5.1.0": + version "5.1.1" + resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.1.tgz#c48c2e27b65d2a153b19bfc1a317e30872e01eef" + integrity sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw== + +"@types/mkdirp@^0.5.2": + version "0.5.2" + resolved "https://registry.yarnpkg.com/@types/mkdirp/-/mkdirp-0.5.2.tgz#503aacfe5cc2703d5484326b1b27efa67a339c1f" + integrity sha512-U5icWpv7YnZYGsN4/cmh3WD2onMY0aJIiTE6+51TwJCttdHvtCYmkBNOobHlXwrJRL0nkH9jH4kD+1FAdMN4Tg== + dependencies: + "@types/node" "*" + +"@types/mocha-steps@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@types/mocha-steps/-/mocha-steps-1.3.0.tgz#3086c74675f45359c514f80c5090f9bac23097aa" + integrity sha512-hI0P9rS20BhHSXWTqLYcRYy6PGYk9vMZFNX7UF0ZWUrDMuqawtVRuTkYq7rG25sBSpL28BZggABBecFDK6ZZyg== + dependencies: + "@types/mocha" "*" + +"@types/mocha@*": + version "10.0.1" + resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-10.0.1.tgz#2f4f65bb08bc368ac39c96da7b2f09140b26851b" + integrity sha512-/fvYntiO1GeICvqbQ3doGDIP97vWmvFt83GKguJ6prmQM2iXZfFcq6YE8KteFyRtX2/h5Hf91BYvPodJKFYv5Q== + +"@types/mocha@^8.2.2", "@types/mocha@^8.2.3": + version "8.2.3" + resolved 
"https://registry.yarnpkg.com/@types/mocha/-/mocha-8.2.3.tgz#bbeb55fbc73f28ea6de601fbfa4613f58d785323" + integrity sha512-ekGvFhFgrc2zYQoX4JeZPmVzZxw6Dtllga7iGHzfbYIYkAMUx/sAFP2GdFpLff+vdHXu5fl7WX9AT+TtqYcsyw== + +"@types/node-fetch@^2.5.5", "@types/node-fetch@^2.5.7": + version "2.6.2" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da" + integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== + dependencies: + "@types/node" "*" + form-data "^3.0.0" + +"@types/node@*": + version "18.11.9" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.9.tgz#02d013de7058cea16d36168ef2fc653464cfbad4" + integrity sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg== + +"@types/node@^12.12.6": + version "12.20.55" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" + integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== + +"@types/node@^14.14.5", "@types/node@^14.6.1": + version "14.18.33" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.33.tgz#8c29a0036771569662e4635790ffa9e057db379b" + integrity sha512-qelS/Ra6sacc4loe/3MSjXNL1dNQ/GjxNHVzuChwMfmk7HuycRLVQN2qNY3XahK+fZc5E2szqQSKUyAF0E+2bg== + +"@types/node@^16.0.0": + version "16.18.3" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.18.3.tgz#d7f7ba828ad9e540270f01ce00d391c54e6e0abc" + integrity sha512-jh6m0QUhIRcZpNv7Z/rpN+ZWXOicUUQbSoWks7Htkbb9IjFQj4kzcX/xFCkjstCj5flMsN8FiSvt+q+Tcs4Llg== + +"@types/pbkdf2@^3.0.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@types/pbkdf2/-/pbkdf2-3.1.0.tgz#039a0e9b67da0cdc4ee5dab865caa6b267bb66b1" + integrity sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ== + dependencies: + "@types/node" "*" + 
+"@types/prettier@^2.1.1": + version "2.7.2" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.2.tgz#6c2324641cc4ba050a8c710b2b251b377581fbf0" + integrity sha512-KufADq8uQqo1pYKVIYzfKbJfBAc0sOeXqGbFaSpv8MRmC/zXgowNZmFcbngndGk922QDmOASEXUZCaY48gs4cg== + +"@types/prettier@^2.1.5": + version "2.7.1" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== + +"@types/resolve@^0.0.8": + version "0.0.8" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" + integrity sha512-auApPaJf3NPfe18hSoJkp8EbZzer2ISk7o8mCC3M9he/a04+gbMF97NkpD2S8riMGvm4BMRI59/SZQSaLTKpsQ== + dependencies: + "@types/node" "*" + +"@types/responselike@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" + integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + dependencies: + "@types/node" "*" + +"@types/secp256k1@^4.0.1": + version "4.0.3" + resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.3.tgz#1b8e55d8e00f08ee7220b4d59a6abe89c37a901c" + integrity sha512-Da66lEIFeIz9ltsdMZcpQvmrmmoqrfju8pm1BH8WbYjZSwUgCwXLb9C+9XYogwBITnbsSaMdVPb2ekf7TV+03w== + dependencies: + "@types/node" "*" + +"@types/sinon-chai@^3.2.3": + version "3.2.9" + resolved "https://registry.yarnpkg.com/@types/sinon-chai/-/sinon-chai-3.2.9.tgz#71feb938574bbadcb176c68e5ff1a6014c5e69d4" + integrity sha512-/19t63pFYU0ikrdbXKBWj9PCdnKyTd0Qkz0X91Ta081cYsq90OxYdcWwK/dwEoDa6dtXgj2HJfmzgq+QZTHdmQ== + dependencies: + "@types/chai" "*" + "@types/sinon" "*" + +"@types/sinon@*": + version "10.0.13" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.13.tgz#60a7a87a70d9372d0b7b38cc03e825f46981fb83" + integrity 
sha512-UVjDqJblVNQYvVNUsj0PuYYw0ELRmgt1Nt5Vk0pT5f16ROGfcKJY8o1HVuMOJOpD727RrGB9EGvoaTQE5tgxZQ== + dependencies: + "@types/sinonjs__fake-timers" "*" + +"@types/sinonjs__fake-timers@*": + version "8.1.2" + resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.2.tgz#bf2e02a3dbd4aecaf95942ecd99b7402e03fad5e" + integrity sha512-9GcLXF0/v3t80caGs5p2rRfkB+a8VBGLJZVih6CNFkx8IZ994wiKKLSRs9nuFwk1HevWs/1mnUmkApGrSGsShA== + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/tabtab@^3.0.1": + version "3.0.2" + resolved "https://registry.yarnpkg.com/@types/tabtab/-/tabtab-3.0.2.tgz#047657fdeb98a13bfd38c6d92d8327066759695c" + integrity sha512-d8aOSJPS3SEGZevyr7vbAVUNPWGFmdFlk13vbPPK87vz+gYGM57L8T11k4wK2mOgQYZjEVYQEqmCTvupPoQBWw== + dependencies: + "@types/node" "*" + +"@types/underscore@*": + version "1.11.4" + resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.11.4.tgz#62e393f8bc4bd8a06154d110c7d042a93751def3" + integrity sha512-uO4CD2ELOjw8tasUrAhvnn2W4A0ZECOvMjCivJr4gA9pGgjv+qxKWY9GLTMVEK8ej85BxQOocUyE7hImmSQYcg== + +"@types/web3@1.0.19": + version "1.0.19" + resolved "https://registry.yarnpkg.com/@types/web3/-/web3-1.0.19.tgz#46b85d91d398ded9ab7c85a5dd57cb33ac558924" + integrity sha512-fhZ9DyvDYDwHZUp5/STa9XW2re0E8GxoioYJ4pEUZ13YHpApSagixj7IAdoYH5uAK+UalGq6Ml8LYzmgRA/q+A== + dependencies: + "@types/bn.js" "*" + "@types/underscore" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.13" + resolved 
"https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" + integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@^4.10.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.33.0.tgz#c24dc7c8069c7706bc40d99f6fa87edcb2005276" + integrity sha512-aINiAxGVdOl1eJyVjaWn/YcVAq4Gi/Yo35qHGCnqbWVz61g39D0h23veY/MA0rFFGfxK7TySg2uwDeNv+JgVpg== + dependencies: + "@typescript-eslint/experimental-utils" "4.33.0" + "@typescript-eslint/scope-manager" "4.33.0" + debug "^4.3.1" + functional-red-black-tree "^1.0.1" + ignore "^5.1.8" + regexpp "^3.1.0" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/experimental-utils@4.33.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.33.0.tgz#6f2a786a4209fa2222989e9380b5331b2810f7fd" + integrity sha512-zeQjOoES5JFjTnAhI5QY7ZviczMzDptls15GFsI6jyUOq0kOf9+WonkhtlIhh0RgHRnqj5gdNxW5j1EvAyYg6Q== + dependencies: + "@types/json-schema" "^7.0.7" + "@typescript-eslint/scope-manager" "4.33.0" + "@typescript-eslint/types" "4.33.0" + "@typescript-eslint/typescript-estree" "4.33.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/parser@^4.10.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.33.0.tgz#dfe797570d9694e560528d18eecad86c8c744899" + integrity sha512-ZohdsbXadjGBSK0/r+d87X0SBmKzOq4/S5nzK6SBgJspFo9/CUDJ7hjayuze+JK7CZQLDMroqytp7pOcFKTxZA== + dependencies: + "@typescript-eslint/scope-manager" "4.33.0" + "@typescript-eslint/types" "4.33.0" + "@typescript-eslint/typescript-estree" "4.33.0" + debug "^4.3.1" + +"@typescript-eslint/scope-manager@4.33.0": + version "4.33.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.33.0.tgz#d38e49280d983e8772e29121cf8c6e9221f280a3" + integrity sha512-5IfJHpgTsTZuONKbODctL4kKuQje/bzBRkwHE8UOZ4f89Zeddg+EGZs8PD8NcN4LdM3ygHWYB3ukPAYjvl/qbQ== + dependencies: + "@typescript-eslint/types" "4.33.0" + "@typescript-eslint/visitor-keys" "4.33.0" + +"@typescript-eslint/types@4.33.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.33.0.tgz#a1e59036a3b53ae8430ceebf2a919dc7f9af6d72" + integrity sha512-zKp7CjQzLQImXEpLt2BUw1tvOMPfNoTAfb8l51evhYbOEEzdWyQNmHWWGPR6hwKJDAi+1VXSBmnhL9kyVTTOuQ== + +"@typescript-eslint/typescript-estree@4.33.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.33.0.tgz#0dfb51c2908f68c5c08d82aefeaf166a17c24609" + integrity sha512-rkWRY1MPFzjwnEVHsxGemDzqqddw2QbTJlICPD9p9I9LfsO8fdmfQPOX3uKfUaGRDFJbfrtm/sXhVXN4E+bzCA== + dependencies: + "@typescript-eslint/types" "4.33.0" + "@typescript-eslint/visitor-keys" "4.33.0" + debug "^4.3.1" + globby "^11.0.3" + is-glob "^4.0.1" + semver "^7.3.5" + tsutils "^3.21.0" + +"@typescript-eslint/visitor-keys@4.33.0": + version "4.33.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.33.0.tgz#2a22f77a41604289b7a186586e9ec48ca92ef1dd" + integrity sha512-uqi/2aSz9g2ftcHWf8uLPJA70rUv6yuMW5Bohw+bwcuzaxQIHaKFZCKGoGXIrc9vkTJ3+0txM73K0Hq3d5wgIg== + dependencies: + "@typescript-eslint/types" "4.33.0" + eslint-visitor-keys "^2.0.0" + +"@ungap/promise-all-settled@1.1.2": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz#aa58042711d6e3275dd37dc597e5d31e8c290a44" + integrity sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q== + +"@yarnpkg/lockfile@^1.1.0": + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" + integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== + +JSONStream@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.2.tgz#c102371b6ec3a7cf3b847ca00c20bb0fce4c6dea" + integrity sha512-mn0KSip7N4e0UDPZHnqDsHECo5uGQrixQKnAskOM1BIB8hd7QKbd6il8IPRPudPHOeHiECoCFqhyMaRO9+nWyA== + dependencies: + jsonparse "^1.2.0" + through ">=2.2.7 <3" + +abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392" + integrity sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg== + dependencies: + event-target-shim "^5.0.0" + +abstract-level@^1.0.0, abstract-level@^1.0.2, abstract-level@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.3.tgz#78a67d3d84da55ee15201486ab44c09560070741" + integrity sha512-t6jv+xHy+VYwc4xqZMn2Pa9DjcdzvzZmQGRjTFc8spIbRGHgBrEKbPq+rYXc7CCo0lxgYvSgKVg9qZAhpVQSjA== + dependencies: + buffer "^6.0.3" + catering "^2.1.0" + is-buffer "^2.0.5" + level-supports "^4.0.0" + level-transcoder "^1.0.1" + module-error "^1.0.1" + queue-microtask "^1.2.3" + +abstract-leveldown@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-3.0.0.tgz#5cb89f958a44f526779d740d1440e743e0c30a57" + integrity sha512-KUWx9UWGQD12zsmLNj64/pndaz4iJh/Pj7nopgkfDG6RlCcbMZvT6+9l7dchK4idog2Is8VdC/PvNbFuFmalIQ== + dependencies: + xtend "~4.0.0" + +abstract-leveldown@^2.4.1, abstract-leveldown@~2.7.1: + version "2.7.2" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-2.7.2.tgz#87a44d7ebebc341d59665204834c8b7e0932cc93" + integrity 
sha512-+OVvxH2rHVEhWLdbudP6p0+dNMXu8JA1CbhP19T8paTYAcX7oJ4OVjT+ZUVpv7mITxXHqDMej+GdqXBmXkw09w== + dependencies: + xtend "~4.0.0" + +abstract-leveldown@^5.0.0, abstract-leveldown@~5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-5.0.0.tgz#f7128e1f86ccabf7d2893077ce5d06d798e386c6" + integrity sha512-5mU5P1gXtsMIXg65/rsYGsi93+MlogXZ9FA8JnwKurHQg64bfXwGYVdVdijNTVNOlAsuIiOwHdvFFD5JqCJQ7A== + dependencies: + xtend "~4.0.0" + +abstract-leveldown@~2.6.0: + version "2.6.3" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-2.6.3.tgz#1c5e8c6a5ef965ae8c35dfb3a8770c476b82c4b8" + integrity sha512-2++wDf/DYqkPR3o5tbfdhF96EfMApo1GpPfzOsR/ZYXdkSmELlvOOEAl9iKkRsktMPHdGjO4rtkBpf2I7TiTeA== + dependencies: + xtend "~4.0.0" + +accepts@~1.3.8: + version "1.3.8" + resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== + dependencies: + mime-types "~2.1.34" + negotiator "0.6.3" + +acorn-jsx@^5.0.0, acorn-jsx@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^6.0.7: + version "6.4.2" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" + integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== + +acorn@^7.4.0: + version "7.4.1" + resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +acorn@^8.4.1: + version "8.8.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.1.tgz#0a3f9cbecc4ec3bea6f0a80b66ae8dd2da250b73" + integrity sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA== + +adm-zip@^0.4.16: + version "0.4.16" + resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.4.16.tgz#cf4c508fdffab02c269cbc7f471a875f05570365" + integrity sha512-TFi4HBKSGfIKsK5YCkKaaFG2m4PEDyViZmEwof3MTIgzimHLto6muaHVpbrljdIvIrFZzEq/p4nafOeLcYegrg== + +aes-js@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.0.0.tgz#e21df10ad6c2053295bcbb8dab40b09dbea87e4d" + integrity sha512-H7wUZRn8WpTq9jocdxQ2c8x2sKo9ZVmzfRE13GiNJXfp7NcKYEdvl3vspKjXox6RIG2VtaRe4JFvxG4rqp2Zuw== + +aes-js@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/aes-js/-/aes-js-3.1.2.tgz#db9aabde85d5caabbfc0d4f2a4446960f627146a" + integrity sha512-e5pEa2kBnBOgR4Y/p20pskXI74UEz7de8ZGVo58asOtvSVG5YAbJeELPZxOmt+Bnz3rX753YKhfIn4X4l1PPRQ== + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.6.1, ajv@^6.9.1: + version "6.12.6" + resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.1: + version "8.11.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-colors@4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== + +ansi-colors@^4.1.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" + integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== + +ansi-escapes@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-3.2.0.tgz#8780b98ff9dbf5638152d1f1fe5c1d7b4442976b" + integrity sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + 
integrity sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA== + +ansi-regex@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== + +ansi-regex@^4.1.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.1.tgz#164daac87ab2d6f6db3a29875e2d1766582dabed" + integrity sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA== + +ansi-styles@^3.2.0, ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity 
sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +antlr4@4.7.1: + version "4.7.1" + resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.7.1.tgz#69984014f096e9e775f53dd9744bf994d8959773" + integrity sha512-haHyTW7Y9joE5MVs37P2lNYfU2RWBLfcRDD8OWldcdZm5TiCE91B5Xl1oWSwiDUSd4rlExpt2pu1fksYQjRBYQ== + +antlr4@~4.8.0: + version "4.8.0" + resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.8.0.tgz#f938ec171be7fc2855cd3a533e87647185b32b6a" + integrity sha512-en/MxQ4OkPgGJQ3wD/muzj1uDnFSzdFIhc2+c6bHZokWkuBb6RRvFjpWhPxWLbgQvaEzldJZ0GSQpfSAaE3hqg== + +antlr4ts@^0.5.0-alpha.4: + version "0.5.0-alpha.4" + resolved "https://registry.yarnpkg.com/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz#71702865a87478ed0b40c0709f422cf14d51652a" + integrity sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ== + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^1.0.10, argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity 
sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha512-YVIQ82gZPGBebQV/a8dar4AitzCQs0jjXwMPZllpXMaGjXPYVUawSxQrRsjhjupyVxEvbHgUmIhKVlND+j02kA== + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q== + +array-back@^1.0.3, array-back@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/array-back/-/array-back-1.0.4.tgz#644ba7f095f7ffcf7c43b5f0dc39d3c1f03c063b" + integrity sha512-1WxbZvrmyhkNoeYcizokbmh5oiOCIfyvGtcqbK3Ls1v1fKcquzxnQSceOx6tzq7jmai2kFLWIpGND2cLhH6TPw== + dependencies: + typical "^2.6.0" + +array-back@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/array-back/-/array-back-2.0.0.tgz#6877471d51ecc9c9bfa6136fb6c7d5fe69748022" + integrity sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw== + dependencies: + typical "^2.6.1" + +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + integrity 
sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ== + +array.prototype.reduce@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/array.prototype.reduce/-/array.prototype.reduce-1.0.5.tgz#6b20b0daa9d9734dd6bc7ea66b5bbce395471eac" + integrity sha512-kDdugMl7id9COE8R7MHF5jWk7Dqt/fs4Pv+JXoICnYwqpjjjbUurz6w5fT5IG6brLdJhv6/VoHB0H7oyIBXd+Q== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-array-method-boxes-properly "^1.0.0" + is-string "^1.0.7" + +asn1.js@^5.2.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" + integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== + dependencies: + bn.js "^4.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + safer-buffer "^2.1.0" + +asn1@^0.2.4, asn1@~0.2.3: + version "0.2.6" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" + integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity 
sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== + +assertion-error@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw== + +ast-parents@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/ast-parents/-/ast-parents-0.0.1.tgz#508fd0f05d0c48775d9eccda2e174423261e8dd3" + integrity sha512-XHusKxKz3zoYk1ic8Un640joHbFMhbqneyoZfoKnEGtf2ey9Uh/IdpcQplODdO/kENaMIWsD0nJm4+wX3UNLHA== + +astral-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" + integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +async-eventemitter@^0.2.2, async-eventemitter@^0.2.4: + version "0.2.4" + resolved "https://registry.yarnpkg.com/async-eventemitter/-/async-eventemitter-0.2.4.tgz#f5e7c8ca7d3e46aab9ec40a292baf686a0bafaca" + integrity sha512-pd20BwL7Yt1zwDFy+8MX8F1+WCT8aQeKj0kQnTrH9WaeRETlRamVhD0JtRPmrV4GfOJ2F9CvdQkZeZhnh2TuHw== + dependencies: + async "^2.4.0" + +async-limiter@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" + integrity 
sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== + +async@2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.2.tgz#18330ea7e6e313887f5d2f2a904bac6fe4dd5381" + integrity sha512-H1qVYh1MYhEEFLsP97cVKqCGo7KfCyTt6uEWqsTBr9SO84oK9Uwbyd/yCW+6rKJLHksBNUVWZDAjfS+Ccx0Bbg== + dependencies: + lodash "^4.17.11" + +async@^1.4.2: + version "1.5.2" + resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" + integrity sha512-nSVgobk4rv61R9PUSDtYt7mPVB2olxNR5RWJcAsH676/ef11bUZwvu7+RGYrYauVdDPcO519v68wRhXQtxsV9w== + +async@^2.0.1, async@^2.1.2, async@^2.4.0, async@^2.5.0, async@^2.6.1: + version "2.6.4" + resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221" + integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA== + dependencies: + lodash "^4.17.14" + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +available-typed-arrays@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" + integrity 
sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== + +aws4@^1.8.0: + version "1.12.0" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.12.0.tgz#ce1c9d143389679e253b314241ea9aa5cec980d3" + integrity sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg== + +axios@^0.21.1: + version "0.21.4" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" + integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== + dependencies: + follow-redirects "^1.14.0" + +axios@^0.27.2: + version "0.27.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" + integrity sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ== + dependencies: + follow-redirects "^1.14.9" + form-data "^4.0.0" + +babel-code-frame@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b" + integrity sha512-XqYMR2dfdGMW+hd0IUZ2PwK+fGeFkOxZJ0wY+JaQAHzt1Zx8LcvpiZD2NiGkEG8qx0CfkAOr5xt76d1e8vG90g== + dependencies: + chalk "^1.1.3" + esutils "^2.0.2" + js-tokens "^3.0.2" + +babel-core@^6.0.14, babel-core@^6.26.0: + version "6.26.3" + resolved "https://registry.yarnpkg.com/babel-core/-/babel-core-6.26.3.tgz#b2e2f09e342d0f0c88e2f02e067794125e75c207" + integrity sha512-6jyFLuDmeidKmUEb3NM+/yawG0M2bDZ9Z1qbZP59cyHLz8kYGKYwpJP0UwUKKUiTRNvxfLesJnTedqczP7cTDA== + dependencies: + babel-code-frame "^6.26.0" + babel-generator "^6.26.0" + babel-helpers "^6.24.1" + babel-messages "^6.23.0" + 
babel-register "^6.26.0" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + convert-source-map "^1.5.1" + debug "^2.6.9" + json5 "^0.5.1" + lodash "^4.17.4" + minimatch "^3.0.4" + path-is-absolute "^1.0.1" + private "^0.1.8" + slash "^1.0.0" + source-map "^0.5.7" + +babel-eslint@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/babel-eslint/-/babel-eslint-10.1.0.tgz#6968e568a910b78fb3779cdd8b6ac2f479943232" + integrity sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg== + dependencies: + "@babel/code-frame" "^7.0.0" + "@babel/parser" "^7.7.0" + "@babel/traverse" "^7.7.0" + "@babel/types" "^7.7.0" + eslint-visitor-keys "^1.0.0" + resolve "^1.12.0" + +babel-generator@^6.26.0: + version "6.26.1" + resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.1.tgz#1844408d3b8f0d35a404ea7ac180f087a601bd90" + integrity sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA== + dependencies: + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + detect-indent "^4.0.0" + jsesc "^1.3.0" + lodash "^4.17.4" + source-map "^0.5.7" + trim-right "^1.0.1" + +babel-helper-builder-binary-assignment-operator-visitor@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz#cce4517ada356f4220bcae8a02c2b346f9a56664" + integrity sha512-gCtfYORSG1fUMX4kKraymq607FWgMWg+j42IFPc18kFQEsmtaibP4UrqsXt8FlEJle25HUd4tsoDR7H2wDhe9Q== + dependencies: + babel-helper-explode-assignable-expression "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-call-delegate@^6.24.1: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz#ece6aacddc76e41c3461f88bfc575bd0daa2df8d" + integrity sha512-RL8n2NiEj+kKztlrVJM9JT1cXzzAdvWFh76xh/H1I4nKwunzE4INBXn8ieCZ+wh4zWszZk7NBS1s/8HR5jDkzQ== + dependencies: + babel-helper-hoist-variables "^6.24.1" + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-define-map@^6.24.1: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz#a5f56dab41a25f97ecb498c7ebaca9819f95be5f" + integrity sha512-bHkmjcC9lM1kmZcVpA5t2om2nzT/xiZpo6TJq7UlZ3wqKfzia4veeXbIhKvJXAMzhhEBd3cR1IElL5AenWEUpA== + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-helper-explode-assignable-expression@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz#f25b82cf7dc10433c55f70592d5746400ac22caa" + integrity sha512-qe5csbhbvq6ccry9G7tkXbzNtcDiH4r51rrPUbwwoTzZ18AqxWYRZT6AOmxrpxKnQBW0pYlBI/8vh73Z//78nQ== + dependencies: + babel-runtime "^6.22.0" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-function-name@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz#d3475b8c03ed98242a25b48351ab18399d3580a9" + integrity sha512-Oo6+e2iX+o9eVvJ9Y5eKL5iryeRdsIkwRYheCuhYdVHsdEQysbc2z2QkqCLIYnNxkT5Ss3ggrHdXiDI7Dhrn4Q== + dependencies: + babel-helper-get-function-arity "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-get-function-arity@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz#8f7782aa93407c41d3aa50908f89b031b1b6853d" + integrity 
sha512-WfgKFX6swFB1jS2vo+DwivRN4NB8XUdM3ij0Y1gnC21y1tdBoe6xjVnd7NSI6alv+gZXCtJqvrTeMW3fR/c0ng== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-hoist-variables@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz#1ecb27689c9d25513eadbc9914a73f5408be7a76" + integrity sha512-zAYl3tqerLItvG5cKYw7f1SpvIxS9zi7ohyGHaI9cgDUjAT6YcY9jIEH5CstetP5wHIVSceXwNS7Z5BpJg+rOw== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-optimise-call-expression@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz#f7a13427ba9f73f8f4fa993c54a97882d1244257" + integrity sha512-Op9IhEaxhbRT8MDXx2iNuMgciu2V8lDvYCNQbDGjdBNCjaMvyLf4wl4A3b8IgndCyQF8TwfgsQ8T3VD8aX1/pA== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-helper-regex@^6.24.1: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz#325c59f902f82f24b74faceed0363954f6495e72" + integrity sha512-VlPiWmqmGJp0x0oK27Out1D+71nVVCTSdlbhIVoaBAj2lUgrNjBCRR9+llO4lTSb2O4r7PJg+RobRkhBrf6ofg== + dependencies: + babel-runtime "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-helper-remap-async-to-generator@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz#5ec581827ad723fecdd381f1c928390676e4551b" + integrity sha512-RYqaPD0mQyQIFRu7Ho5wE2yvA/5jxqCIj/Lv4BXNq23mHYu/vxikOy2JueLiBxQknwapwrJeNCesvY0ZcfnlHg== + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helper-replace-supers@^6.24.1: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz#bf6dbfe43938d17369a213ca8a8bf74b6a90ab1a" + integrity sha512-sLI+u7sXJh6+ToqDr57Bv973kCepItDhMou0xCP2YPVmR1jkHSCY+p1no8xErbV1Siz5QE8qKT1WIwybSWlqjw== + dependencies: + babel-helper-optimise-call-expression "^6.24.1" + babel-messages "^6.23.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-helpers@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-helpers/-/babel-helpers-6.24.1.tgz#3471de9caec388e5c850e597e58a26ddf37602b2" + integrity sha512-n7pFrqQm44TCYvrCDb0MqabAF+JUBq+ijBvNMUxpkLjJaAu32faIexewMumrH5KLLJ1HDyT0PTEqRyAe/GwwuQ== + dependencies: + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-jest@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.2.2.tgz#2c15abd8c2081293c9c3f4f80a4ed1d51542fee5" + integrity sha512-kkq2QSDIuvpgfoac3WZ1OOcHsQQDU5xYk2Ql7tLdJ8BVAYbefEXal+NfS45Y5LVZA7cxC8KYcQMObpCt1J025w== + dependencies: + "@jest/transform" "^29.2.2" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^29.2.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-messages@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-messages/-/babel-messages-6.23.0.tgz#f3cdf4703858035b2a2951c6ec5edf6c62f2630e" + integrity sha512-Bl3ZiA+LjqaMtNYopA9TYE9HP1tQ+E5dLxE0XrAzcIJeK2UqF0/EaqXwBn9esd4UmTfEab+P+UYQ1GnioFIb/w== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-check-es2015-constants@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz#35157b101426fd2ffd3da3f75c7d1e91835bbf8a" + integrity sha512-B1M5KBP29248dViEo1owyY32lk1ZSH2DaNNrXLGt8lyjjHm7pBqAdQ7VKUPR6EEDO323+OvT3MQXbCin8ooWdA== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-istanbul@^6.1.1: 
+ version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.2.0.tgz#23ee99c37390a98cfddf3ef4a78674180d823094" + integrity sha512-TnspP2WNiR3GLfCsUNHqeXw0RoQ2f9U5hQ5L3XFpwuO8htQmSrhh8qsB6vi5Yi8+kuynN1yjDjQsPfkebmB6ZA== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.1.14" + "@types/babel__traverse" "^7.0.6" + +babel-plugin-syntax-async-functions@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95" + integrity sha512-4Zp4unmHgw30A1eWI5EpACji2qMocisdXhAftfhXoSV9j0Tvj6nRFE3tOmRY912E0FMRm/L5xWE7MGVT2FoLnw== + +babel-plugin-syntax-exponentiation-operator@^6.8.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz#9ee7e8337290da95288201a6a57f4170317830de" + integrity sha512-Z/flU+T9ta0aIEKl1tGEmN/pZiI1uXmCiGFRegKacQfEJzp7iNsKloZmyJlQr+75FCJtiFfGIK03SiCvCt9cPQ== + +babel-plugin-syntax-trailing-function-commas@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz#ba0360937f8d06e40180a43fe0d5616fff532cf3" + integrity sha512-Gx9CH3Q/3GKbhs07Bszw5fPTlU+ygrOGfAhEt7W2JICwufpC4SuO0mG0+4NykPBSYPMJhqvVlDBU17qB1D+hMQ== + 
+babel-plugin-transform-async-to-generator@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz#6536e378aff6cb1d5517ac0e40eb3e9fc8d08761" + integrity sha512-7BgYJujNCg0Ti3x0c/DL3tStvnKS6ktIYOmo9wginv/dfZOrbSZ+qG4IRRHMBOzZ5Awb1skTiAsQXg/+IWkZYw== + dependencies: + babel-helper-remap-async-to-generator "^6.24.1" + babel-plugin-syntax-async-functions "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-arrow-functions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz#452692cb711d5f79dc7f85e440ce41b9f244d221" + integrity sha512-PCqwwzODXW7JMrzu+yZIaYbPQSKjDTAsNNlK2l5Gg9g4rz2VzLnZsStvp/3c46GfXpwkyufb3NCyG9+50FF1Vg== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-block-scoped-functions@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz#bbc51b49f964d70cb8d8e0b94e820246ce3a6141" + integrity sha512-2+ujAT2UMBzYFm7tidUsYh+ZoIutxJ3pN9IYrF1/H6dCKtECfhmB8UkHVpyxDwkj0CYbQG35ykoz925TUnBc3A== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-block-scoping@^6.23.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz#d70f5299c1308d05c12f463813b0a09e73b1895f" + integrity sha512-YiN6sFAQ5lML8JjCmr7uerS5Yc/EMbgg9G8ZNmk2E3nYX4ckHR01wrkeeMijEf5WHNK5TW0Sl0Uu3pv3EdOJWw== + dependencies: + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + lodash "^4.17.4" + +babel-plugin-transform-es2015-classes@^6.23.0: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz#5a4c58a50c9c9461e564b4b2a3bfabc97a2584db" + integrity sha512-5Dy7ZbRinGrNtmWpquZKZ3EGY8sDgIVB4CU8Om8q8tnMLrD/m94cKglVcHps0BCTdZ0TJeeAWOq2TK9MIY6cag== + dependencies: + babel-helper-define-map "^6.24.1" + babel-helper-function-name "^6.24.1" + babel-helper-optimise-call-expression "^6.24.1" + babel-helper-replace-supers "^6.24.1" + babel-messages "^6.23.0" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-computed-properties@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz#6fe2a8d16895d5634f4cd999b6d3480a308159b3" + integrity sha512-C/uAv4ktFP/Hmh01gMTvYvICrKze0XVX9f2PdIXuriCSvUmV9j+u+BB9f5fJK3+878yMK6dkdcq+Ymr9mrcLzw== + dependencies: + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-destructuring@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz#997bb1f1ab967f682d2b0876fe358d60e765c56d" + integrity sha512-aNv/GDAW0j/f4Uy1OEPZn1mqD+Nfy9viFGBfQ5bZyT35YqOiqx7/tXdyfZkJ1sC21NyEsBdfDY6PYmLHF4r5iA== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-duplicate-keys@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz#73eb3d310ca969e3ef9ec91c53741a6f1576423e" + integrity sha512-ossocTuPOssfxO2h+Z3/Ea1Vo1wWx31Uqy9vIiJusOP4TbF7tPs9U0sJ9pX9OJPf4lXRGj5+6Gkl/HHKiAP5ug== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-for-of@^6.23.0: + version "6.23.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz#f47c95b2b613df1d3ecc2fdb7573623c75248691" + integrity sha512-DLuRwoygCoXx+YfxHLkVx5/NpeSbVwfoTeBykpJK7JhYWlL/O8hgAK/reforUnZDlxasOrVPPJVI/guE3dCwkw== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-function-name@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz#834c89853bc36b1af0f3a4c5dbaa94fd8eacaa8b" + integrity sha512-iFp5KIcorf11iBqu/y/a7DK3MN5di3pNCzto61FqCNnUX4qeBwcV1SLqe10oXNnCaxBUImX3SckX2/o1nsrTcg== + dependencies: + babel-helper-function-name "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-literals@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz#4f54a02d6cd66cf915280019a31d31925377ca2e" + integrity sha512-tjFl0cwMPpDYyoqYA9li1/7mGFit39XiNX5DKC/uCNjBctMxyL1/PT/l4rSlbvBG1pOKI88STRdUsWXB3/Q9hQ== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-modules-amd@^6.22.0, babel-plugin-transform-es2015-modules-amd@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz#3b3e54017239842d6d19c3011c4bd2f00a00d154" + integrity sha512-LnIIdGWIKdw7zwckqx+eGjcS8/cl8D74A3BpJbGjKTFFNJSMrjN4bIh22HY1AlkUbeLG6X6OZj56BDvWD+OeFA== + dependencies: + babel-plugin-transform-es2015-modules-commonjs "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-modules-commonjs@^6.23.0, babel-plugin-transform-es2015-modules-commonjs@^6.24.1: + version "6.26.2" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz#58a793863a9e7ca870bdc5a881117ffac27db6f3" + integrity sha512-CV9ROOHEdrjcwhIaJNBGMBCodN+1cfkwtM1SbUHmvyy35KGT7fohbpOxkE2uLz1o6odKK2Ck/tz47z+VqQfi9Q== + dependencies: + babel-plugin-transform-strict-mode "^6.24.1" + babel-runtime "^6.26.0" + babel-template "^6.26.0" + babel-types "^6.26.0" + +babel-plugin-transform-es2015-modules-systemjs@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz#ff89a142b9119a906195f5f106ecf305d9407d23" + integrity sha512-ONFIPsq8y4bls5PPsAWYXH/21Hqv64TBxdje0FvU3MhIV6QM2j5YS7KvAzg/nTIVLot2D2fmFQrFWCbgHlFEjg== + dependencies: + babel-helper-hoist-variables "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-modules-umd@^6.23.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz#ac997e6285cd18ed6176adb607d602344ad38468" + integrity sha512-LpVbiT9CLsuAIp3IG0tfbVo81QIhn6pE8xBJ7XSeCtFlMltuar5VuBV6y6Q45tpui9QWcy5i0vLQfCfrnF7Kiw== + dependencies: + babel-plugin-transform-es2015-modules-amd "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + +babel-plugin-transform-es2015-object-super@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz#24cef69ae21cb83a7f8603dad021f572eb278f8d" + integrity sha512-8G5hpZMecb53vpD3mjs64NhI1au24TAmokQ4B+TBFBjN9cVoGoOvotdrMMRmHvVZUEvqGUPWL514woru1ChZMA== + dependencies: + babel-helper-replace-supers "^6.24.1" + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-parameters@^6.23.0: + version "6.24.1" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz#57ac351ab49caf14a97cd13b09f66fdf0a625f2b" + integrity sha512-8HxlW+BB5HqniD+nLkQ4xSAVq3bR/pcYW9IigY+2y0dI+Y7INFeTbfAQr+63T3E4UDsZGjyb+l9txUnABWxlOQ== + dependencies: + babel-helper-call-delegate "^6.24.1" + babel-helper-get-function-arity "^6.24.1" + babel-runtime "^6.22.0" + babel-template "^6.24.1" + babel-traverse "^6.24.1" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-shorthand-properties@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz#24f875d6721c87661bbd99a4622e51f14de38aa0" + integrity sha512-mDdocSfUVm1/7Jw/FIRNw9vPrBQNePy6wZJlR8HAUBLybNp1w/6lr6zZ2pjMShee65t/ybR5pT8ulkLzD1xwiw== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-spread@^6.22.0: + version "6.22.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz#d6d68a99f89aedc4536c81a542e8dd9f1746f8d1" + integrity sha512-3Ghhi26r4l3d0Js933E5+IhHwk0A1yiutj9gwvzmFbVV0sPMYk2lekhOufHBswX7NCoSeF4Xrl3sCIuSIa+zOg== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-sticky-regex@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz#00c1cdb1aca71112cdf0cf6126c2ed6b457ccdbc" + integrity sha512-CYP359ADryTo3pCsH0oxRo/0yn6UsEZLqYohHmvLQdfS9xkf+MbCzE3/Kolw9OYIY4ZMilH25z/5CbQbwDD+lQ== + dependencies: + babel-helper-regex "^6.24.1" + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-plugin-transform-es2015-template-literals@^6.22.0: + version "6.22.0" + resolved 
"https://registry.yarnpkg.com/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz#a84b3450f7e9f8f1f6839d6d687da84bb1236d8d" + integrity sha512-x8b9W0ngnKzDMHimVtTfn5ryimars1ByTqsfBDwAqLibmuuQY6pgBQi5z1ErIsUOWBdw1bW9FSz5RZUojM4apg== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-typeof-symbol@^6.23.0: + version "6.23.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz#dec09f1cddff94b52ac73d505c84df59dcceb372" + integrity sha512-fz6J2Sf4gYN6gWgRZaoFXmq93X+Li/8vf+fb0sGDVtdeWvxC9y5/bTD7bvfWMEq6zetGEHpWjtzRGSugt5kNqw== + dependencies: + babel-runtime "^6.22.0" + +babel-plugin-transform-es2015-unicode-regex@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz#d38b12f42ea7323f729387f18a7c5ae1faeb35e9" + integrity sha512-v61Dbbihf5XxnYjtBN04B/JBvsScY37R1cZT5r9permN1cp+b70DY3Ib3fIkgn1DI9U3tGgBJZVD8p/mE/4JbQ== + dependencies: + babel-helper-regex "^6.24.1" + babel-runtime "^6.22.0" + regexpu-core "^2.0.0" + +babel-plugin-transform-exponentiation-operator@^6.22.0: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz#2ab0c9c7f3098fa48907772bb813fe41e8de3a0e" + integrity sha512-LzXDmbMkklvNhprr20//RStKVcT8Cu+SQtX18eMHLhjHf2yFzwtQ0S2f0jQ+89rokoNdmwoSqYzAhq86FxlLSQ== + dependencies: + babel-helper-builder-binary-assignment-operator-visitor "^6.24.1" + babel-plugin-syntax-exponentiation-operator "^6.8.0" + babel-runtime "^6.22.0" + +babel-plugin-transform-regenerator@^6.22.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz#e0703696fbde27f0a3efcacf8b4dca2f7b3a8f2f" + integrity 
sha512-LS+dBkUGlNR15/5WHKe/8Neawx663qttS6AGqoOUhICc9d1KciBvtrQSuc0PI+CxQ2Q/S1aKuJ+u64GtLdcEZg== + dependencies: + regenerator-transform "^0.10.0" + +babel-plugin-transform-strict-mode@^6.24.1: + version "6.24.1" + resolved "https://registry.yarnpkg.com/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz#d5faf7aa578a65bbe591cf5edae04a0c67020758" + integrity sha512-j3KtSpjyLSJxNoCDrhwiJad8kw0gJ9REGj8/CqL0HeRyLnvUNYV9zcqluL6QJSXh3nfsLEmSLvwRfGzrgR96Pw== + dependencies: + babel-runtime "^6.22.0" + babel-types "^6.24.1" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-env@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/babel-preset-env/-/babel-preset-env-1.7.0.tgz#dea79fa4ebeb883cd35dab07e260c1c9c04df77a" + integrity sha512-9OR2afuKDneX2/q2EurSftUYM0xGu4O2D9adAhVfADDhrYDaxXV0rBbevVYoY9n6nyX1PmQW/0jtpJvUNr9CHg== + dependencies: + babel-plugin-check-es2015-constants "^6.22.0" + babel-plugin-syntax-trailing-function-commas "^6.22.0" + babel-plugin-transform-async-to-generator "^6.22.0" + 
babel-plugin-transform-es2015-arrow-functions "^6.22.0" + babel-plugin-transform-es2015-block-scoped-functions "^6.22.0" + babel-plugin-transform-es2015-block-scoping "^6.23.0" + babel-plugin-transform-es2015-classes "^6.23.0" + babel-plugin-transform-es2015-computed-properties "^6.22.0" + babel-plugin-transform-es2015-destructuring "^6.23.0" + babel-plugin-transform-es2015-duplicate-keys "^6.22.0" + babel-plugin-transform-es2015-for-of "^6.23.0" + babel-plugin-transform-es2015-function-name "^6.22.0" + babel-plugin-transform-es2015-literals "^6.22.0" + babel-plugin-transform-es2015-modules-amd "^6.22.0" + babel-plugin-transform-es2015-modules-commonjs "^6.23.0" + babel-plugin-transform-es2015-modules-systemjs "^6.23.0" + babel-plugin-transform-es2015-modules-umd "^6.23.0" + babel-plugin-transform-es2015-object-super "^6.22.0" + babel-plugin-transform-es2015-parameters "^6.23.0" + babel-plugin-transform-es2015-shorthand-properties "^6.22.0" + babel-plugin-transform-es2015-spread "^6.22.0" + babel-plugin-transform-es2015-sticky-regex "^6.22.0" + babel-plugin-transform-es2015-template-literals "^6.22.0" + babel-plugin-transform-es2015-typeof-symbol "^6.23.0" + babel-plugin-transform-es2015-unicode-regex "^6.22.0" + babel-plugin-transform-exponentiation-operator "^6.22.0" + babel-plugin-transform-regenerator "^6.22.0" + browserslist "^3.2.6" + invariant "^2.2.2" + semver "^5.3.0" + +babel-preset-jest@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-29.2.0.tgz#3048bea3a1af222e3505e4a767a974c95a7620dc" + integrity sha512-z9JmMJppMxNv8N7fNRHvhMg9cvIkMxQBXgFkane3yKVEvEOP+kB50lk8DFRvF9PGqbyXxlmebKWhuDORO8RgdA== + dependencies: + babel-plugin-jest-hoist "^29.2.0" + babel-preset-current-node-syntax "^1.0.0" + +babel-register@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-register/-/babel-register-6.26.0.tgz#6ed021173e2fcb486d7acb45c6009a856f647071" + integrity 
sha512-veliHlHX06wjaeY8xNITbveXSiI+ASFnOqvne/LaIJIqOWi2Ogmj91KOugEz/hoh/fwMhXNBJPCv8Xaz5CyM4A== + dependencies: + babel-core "^6.26.0" + babel-runtime "^6.26.0" + core-js "^2.5.0" + home-or-tmp "^2.0.0" + lodash "^4.17.4" + mkdirp "^0.5.1" + source-map-support "^0.4.15" + +babel-runtime@^6.18.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" + integrity sha512-ITKNuq2wKlW1fJg9sSW52eepoYgZBggvOAHC0u/CYu/qxQ9EVzThCgR69BnSXLHjy2f7SY5zaQ4yt7H9ZVxY2g== + dependencies: + core-js "^2.4.0" + regenerator-runtime "^0.11.0" + +babel-template@^6.24.1, babel-template@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02" + integrity sha512-PCOcLFW7/eazGUKIoqH97sO9A2UYMahsn/yRQ7uOk37iutwjq7ODtcTNF+iFDSHNfkctqsLRjLP7URnOx0T1fg== + dependencies: + babel-runtime "^6.26.0" + babel-traverse "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + lodash "^4.17.4" + +babel-traverse@^6.24.1, babel-traverse@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee" + integrity sha512-iSxeXx7apsjCHe9c7n8VtRXGzI2Bk1rBSOJgCCjfyXb6v1aCqE1KSEpq/8SXuVN8Ka/Rh1WDTF0MDzkvTA4MIA== + dependencies: + babel-code-frame "^6.26.0" + babel-messages "^6.23.0" + babel-runtime "^6.26.0" + babel-types "^6.26.0" + babylon "^6.18.0" + debug "^2.6.8" + globals "^9.18.0" + invariant "^2.2.2" + lodash "^4.17.4" + +babel-types@^6.19.0, babel-types@^6.24.1, babel-types@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" + integrity sha512-zhe3V/26rCWsEZK8kZN+HaQj5yQ1CilTObixFzKW1UWjqG7618Twz6YEsCnjfg5gBcJh02DrpCkS9h98ZqDY+g== + dependencies: + babel-runtime "^6.26.0" + esutils "^2.0.2" + 
lodash "^4.17.4" + to-fast-properties "^1.0.3" + +babelify@^7.3.0: + version "7.3.0" + resolved "https://registry.yarnpkg.com/babelify/-/babelify-7.3.0.tgz#aa56aede7067fd7bd549666ee16dc285087e88e5" + integrity sha512-vID8Fz6pPN5pJMdlUnNFSfrlcx5MUule4k9aKs/zbZPyXxMTcRrB0M4Tarw22L8afr8eYSWxDPYCob3TdrqtlA== + dependencies: + babel-core "^6.0.14" + object-assign "^4.0.0" + +babylon@^6.18.0: + version "6.18.0" + resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3" + integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ== + +backoff@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/backoff/-/backoff-2.5.0.tgz#f616eda9d3e4b66b8ca7fca79f695722c5f8e26f" + integrity sha512-wC5ihrnUXmR2douXmXLCe5O3zg3GKIyvRi/hi58a/XyRxVI+3/yM0PYueQOZXPXQ9pxBislYkw+sF9b7C/RuMA== + dependencies: + precond "0.2" + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base-x@^3.0.2, base-x@^3.0.8: + version "3.0.9" + resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.9.tgz#6349aaabb58526332de9f60995e548a53fe21320" + integrity sha512-H7JU6iBHTal1gp56aKoaa//YUxEaAOUiydvrV/pILqIHXTtqxSkATOnDA2u+jZ/61sD+L/412+7kzXRtWukhpQ== + dependencies: + safe-buffer "^5.0.1" + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity 
sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +bcrypt-pbkdf@^1.0.0, bcrypt-pbkdf@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== + dependencies: + tweetnacl "^0.14.3" + +bech32@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/bech32/-/bech32-1.1.4.tgz#e38c9f37bf179b8eb16ae3a772b40c356d4832e9" + integrity sha512-s0IrSOzLlbvX7yp4WBfPITzpAU8sqQcpsmwXDiKwrG4r491vwCO/XpejasRNl0piBMe/DvP4Tz0mIS/X1DPJBQ== + +bigint-crypto-utils@^3.0.23: + version "3.1.7" + resolved "https://registry.yarnpkg.com/bigint-crypto-utils/-/bigint-crypto-utils-3.1.7.tgz#c4c1b537c7c1ab7aadfaecf3edfd45416bf2c651" + integrity sha512-zpCQpIE2Oy5WIQpjC9iYZf8Uh9QqoS51ZCooAcNvzv1AQ3VWdT52D0ksr1+/faeK8HVIej1bxXcP75YcqH3KPA== + dependencies: + bigint-mod-arith "^3.1.0" + +bigint-mod-arith@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bigint-mod-arith/-/bigint-mod-arith-3.1.2.tgz#658e416bc593a463d97b59766226d0a3021a76b1" + integrity sha512-nx8J8bBeiRR+NlsROFH9jHswW5HO8mgfOSqW0AmjicMMvaONDa8AO+5ViKDUUNytBPWiwfvZP4/Bj4Y3lUfvgQ== + +bignumber.js@^9.0.0, bignumber.js@^9.0.1: + version "9.1.1" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.1.tgz#c4df7dc496bd849d4c9464344c1aa74228b4dac6" + integrity sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity 
sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bip39@2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/bip39/-/bip39-2.5.0.tgz#51cbd5179460504a63ea3c000db3f787ca051235" + integrity sha512-xwIx/8JKoT2+IPJpFEfXoWdYwP7UVAoUxxLNfGCfVowaJE7yg1Y5B1BVPqlUNsBq5/nGwmFkwRJ8xDW4sX8OdA== + dependencies: + create-hash "^1.1.0" + pbkdf2 "^3.0.9" + randombytes "^2.0.1" + safe-buffer "^5.0.1" + unorm "^1.3.3" + +bl@^1.0.0: + version "1.2.3" + resolved "https://registry.yarnpkg.com/bl/-/bl-1.2.3.tgz#1e8dd80142eac80d7158c9dccc047fb620e035e7" + integrity sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww== + dependencies: + readable-stream "^2.3.5" + safe-buffer "^5.1.1" + +bl@^4.0.3: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +blakejs@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/blakejs/-/blakejs-1.2.1.tgz#5057e4206eadb4a97f7c0b6e197a505042fc3814" + integrity sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ== + +bluebird@^3.5.0, bluebird@^3.5.2: + version "3.7.2" + resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" + integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== + +bn-str-256@^1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/bn-str-256/-/bn-str-256-1.9.1.tgz#898cebee70a3edc3968f97b4cebbc4771025aa82" + integrity sha512-u3muv3WO5sYv9nUQsPnDGLg731yNt/MOlKPK5pmBVqClcl7tY97tyfKxw8ed44HVrpi+7dkgJgQpbXP47a3GoQ== + dependencies: + decimal.js-light "^2.5.0" + lodash "^4.17.11" + +bn.js@4.11.6: + version "4.11.6" + 
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.6.tgz#53344adb14617a13f6e8dd2ce28905d1c0ba3215" + integrity sha512-XWwnNNFCuuSQ0m3r3C4LE3EiORltHd9M05pq6FOlVeiophzRbMo50Sbz1ehl8K3Z+jw9+vmgnXefY1hz8X+2wA== + +bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.10.0, bn.js@^4.11.0, bn.js@^4.11.6, bn.js@^4.11.8, bn.js@^4.11.9, bn.js@^4.8.0: + version "4.12.0" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" + integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== + +bn.js@^5.0.0, bn.js@^5.1.1, bn.js@^5.1.2, bn.js@^5.2.0, bn.js@^5.2.1: + version "5.2.1" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" + integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== + +body-parser@1.20.1, body-parser@^1.16.0: + version "1.20.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== + dependencies: + bytes "3.1.2" + content-type "~1.0.4" + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" + iconv-lite "0.4.24" + on-finished "2.4.1" + qs "6.11.0" + raw-body "2.5.1" + type-is "~1.6.18" + unpipe "1.0.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity 
sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +brorand@^1.0.1, brorand@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + integrity sha512-cKV8tMCEpQs4hK/ik71d6LrPOnpkpGBR0wzxqr68g2m/LB2GxVYQroAjMJZRVM1Y4BCjCKc3vAamxSzOY2RP+w== + +browser-level@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/browser-level/-/browser-level-1.0.1.tgz#36e8c3183d0fe1c405239792faaab5f315871011" + integrity sha512-XECYKJ+Dbzw0lbydyQuJzwNXtOpbMSq737qxJN11sIRTErOMShvDpbzTlgju7orJKvx4epULolZAuJGLzCmWRQ== + dependencies: + abstract-level "^1.0.2" + catering "^2.1.1" + module-error "^1.0.2" + run-parallel-limit "^1.1.0" + +browser-stdout@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" + integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== + +browserify-aes@^1.0.0, browserify-aes@^1.0.4, browserify-aes@^1.2.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" + integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== + dependencies: + buffer-xor "^1.0.3" + cipher-base "^1.0.0" + create-hash "^1.1.0" + evp_bytestokey "^1.0.3" + inherits "^2.0.1" + safe-buffer "^5.0.1" + +browserify-cipher@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" + integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== + dependencies: + browserify-aes "^1.0.4" + browserify-des "^1.0.0" + evp_bytestokey "^1.0.0" + +browserify-des@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" + integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== + dependencies: + cipher-base "^1.0.1" + des.js "^1.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.1.0.tgz#b2fd06b5b75ae297f7ce2dc651f918f5be158c8d" + integrity sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog== + dependencies: + bn.js "^5.0.0" + randombytes "^2.0.1" + +browserify-sign@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" + integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== + dependencies: + bn.js "^5.1.1" + browserify-rsa "^4.0.1" + create-hash "^1.2.0" + create-hmac "^1.1.7" + elliptic "^6.5.3" + inherits "^2.0.4" + parse-asn1 "^5.1.5" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" + 
+browserslist@^3.2.6: + version "3.2.8" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-3.2.8.tgz#b0005361d6471f0f5952797a76fc985f1f978fc6" + integrity sha512-WHVocJYavUwVgVViC0ORikPHQquXwVh939TaelZ4WDqpWgTX/FsGhl/+P4qBUAGcRvtOgDgC+xftNWWp2RUTAQ== + dependencies: + caniuse-lite "^1.0.30000844" + electron-to-chromium "^1.3.47" + +browserslist@^4.21.3: + version "4.21.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bs58@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/bs58/-/bs58-4.0.1.tgz#be161e76c354f6f788ae4071f63f34e8c4f0a42a" + integrity sha512-Ok3Wdf5vOIlBrgCvTq96gBkJw+JUEzdBgyaza5HLtPm7yTHkjRy8+JzNyHF7BHa0bNWOQIp3m5YF0nnFcOIKLw== + dependencies: + base-x "^3.0.2" + +bs58check@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/bs58check/-/bs58check-2.1.2.tgz#53b018291228d82a5aa08e7d796fdafda54aebfc" + integrity sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA== + dependencies: + bs58 "^4.0.0" + create-hash "^1.1.0" + safe-buffer "^5.1.2" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + 
+buffer-alloc-unsafe@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz#bd7dc26ae2972d0eda253be061dba992349c19f0" + integrity sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg== + +buffer-alloc@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/buffer-alloc/-/buffer-alloc-1.2.0.tgz#890dd90d923a873e08e10e5fd51a57e5b7cce0ec" + integrity sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow== + dependencies: + buffer-alloc-unsafe "^1.1.0" + buffer-fill "^1.0.0" + +buffer-equal-constant-time@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" + integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== + +buffer-fill@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/buffer-fill/-/buffer-fill-1.0.0.tgz#f8f78b76789888ef39f205cd637f68e702122b2c" + integrity sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ== + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +buffer-reverse@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/buffer-reverse/-/buffer-reverse-1.0.1.tgz#49283c8efa6f901bc01fa3304d06027971ae2f60" + integrity sha512-M87YIUBsZ6N924W57vDwT/aOu8hw7ZgdByz6ijksLjmHJELBASmYTTlNHRgjE+pTsT9oJXGaDSgqqwfdHotDUg== + +buffer-to-arraybuffer@^0.0.5: + version "0.0.5" + resolved "https://registry.yarnpkg.com/buffer-to-arraybuffer/-/buffer-to-arraybuffer-0.0.5.tgz#6064a40fa76eb43c723aba9ef8f6e1216d10511a" + integrity 
sha512-3dthu5CYiVB1DEJp61FtApNnNndTckcqe4pFcLdvHtrpG+kcyekCJKg4MRiDcFW7A6AODnXB9U4dwQiCW5kzJQ== + +buffer-xor@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" + integrity sha512-571s0T7nZWK6vB67HI5dyUF7wXiNcfaPPPTl6zYCNApANjIvYJTg7hlud/+cJpdAhS7dVzqMLmfhfHR3rAcOjQ== + +buffer-xor@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-2.0.2.tgz#34f7c64f04c777a1f8aac5e661273bb9dd320289" + integrity sha512-eHslX0bin3GB+Lx2p7lEYRShRewuNZL3fUl4qlVJGGiwoPGftmt8JQgk2Y9Ji5/01TnVDo33E5b5O3vUB1HdqQ== + dependencies: + safe-buffer "^5.1.1" + +buffer@^5.0.5, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +buffer@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6" + integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.2.1" + +bufferutil@^4.0.1: + version "4.0.7" + resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.7.tgz#60c0d19ba2c992dd8273d3f73772ffc894c153ad" + integrity sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw== + dependencies: + node-gyp-build "^4.3.0" + +buildcheck@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.3.tgz#70451897a95d80f7807e68fc412eb2e7e35ff4d5" + integrity sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA== + +busboy@^1.6.0: + version "1.6.0" + resolved 
"https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + +bytes@3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== + +bytewise-core@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/bytewise-core/-/bytewise-core-1.2.3.tgz#3fb410c7e91558eb1ab22a82834577aa6bd61d42" + integrity sha512-nZD//kc78OOxeYtRlVk8/zXqTB4gf/nlguL1ggWA8FuchMyOxcyHR4QPQZMUmA7czC+YnaBrPUCubqAWe50DaA== + dependencies: + typewise-core "^1.2" + +bytewise@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/bytewise/-/bytewise-1.1.0.tgz#1d13cbff717ae7158094aa881b35d081b387253e" + integrity sha512-rHuuseJ9iQ0na6UDhnrRVDh8YnWVlU6xM3VH6q/+yHDeUH2zIhUzP+2/h3LIrhLDBtTqzWpE3p3tP/boefskKQ== + dependencies: + bytewise-core "^1.2.2" + typewise "^1.0.3" + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +cacheable-lookup@^5.0.3: + version "5.0.4" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" + integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== + +cacheable-request@^6.0.0: + version "6.1.0" + resolved 
"https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" + integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^3.0.0" + lowercase-keys "^2.0.0" + normalize-url "^4.1.0" + responselike "^1.0.2" + +cacheable-request@^7.0.2: + version "7.0.2" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.2.tgz#ea0d0b889364a25854757301ca12b2da77f91d27" + integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^4.0.0" + lowercase-keys "^2.0.0" + normalize-url "^6.0.1" + responselike "^2.0.0" + +cachedown@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/cachedown/-/cachedown-1.0.0.tgz#d43f036e4510696b31246d7db31ebf0f7ac32d15" + integrity sha512-t+yVk82vQWCJF3PsWHMld+jhhjkkWjcAzz8NbFx1iULOXWl8Tm/FdM4smZNVw3MRr0X+lVTx9PKzvEn4Ng19RQ== + dependencies: + abstract-leveldown "^2.4.1" + lru-cache "^3.2.0" + +call-bind@^1.0.0, call-bind@^1.0.2, call-bind@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +caller-callsite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" + integrity sha512-JuG3qI4QOftFsZyOn1qq87fq5grLIyk1JYd5lJmdA+fG7aQ9pA/i3JIJGcO3q0MrRcHlOt1U+ZeHW8Dq9axALQ== + dependencies: + callsites "^2.0.0" + +caller-path@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" + integrity sha512-MCL3sf6nCSXOwCTzvPKhN18TU7AHTvdtam8DAogxcrJ8Rjfbbg7Lgng64H9Iy+vUV6VGFClN/TyxBkAebLRR4A== + dependencies: + caller-callsite "^2.0.0" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" + integrity sha512-ksWePWBloaWPxJYQ8TL0JHvtci6G5QTKwQ95RcWAa/lzoAKuAOflGdAK92hpHXjkwb8zLxoLNUoNYZgVsaJzvQ== + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a" + integrity sha512-4nhGqUkc4BqbBBB4Q6zLuD7lzzrHYrjKGeYaEji/3tFR5VdJu9v+LilhGIVe8wxEJPPOeWo7eg8dwY13TZ1BNg== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.0.0, camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30000844: + version "1.0.30001448" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001448.tgz#ca7550b1587c92a392a2b377cd9c508b3b4395bf" + integrity sha512-tq2YI+MJnooG96XpbTRYkBxLxklZPOdLmNIOdIhvf7SNJan6u5vCKum8iT7ZfCt70m1GPkuC7P3TtX6UuhupuA== + +caniuse-lite@^1.0.30001400: + version "1.0.30001429" + resolved 
"https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001429.tgz#70cdae959096756a85713b36dd9cb82e62325639" + integrity sha512-511ThLu1hF+5RRRt0zYCf2U2yRr9GPF6m5y90SBCWsvSoYoW7yAGlv/elyPaNfvGCkp6kj/KFZWU0BMA69Prsg== + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== + +catering@^2.1.0, catering@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510" + integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w== + +cbor@^5.0.2: + version "5.2.0" + resolved "https://registry.yarnpkg.com/cbor/-/cbor-5.2.0.tgz#4cca67783ccd6de7b50ab4ed62636712f287a67c" + integrity sha512-5IMhi9e1QU76ppa5/ajP1BmMWZ2FHkhAhjeVKQ/EFCgYSEaeVaoGtL7cxJskf9oCCk+XjzaIdc3IuU/dbA/o2A== + dependencies: + bignumber.js "^9.0.1" + nofilter "^1.0.4" + +chai-as-promised@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/chai-as-promised/-/chai-as-promised-7.1.1.tgz#08645d825deb8696ee61725dbf590c012eb00ca0" + integrity sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA== + dependencies: + check-error "^1.0.2" + +chai@^4.3.4: + version "4.3.6" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.6.tgz#ffe4ba2d9fa9d6680cc0b370adae709ec9011e9c" + integrity sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q== + dependencies: + assertion-error "^1.1.0" + check-error "^1.0.2" + deep-eql "^3.0.1" + get-func-name "^2.0.0" + loupe "^2.3.1" + pathval "^1.1.1" + type-detect "^4.0.5" + +chalk@4.1.2, chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + 
integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A== + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chalk@^2.0.0, chalk@^2.1.0, chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + +check-error@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + integrity sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA== + +checkpoint-store@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/checkpoint-store/-/checkpoint-store-1.1.0.tgz#04e4cb516b91433893581e6d4601a78e9552ea06" + integrity 
sha512-J/NdY2WvIx654cc6LWSq/IYFFCUf75fFTgwzFnmbqyORH4MwgiQCgswLLKBGzmsyTI5V7i5bp/So6sMbDWhedg== + dependencies: + functional-red-black-tree "^1.0.1" + +chokidar@3.5.3, chokidar@^3.4.0: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chownr@^1.0.1, chownr@^1.1.1, chownr@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" + integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== + +ci-info@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== + +ci-info@^3.2.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.5.0.tgz#bfac2a29263de4c829d806b1ab478e35091e171f" + integrity sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw== + +cids@^0.7.1: + version "0.7.5" + resolved "https://registry.yarnpkg.com/cids/-/cids-0.7.5.tgz#60a08138a99bfb69b6be4ceb63bfef7a396b28b2" + integrity sha512-zT7mPeghoWAu+ppn8+BS1tQ5qGmbMfB4AregnQjA/qHY3GC1m1ptI9GkWNlgeu38r7CuRdXB47uY2XgAYt6QVA== + dependencies: + buffer "^5.5.0" + class-is "^1.1.0" + multibase "~0.6.0" + multicodec "^1.0.0" + multihashes "~0.4.15" + +cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" 
+ integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +class-is@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/class-is/-/class-is-1.1.0.tgz#9d3c0fba0440d211d843cec3dedfa48055005825" + integrity sha512-rhjH9AG1fvabIDoGRVH587413LPjTZgmDF9fOFCbFJQV4yuocX1mHxxvXI4g3cGwbVY9wAYIoKlg1N79frJKQw== + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +classic-level@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/classic-level/-/classic-level-1.2.0.tgz#2d52bdec8e7a27f534e67fdeb890abef3e643c27" + integrity sha512-qw5B31ANxSluWz9xBzklRWTUAJ1SXIdaVKTVS7HcTGKOAmExx65Wo5BUICW+YGORe2FOUaDghoI9ZDxj82QcFg== + dependencies: + abstract-level "^1.0.2" + catering "^2.1.0" + module-error "^1.0.1" + napi-macros "~2.0.0" + node-gyp-build "^4.3.0" + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity 
sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw== + dependencies: + restore-cursor "^2.0.0" + +cli-table3@^0.6.0: + version "0.6.3" + resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.3.tgz#61ab765aac156b52f222954ffc607a6f01dbeeb2" + integrity sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg== + dependencies: + string-width "^4.2.0" + optionalDependencies: + "@colors/colors" "1.5.0" + +cli-width@^2.0.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.1.tgz#b0433d0b4e9c847ef18868a4ef16fd5fc8271c48" + integrity sha512-GRMWDxpOB6Dgk2E5Uo+3eEBvtOOlimMmpbFiKuLFnQzYDavtLFY3K5ona41jgN/WdRZtG7utuVSVTL4HbZHGkw== + +cliui@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-3.2.0.tgz#120601537a916d29940f934da3b48d585a39213d" + integrity sha512-0yayqDxWQbqk3ojkYqUKqaAQ6AfNKeKWRNA8kR0WXzAsdHpP4BIaOmMAG87JGuO6qcobyW4GjxHd9PmhEd+T9w== + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + wrap-ansi "^2.0.0" + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + +clone-response@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity 
sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== + dependencies: + mimic-response "^1.0.0" + +clone@2.1.2, clone@^2.0.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +code-point-at@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" + integrity sha512-RpAVKQA5T63xEj6/giIbUEtZwJ4UFIc3ZtvEkiaUERylqe8xb5IvqcgOurZLahv93CLKfxcw5YI+DZcUBRyLXA== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha512-lNkKvzEeMBBjUGHZ+q6z9pSJla0KWAQPvtzhEV9+iGyQYG+pBpl7xKDhxoNSOZH2hhv0v5k0y2yAM4o4SjoSkw== + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +collections@^5.1.12: + version "5.1.13" + resolved "https://registry.yarnpkg.com/collections/-/collections-5.1.13.tgz#eee204a93b67473c8e74e00e934a997cc2817585" + integrity sha512-SCb6Qd+d3Z02corWQ7/mqXiXeeTdHvkP6TeFSYfGYdCFp1WrjSNZ3j6y8Y3T/7osGEe0iOcU2g1d346l99m4Lg== + dependencies: + weak-map "~1.0.x" + +color-convert@^1.9.0: + version "1.9.3" + resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +combined-stream@^1.0.6, combined-stream@^1.0.8, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +command-exists@^1.2.8: + version "1.2.9" + resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.9.tgz#c50725af3808c8ab0260fd60b01fbfa25b954f69" + integrity sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w== + +command-line-args@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/command-line-args/-/command-line-args-4.0.7.tgz#f8d1916ecb90e9e121eda6428e41300bfb64cc46" + integrity 
sha512-aUdPvQRAyBvQd2n7jXcsMDz68ckBJELXNzBybCHOibUWEg0mWTnaYCSRU8h9R+aNRSvDihJtssSRCiDRpLaezA== + dependencies: + array-back "^2.0.0" + find-replace "^1.0.3" + typical "^2.6.1" + +commander@2.18.0: + version "2.18.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.18.0.tgz#2bf063ddee7c7891176981a2cc798e5754bc6970" + integrity sha512-6CYPa+JP2ftfRU2qkDK+UTVeQYosOg/2GbcjIcKPHfinyOLPVGXu/ovN86RP49Re5ndJK1N0kuiidFFuepc4ZQ== + +commander@3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/commander/-/commander-3.0.2.tgz#6837c3fb677ad9933d1cfba42dd14d5117d6b39e" + integrity sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow== + +commander@^2.19.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== + +commander@^8.1.0, commander@^8.3.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + +commander@~2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.9.0.tgz#9c99094176e12240cb22d6c5146098400fe0f7d4" + integrity sha512-bmkUukX8wAOjHdN26xj5c4ctEV22TQ7dQYhSmuckKhToXrkUn0iIaolHdIxYYqD55nhpSPA9zPQ1yP57GdXP2A== + dependencies: + graceful-readlink ">= 1.0.0" + +component-emitter@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + 
integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +concat-stream@^1.5.1, concat-stream@~1.6.2: + version "1.6.2" + resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +content-disposition@0.5.4: + version "0.5.4" + resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== + dependencies: + safe-buffer "5.2.1" + +content-hash@^2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/content-hash/-/content-hash-2.5.2.tgz#bbc2655e7c21f14fd3bfc7b7d4bfe6e454c9e211" + integrity sha512-FvIQKy0S1JaWV10sMsA7TRx8bpU+pqPkhbsfvOJAdjRXvYxEckAwQWGwtRjiaJfh+E0DvcWUGqcdjwMGFjsSdw== + dependencies: + cids "^0.7.1" + multicodec "^0.5.5" + multihashes "^0.4.15" + +content-type@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== + +convert-source-map@^1.4.0, convert-source-map@^1.5.1, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity 
sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +cookie-signature@1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== + +cookie@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== + +cookie@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + +cookiejar@^2.1.1: + version "2.1.4" + resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.4.tgz#ee669c1fea2cf42dc31585469d193fef0d65771b" + integrity sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw== + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw== + +core-js-pure@^3.0.1: + version "3.27.2" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.27.2.tgz#47e9cc96c639eefc910da03c3ece26c5067c7553" + integrity sha512-Cf2jqAbXgWH3VVzjyaaFkY1EBazxugUepGymDoeteyYr9ByX51kD2jdHZlsEF/xnJMyN3Prua7mQuzwMg6Zc9A== + +core-js@^2.4.0, core-js@^2.5.0: + version "2.6.12" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.12.tgz#d9333dfa7b065e347cc5682219d6f690859cc2ec" + integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== + +core-util-is@1.0.2: + 
version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +cors@^2.8.1: + version "2.8.5" + resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" + integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== + dependencies: + object-assign "^4" + vary "^1" + +cosmiconfig@^5.0.7: + version "5.2.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" + integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== + dependencies: + import-fresh "^2.0.0" + is-directory "^0.3.1" + js-yaml "^3.13.1" + parse-json "^4.0.0" + +cpu-features@~0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.4.tgz#0023475bb4f4c525869c162e4108099e35bf19d8" + integrity sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A== + dependencies: + buildcheck "0.0.3" + nan "^2.15.0" + +crc-32@^1.2.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.2.tgz#3cad35a934b8bf71f25ca524b6da51fb7eace2ff" + integrity sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ== + +create-ecdh@^4.0.0: + version "4.0.4" + resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" + integrity 
sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== + dependencies: + bn.js "^4.1.0" + elliptic "^6.5.3" + +create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" + integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== + dependencies: + cipher-base "^1.0.1" + inherits "^2.0.1" + md5.js "^1.3.4" + ripemd160 "^2.0.1" + sha.js "^2.4.0" + +create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" + integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== + dependencies: + cipher-base "^1.0.3" + create-hash "^1.1.0" + inherits "^2.0.1" + ripemd160 "^2.0.0" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cross-fetch@^2.1.0, cross-fetch@^2.1.1: + version "2.2.6" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-2.2.6.tgz#2ef0bb39a24ac034787965c457368a28730e220a" + integrity sha512-9JZz+vXCmfKUZ68zAptS7k4Nu8e2qcibe7WVZYps7sAgk5R8GYTc+T1WR0v1rlP9HxgARmOX1UTIJZFytajpNA== + dependencies: + node-fetch "^2.6.7" + whatwg-fetch "^2.0.4" + +cross-spawn@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + 
shebang-command "^1.2.0" + which "^1.2.9" + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +crypto-browserify@3.12.0: + version "3.12.0" + resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" + integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== + dependencies: + browserify-cipher "^1.0.0" + browserify-sign "^4.0.0" + create-ecdh "^4.0.0" + create-hash "^1.1.0" + create-hmac "^1.1.0" + diffie-hellman "^5.0.0" + inherits "^2.0.1" + pbkdf2 "^3.0.3" + public-encrypt "^4.0.0" + randombytes "^2.0.0" + randomfill "^1.0.3" + +crypto-js@^3.1.9-1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/crypto-js/-/crypto-js-3.3.0.tgz#846dd1cce2f68aacfa156c8578f926a609b7976b" + integrity sha512-DIT51nX0dCfKltpRiXV+/TVZq+Qq2NgF4644+K7Ttnla7zEzqc+kjJyiB96BHNyUTBxyjzRcZYpUdZa+QAqi6Q== + +d@1, d@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a" + integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA== + dependencies: + es5-ext "^0.10.50" + type "^1.0.1" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== + dependencies: + assert-plus "^1.0.0" + +debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: + version "2.6.9" + resolved 
"https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@3.2.6: + version "3.2.6" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b" + integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ== + dependencies: + ms "^2.1.1" + +debug@4, debug@4.3.4, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.3: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@4.3.3: + version "4.3.3" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.3.tgz#04266e0b70a98d4462e6e288e38259213332b664" + integrity sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q== + dependencies: + ms "2.1.2" + +debug@^3.1.0, debug@^3.2.6: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decamelize@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== + +decamelize@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" + integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== + +decimal.js-light@^2.5.0: 
+ version "2.5.1" + resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934" + integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg== + +decode-uri-component@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.2.tgz#e69dbe25d37941171dd540e024c444cd5188e1e9" + integrity sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ== + +decompress-response@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" + integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== + dependencies: + mimic-response "^1.0.0" + +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== + dependencies: + mimic-response "^3.1.0" + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-eql@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" + integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw== + dependencies: + type-detect "^4.0.0" + +deep-equal@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-1.1.1.tgz#b5c98c942ceffaf7cb051e24e1434a25a2e6076a" + integrity 
sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g== + dependencies: + is-arguments "^1.0.4" + is-date-object "^1.0.1" + is-regex "^1.0.4" + object-is "^1.0.1" + object-keys "^1.1.1" + regexp.prototype.flags "^1.2.0" + +deep-extend@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" + integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== + +deep-extend@~0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.5.1.tgz#b894a9dd90d3023fbf1c55a394fb858eb2066f1f" + integrity sha512-N8vBdOa+DF7zkRrDCsaOXoCs/E2fJfx9B9MrKnnSiHNh4ws7eSys6YQE4KvT1cecKmOASYQBhbKjeuDD9lT81w== + +deep-is@^0.1.3, deep-is@~0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +defer-to-connect@^1.0.1: + version "1.1.3" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" + integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== + +defer-to-connect@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" + integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== + +deferred-leveldown@~1.2.1: + version "1.2.2" + resolved 
"https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-1.2.2.tgz#3acd2e0b75d1669924bc0a4b642851131173e1eb" + integrity sha512-uukrWD2bguRtXilKt6cAWKyoXrTSMo5m7crUdLfWQmu8kIm88w3QZoUL+6nhpfKVmhHANER6Re3sKoNoZ3IKMA== + dependencies: + abstract-leveldown "~2.6.0" + +deferred-leveldown@~4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-4.0.2.tgz#0b0570087827bf480a23494b398f04c128c19a20" + integrity sha512-5fMC8ek8alH16QiV0lTCis610D1Zt1+LA4MS4d63JgS32lrCjTFDUFz2ao09/j2I4Bqb5jL4FZYwu7Jz0XO1ww== + dependencies: + abstract-leveldown "~5.0.0" + inherits "^2.0.3" + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha512-Rr7ADjQZenceVOAKop6ALkkRAmH1A4Gx9hV/7ZujPUN2rkATqFO0JZLZInbAjpZYoJ1gUx8MRMQVkYemcbMSTA== + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha512-cZTYKFWspt9jZsMscWo8sc/5lbPC9Q0N5nBLgb+Yd915iL3udB1uFgS3B8YCx66UVHq018DAVFoee7x+gxggeA== + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject 
"^3.0.1" + +defined@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.1.tgz#c0b9db27bfaffd95d6f61399419b893df0f91ebf" + integrity sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q== + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +depd@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + +des.js@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" + integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== + dependencies: + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== + +detect-indent@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-4.0.0.tgz#f76d064352cdf43a1cb6ce619c4ee3a9475de208" + integrity sha512-BDKtmHlOzwI7iRuEkhzsnPoi5ypEhWAJB5RvHWe1kMr06js3uK5B3734i3ui5Yd+wOJV1cpE4JnivPD283GU/A== + dependencies: + repeating "^2.0.0" + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^29.2.0: + version "29.2.0" + 
resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.2.0.tgz#4c55b5b40706c7b5d2c5c75999a50c56d214e8f6" + integrity sha512-413SY5JpYeSBZxmenGEmCVQ8mCgtFJF0w9PROdaS6z987XC2Pd2GOKqOITLtMftmyFZqgtCOb/QA7/Z3ZXfzIw== + +diff@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" + integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +diffie-hellman@^5.0.0: + version "5.0.3" + resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" + integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== + dependencies: + bn.js "^4.1.0" + miller-rabin "^4.0.0" + randombytes "^2.0.0" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +docker-modem@^1.0.8: + version "1.0.9" + resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-1.0.9.tgz#a1f13e50e6afb6cf3431b2d5e7aac589db6aaba8" + integrity sha512-lVjqCSCIAUDZPAZIeyM125HXfNvOmYYInciphNrLrylUtKyW66meAjSPXWchKVzoIYZx69TPnAepVSSkeawoIw== + dependencies: + JSONStream "1.3.2" + debug "^3.2.6" + readable-stream "~1.0.26-4" + split-ca "^1.0.0" + +docker-modem@^3.0.0: + version "3.0.6" + resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.6.tgz#8c76338641679e28ec2323abb65b3276fb1ce597" + integrity 
sha512-h0Ow21gclbYsZ3mkHDfsYNDqtRhXS8fXr51bU0qr1dxgTMJj0XufbzX+jhNOvA8KuEEzn6JbvLVhXyv+fny9Uw== + dependencies: + debug "^4.1.1" + readable-stream "^3.5.0" + split-ca "^1.0.1" + ssh2 "^1.11.0" + +dockerode@^2.5.8: + version "2.5.8" + resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-2.5.8.tgz#1b661e36e1e4f860e25f56e0deabe9f87f1d0acc" + integrity sha512-+7iOUYBeDTScmOmQqpUYQaE7F4vvIt6+gIZNHWhqAQEI887tiPFB9OvXI/HzQYqfUNvukMK+9myLW63oTJPZpw== + dependencies: + concat-stream "~1.6.2" + docker-modem "^1.0.8" + tar-fs "~1.16.3" + +dockerode@^3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.4.tgz#875de614a1be797279caa9fe27e5637cf0e40548" + integrity sha512-3EUwuXnCU+RUlQEheDjmBE0B7q66PV9Rw5NiH1sXwINq0M9c5ERP9fxgkw36ZHOtzf4AGEEYySnkx/sACC9EgQ== + dependencies: + "@balena/dockerignore" "^1.0.2" + docker-modem "^3.0.0" + tar-fs "~2.0.1" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dom-walk@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/dom-walk/-/dom-walk-0.1.2.tgz#0c548bef048f4d1f2a97249002236060daa3fd84" + integrity sha512-6QvTW9mrGeIegrFXdtQi9pk7O/nSK6lSdXW2eqUspN5LWD7UTji2Fqw5V2YLjBpHEoU9Xl/eUWNpDeZvoyOv2w== + +dotenv@^16.0.3: + version "16.0.3" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.3.tgz#115aec42bac5053db3c456db30cc243a5a836a07" + integrity sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ== + +dotenv@^8.2.0: + version "8.6.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" + integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== + +dotignore@~0.1.2: + version "0.1.2" + 
resolved "https://registry.yarnpkg.com/dotignore/-/dotignore-0.1.2.tgz#f942f2200d28c3a76fbdd6f0ee9f3257c8a2e905" + integrity sha512-UGGGWfSauusaVJC+8fgV+NVvBXkCTmVv7sk6nojDZZvuOUNGUy0Zk4UpHQD6EDjS0jpBwcACvH4eofvyzBcRDw== + dependencies: + minimatch "^3.0.4" + +duplexer3@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" + integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ecdsa-sig-formatter@1.0.11: + version "1.0.11" + resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf" + integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ== + dependencies: + safe-buffer "^5.0.1" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== + +electron-to-chromium@^1.3.47, electron-to-chromium@^1.4.251: + version "1.4.284" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" + integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== + +elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.5.2, elliptic@^6.5.3, elliptic@^6.5.4: + version "6.5.4" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.4.tgz#da37cebd31e79a1367e941b592ed1fbebd58abbb" + integrity 
sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ== + dependencies: + bn.js "^4.11.9" + brorand "^1.1.0" + hash.js "^1.0.0" + hmac-drbg "^1.0.1" + inherits "^2.0.4" + minimalistic-assert "^1.0.1" + minimalistic-crypto-utils "^1.0.1" + +emittery@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.13.1.tgz#c04b8c3457490e0847ae51fced3af52d338e3dad" + integrity sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== + +emoji-regex@^10.1.0: + version "10.2.1" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.2.1.tgz#a41c330d957191efd3d9dfe6e1e8e1e9ab048b3f" + integrity sha512-97g6QgOk8zlDRdgq1WxwgTMgEWGVAQvB5Fdpgc1MkNy56la5SKP9GsMXKDOdqwn90/41a8yPwIGk1Y6WVbeMQA== + +emoji-regex@^7.0.1: + version "7.0.3" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" + integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== + +encoding-down@5.0.4, encoding-down@~5.0.0: + version "5.0.4" + resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-5.0.4.tgz#1e477da8e9e9d0f7c8293d320044f8b2cd8e9614" + integrity sha512-8CIZLDcSKxgzT+zX8ZVfgNbu8Md2wq/iqa1Y7zyVR18QBEAc0Nmzuvj/N5ykSKpfGzjM8qxbaFntLPwnVoUhZw== + dependencies: + abstract-leveldown "^5.0.0" + inherits "^2.0.3" + level-codec "^9.0.0" + 
level-errors "^2.0.0" + xtend "^4.0.1" + +encoding@^0.1.11: + version "0.1.13" + resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" + integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== + dependencies: + iconv-lite "^0.6.2" + +end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1: + version "1.4.4" + resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" + integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== + dependencies: + once "^1.4.0" + +enquirer@^2.3.0, enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +entities@~2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.3.tgz#5c487e5742ab93c15abb5da22759b8590ec03b7f" + integrity sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ== + +env-paths@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" + integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== + +errno@~0.1.1: + version "0.1.8" + resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f" + integrity sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A== + dependencies: + prr "~1.0.1" + +error-ex@^1.2.0, error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity 
sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.5: + version "1.20.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-abstract@^1.20.4: + version "1.21.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.21.1.tgz#e6105a099967c08377830a0c9cb589d570dd86c6" + integrity sha512-QudMsPOz86xYz/1dG1OuGBKOELjCh99IIWHLzy5znUB6j8xG2yMA7bfTV86VSqKF+Y/H08vQPR+9jyXpuC6hfg== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + es-set-tostringtag "^2.0.1" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.3" + get-symbol-description "^1.0.0" + globalthis "^1.0.3" + gopd "^1.0.1" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-proto "^1.0.1" + has-symbols "^1.0.3" + internal-slot "^1.0.4" + is-array-buffer "^3.0.1" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-typed-array "^1.1.10" + is-weakref "^1.0.2" + 
object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.6" + string.prototype.trimstart "^1.0.6" + typed-array-length "^1.0.4" + unbox-primitive "^1.0.2" + which-typed-array "^1.1.9" + +es-array-method-boxes-properly@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz#873f3e84418de4ee19c5be752990b2e44718d09e" + integrity sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA== + +es-set-tostringtag@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" + integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== + dependencies: + get-intrinsic "^1.1.3" + has "^1.0.3" + has-tostringtag "^1.0.0" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +es5-ext@^0.10.35, es5-ext@^0.10.50: + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== + dependencies: + es6-iterator "^2.0.3" + es6-symbol "^3.1.3" + next-tick "^1.1.0" + +es6-iterator@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7" + integrity sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g== + 
dependencies: + d "1" + es5-ext "^0.10.35" + es6-symbol "^3.1.1" + +es6-promisify@^6.0.0: + version "6.1.1" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-6.1.1.tgz#46837651b7b06bf6fff893d03f29393668d01621" + integrity sha512-HBL8I3mIki5C1Cc9QjKUenHtnG0A5/xA8Q/AllRcfiwl2CZFXGK7ddBiCoRwAix4i2KxcQfjtIVcrVbB3vbmwg== + +es6-symbol@^3.1.1, es6-symbol@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" + integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA== + dependencies: + d "^1.0.1" + ext "^1.1.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== + +escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +eslint-config-alloy@^3.8.2: + version "3.10.0" + resolved "https://registry.yarnpkg.com/eslint-config-alloy/-/eslint-config-alloy-3.10.0.tgz#b2d85ba3bd7dddcc6d7fc79088c192a646f4f246" + integrity sha512-V34DUmW5n9NU2KbqKw6ow6qHt4RKksuvLKaAAC64ZMPnzwLH8ia7s0N4pEjeVzdtVL77jehCJkupLo8eUdKGYA== + +eslint-scope@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-4.0.3.tgz#ca03833310f6889a3264781aa82e63eb9cfe7848" + integrity sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg== + dependencies: + esrecurse "^4.1.0" + estraverse "^4.1.1" + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^1.3.1: + version "1.4.3" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f" + integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity 
sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint@^5.6.0: + version "5.16.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-5.16.0.tgz#a1e3ac1aae4a3fbd8296fcf8f7ab7314cbb6abea" + integrity sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg== + dependencies: + "@babel/code-frame" "^7.0.0" + ajv "^6.9.1" + chalk "^2.1.0" + cross-spawn "^6.0.5" + debug "^4.0.1" + doctrine "^3.0.0" + eslint-scope "^4.0.3" + eslint-utils "^1.3.1" + eslint-visitor-keys "^1.0.0" + espree "^5.0.1" + esquery "^1.0.1" + esutils "^2.0.2" + file-entry-cache "^5.0.1" + functional-red-black-tree "^1.0.1" + glob "^7.1.2" + globals "^11.7.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + inquirer "^6.2.2" + js-yaml "^3.13.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.3.0" + lodash "^4.17.11" + minimatch "^3.0.4" + mkdirp "^0.5.1" + natural-compare "^1.4.0" + optionator "^0.8.2" + path-is-inside "^1.0.2" + progress "^2.0.0" + regexpp "^2.0.1" + semver "^5.5.1" + strip-ansi "^4.0.0" + strip-json-comments "^2.0.1" + table "^5.2.3" + text-table "^0.2.0" + +eslint@^7.16.0: + version "7.32.0" + resolved 
"https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d" + integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA== + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.3" + "@humanwhocodes/config-array" "^0.5.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + escape-string-regexp "^4.0.0" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.1.2" + globals "^13.6.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^6.0.9" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-5.0.1.tgz#5d6526fa4fc7f0788a5cf75b15f30323e2f81f7a" + integrity sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A== + dependencies: + acorn "^6.0.7" + acorn-jsx "^5.0.0" + eslint-visitor-keys "^1.0.0" + +espree@^7.3.0, espree@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.0.1, esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.1.0, esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== + +eth-block-tracker@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/eth-block-tracker/-/eth-block-tracker-3.0.1.tgz#95cd5e763c7293e0b1b2790a2a39ac2ac188a5e1" + integrity sha512-WUVxWLuhMmsfenfZvFO5sbl1qFY2IqUlw/FPVmjjdElpqLsZtSG+wPe9Dz7W/sB6e80HgFKknOmKk2eNlznHug== + dependencies: + eth-query "^2.1.0" + ethereumjs-tx "^1.3.3" + ethereumjs-util "^5.1.3" + ethjs-util "^0.1.3" + json-rpc-engine "^3.6.0" + pify "^2.3.0" + tape "^4.6.3" + +eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.8: + version "2.0.8" + resolved "https://registry.yarnpkg.com/eth-ens-namehash/-/eth-ens-namehash-2.0.8.tgz#229ac46eca86d52e0c991e7cb2aef83ff0f68bcf" + integrity sha512-VWEI1+KJfz4Km//dadyvBBoBeSQ0MHTXPvr8UIXiLW6IanxvAV+DmlZAijZwAyggqGUfwQBeHf7tc9wzc1piSw== + dependencies: + idna-uts46-hx "^2.3.1" + js-sha3 "^0.5.7" + +eth-json-rpc-infura@^3.1.0: + version "3.2.1" + resolved "https://registry.yarnpkg.com/eth-json-rpc-infura/-/eth-json-rpc-infura-3.2.1.tgz#26702a821067862b72d979c016fd611502c6057f" + integrity sha512-W7zR4DZvyTn23Bxc0EWsq4XGDdD63+XPUCEhV2zQvQGavDVC4ZpFDK4k99qN7bd7/fjj37+rxmuBOBeIqCA5Mw== + dependencies: + cross-fetch "^2.1.1" + eth-json-rpc-middleware "^1.5.0" + json-rpc-engine "^3.4.0" + json-rpc-error "^2.0.0" + +eth-json-rpc-middleware@^1.5.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/eth-json-rpc-middleware/-/eth-json-rpc-middleware-1.6.0.tgz#5c9d4c28f745ccb01630f0300ba945f4bef9593f" + integrity sha512-tDVCTlrUvdqHKqivYMjtFZsdD7TtpNLBCfKAcOpaVs7orBMS/A8HWro6dIzNtTZIR05FAbJ3bioFOnZpuCew9Q== + dependencies: + async "^2.5.0" + eth-query "^2.1.2" + eth-tx-summary "^3.1.2" + ethereumjs-block "^1.6.0" + ethereumjs-tx "^1.3.3" + ethereumjs-util "^5.1.2" + ethereumjs-vm "^2.1.0" + fetch-ponyfill "^4.0.0" + json-rpc-engine "^3.6.0" + json-rpc-error "^2.0.0" + json-stable-stringify "^1.0.1" + promise-to-callback "^1.0.0" + tape "^4.6.3" + +eth-lib@0.2.8: + version "0.2.8" + resolved "https://registry.yarnpkg.com/eth-lib/-/eth-lib-0.2.8.tgz#b194058bef4b220ad12ea497431d6cb6aa0623c8" + integrity 
sha512-ArJ7x1WcWOlSpzdoTBX8vkwlkSQ85CjjifSZtV4co64vWxSV8geWfPI9x4SVYu3DSxnX4yWFVTtGL+j9DUFLNw== + dependencies: + bn.js "^4.11.6" + elliptic "^6.4.0" + xhr-request-promise "^0.1.2" + +eth-lib@^0.1.26: + version "0.1.29" + resolved "https://registry.yarnpkg.com/eth-lib/-/eth-lib-0.1.29.tgz#0c11f5060d42da9f931eab6199084734f4dbd1d9" + integrity sha512-bfttrr3/7gG4E02HoWTDUcDDslN003OlOoBxk9virpAZQ1ja/jDgwkWB8QfJF7ojuEowrqy+lzp9VcJG7/k5bQ== + dependencies: + bn.js "^4.11.6" + elliptic "^6.4.0" + nano-json-stream-parser "^0.1.2" + servify "^0.1.12" + ws "^3.0.0" + xhr-request-promise "^0.1.2" + +eth-query@^2.0.2, eth-query@^2.1.0, eth-query@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/eth-query/-/eth-query-2.1.2.tgz#d6741d9000106b51510c72db92d6365456a6da5e" + integrity sha512-srES0ZcvwkR/wd5OQBRA1bIJMww1skfGS0s8wlwK3/oNP4+wnds60krvu5R1QbpRQjMmpG5OMIWro5s7gvDPsA== + dependencies: + json-rpc-random-id "^1.0.0" + xtend "^4.0.1" + +eth-sig-util@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eth-sig-util/-/eth-sig-util-3.0.0.tgz#75133b3d7c20a5731af0690c385e184ab942b97e" + integrity sha512-4eFkMOhpGbTxBQ3AMzVf0haUX2uTur7DpWiHzWyTURa28BVJJtOkcb9Ok5TV0YvEPG61DODPW7ZUATbJTslioQ== + dependencies: + buffer "^5.2.1" + elliptic "^6.4.0" + ethereumjs-abi "0.6.5" + ethereumjs-util "^5.1.1" + tweetnacl "^1.0.0" + tweetnacl-util "^0.15.0" + +eth-sig-util@^1.4.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/eth-sig-util/-/eth-sig-util-1.4.2.tgz#8d958202c7edbaae839707fba6f09ff327606210" + integrity sha512-iNZ576iTOGcfllftB73cPB5AN+XUQAT/T8xzsILsghXC1o8gJUqe3RHlcDqagu+biFpYQ61KQrZZJza8eRSYqw== + dependencies: + ethereumjs-abi "git+https://github.com/ethereumjs/ethereumjs-abi.git" + ethereumjs-util "^5.1.1" + +eth-tx-summary@^3.1.2: + version "3.2.4" + resolved "https://registry.yarnpkg.com/eth-tx-summary/-/eth-tx-summary-3.2.4.tgz#e10eb95eb57cdfe549bf29f97f1e4f1db679035c" + integrity 
sha512-NtlDnaVZah146Rm8HMRUNMgIwG/ED4jiqk0TME9zFheMl1jOp6jL1m0NKGjJwehXQ6ZKCPr16MTr+qspKpEXNg== + dependencies: + async "^2.1.2" + clone "^2.0.0" + concat-stream "^1.5.1" + end-of-stream "^1.1.0" + eth-query "^2.0.2" + ethereumjs-block "^1.4.1" + ethereumjs-tx "^1.1.1" + ethereumjs-util "^5.0.1" + ethereumjs-vm "^2.6.0" + through2 "^2.0.3" + +ethashjs@~0.0.7: + version "0.0.8" + resolved "https://registry.yarnpkg.com/ethashjs/-/ethashjs-0.0.8.tgz#227442f1bdee409a548fb04136e24c874f3aa6f9" + integrity sha512-/MSbf/r2/Ld8o0l15AymjOTlPqpN8Cr4ByUEA9GtR4x0yAh3TdtDzEg29zMjXCNPI7u6E5fOQdj/Cf9Tc7oVNw== + dependencies: + async "^2.1.2" + buffer-xor "^2.0.1" + ethereumjs-util "^7.0.2" + miller-rabin "^4.0.0" + +ethereum-bloom-filters@^1.0.6: + version "1.0.10" + resolved "https://registry.yarnpkg.com/ethereum-bloom-filters/-/ethereum-bloom-filters-1.0.10.tgz#3ca07f4aed698e75bd134584850260246a5fed8a" + integrity sha512-rxJ5OFN3RwjQxDcFP2Z5+Q9ho4eIdEmSc2ht0fCu8Se9nbXjZ7/031uXoUYJ87KHCOdVeiUuwSnoS7hmYAGVHA== + dependencies: + js-sha3 "^0.8.0" + +ethereum-common@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ethereum-common/-/ethereum-common-0.2.0.tgz#13bf966131cce1eeade62a1b434249bb4cb120ca" + integrity sha512-XOnAR/3rntJgbCdGhqdaLIxDLWKLmsZOGhHdBKadEr6gEnJLH52k93Ou+TUdFaPN3hJc3isBZBal3U/XZ15abA== + +ethereum-common@^0.0.18: + version "0.0.18" + resolved "https://registry.yarnpkg.com/ethereum-common/-/ethereum-common-0.0.18.tgz#2fdc3576f232903358976eb39da783213ff9523f" + integrity sha512-EoltVQTRNg2Uy4o84qpa2aXymXDJhxm7eos/ACOg0DG4baAbMjhbdAEsx9GeE8sC3XCxnYvrrzZDH8D8MtA2iQ== + +ethereum-cryptography@0.1.3, ethereum-cryptography@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz#8d6143cfc3d74bf79bbd8edecdf29e4ae20dd191" + integrity sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ== + dependencies: + "@types/pbkdf2" "^3.0.0" + "@types/secp256k1" 
"^4.0.1" + blakejs "^1.1.0" + browserify-aes "^1.2.0" + bs58check "^2.1.2" + create-hash "^1.2.0" + create-hmac "^1.1.7" + hash.js "^1.1.7" + keccak "^3.0.0" + pbkdf2 "^3.0.17" + randombytes "^2.1.0" + safe-buffer "^5.1.2" + scrypt-js "^3.0.0" + secp256k1 "^4.0.1" + setimmediate "^1.0.5" + +ethereum-cryptography@^1.0.3: + version "1.1.2" + resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-1.1.2.tgz#74f2ac0f0f5fe79f012c889b3b8446a9a6264e6d" + integrity sha512-XDSJlg4BD+hq9N2FjvotwUET9Tfxpxc3kWGE2AqUG5vcbeunnbImVk3cj6e/xT3phdW21mE8R5IugU4fspQDcQ== + dependencies: + "@noble/hashes" "1.1.2" + "@noble/secp256k1" "1.6.3" + "@scure/bip32" "1.1.0" + "@scure/bip39" "1.1.0" + +ethereum-waffle@^3.0.0: + version "3.4.4" + resolved "https://registry.yarnpkg.com/ethereum-waffle/-/ethereum-waffle-3.4.4.tgz#1378b72040697857b7f5e8f473ca8f97a37b5840" + integrity sha512-PA9+jCjw4WC3Oc5ocSMBj5sXvueWQeAbvCA+hUlb6oFgwwKyq5ka3bWQ7QZcjzIX+TdFkxP4IbFmoY2D8Dkj9Q== + dependencies: + "@ethereum-waffle/chai" "^3.4.4" + "@ethereum-waffle/compiler" "^3.4.4" + "@ethereum-waffle/mock-contract" "^3.4.4" + "@ethereum-waffle/provider" "^3.4.4" + ethers "^5.0.1" + +ethereumjs-abi@0.6.5: + version "0.6.5" + resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.5.tgz#5a637ef16ab43473fa72a29ad90871405b3f5241" + integrity sha512-rCjJZ/AE96c/AAZc6O3kaog4FhOsAViaysBxqJNy2+LHP0ttH0zkZ7nXdVHOAyt6lFwLO0nlCwWszysG/ao1+g== + dependencies: + bn.js "^4.10.0" + ethereumjs-util "^4.3.0" + +ethereumjs-abi@0.6.8, ethereumjs-abi@^0.6.8: + version "0.6.8" + resolved "https://registry.yarnpkg.com/ethereumjs-abi/-/ethereumjs-abi-0.6.8.tgz#71bc152db099f70e62f108b7cdfca1b362c6fcae" + integrity sha512-Tx0r/iXI6r+lRsdvkFDlut0N08jWMnKRZ6Gkq+Nmw75lZe4e6o3EkSnkaBP5NF6+m5PTGAr9JP43N3LyeoglsA== + dependencies: + bn.js "^4.11.8" + ethereumjs-util "^6.0.0" + +"ethereumjs-abi@git+https://github.com/ethereumjs/ethereumjs-abi.git": + version "0.6.8" + resolved 
"git+https://github.com/ethereumjs/ethereumjs-abi.git#ee3994657fa7a427238e6ba92a84d0b529bbcde0" + dependencies: + bn.js "^4.11.8" + ethereumjs-util "^6.0.0" + +ethereumjs-account@3.0.0, ethereumjs-account@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ethereumjs-account/-/ethereumjs-account-3.0.0.tgz#728f060c8e0c6e87f1e987f751d3da25422570a9" + integrity sha512-WP6BdscjiiPkQfF9PVfMcwx/rDvfZTjFKY0Uwc09zSQr9JfIVH87dYIJu0gNhBhpmovV4yq295fdllS925fnBA== + dependencies: + ethereumjs-util "^6.0.0" + rlp "^2.2.1" + safe-buffer "^5.1.1" + +ethereumjs-account@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/ethereumjs-account/-/ethereumjs-account-2.0.5.tgz#eeafc62de544cb07b0ee44b10f572c9c49e00a84" + integrity sha512-bgDojnXGjhMwo6eXQC0bY6UK2liSFUSMwwylOmQvZbSl/D7NXQ3+vrGO46ZeOgjGfxXmgIeVNDIiHw7fNZM4VA== + dependencies: + ethereumjs-util "^5.0.0" + rlp "^2.0.0" + safe-buffer "^5.1.1" + +ethereumjs-block@2.2.2, ethereumjs-block@^2.2.2, ethereumjs-block@~2.2.0, ethereumjs-block@~2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/ethereumjs-block/-/ethereumjs-block-2.2.2.tgz#c7654be7e22df489fda206139ecd63e2e9c04965" + integrity sha512-2p49ifhek3h2zeg/+da6XpdFR3GlqY3BIEiqxGF8j9aSRIgkb7M1Ky+yULBKJOu8PAZxfhsYA+HxUk2aCQp3vg== + dependencies: + async "^2.0.1" + ethereumjs-common "^1.5.0" + ethereumjs-tx "^2.1.1" + ethereumjs-util "^5.0.0" + merkle-patricia-tree "^2.1.2" + +ethereumjs-block@^1.2.2, ethereumjs-block@^1.4.1, ethereumjs-block@^1.6.0: + version "1.7.1" + resolved "https://registry.yarnpkg.com/ethereumjs-block/-/ethereumjs-block-1.7.1.tgz#78b88e6cc56de29a6b4884ee75379b6860333c3f" + integrity sha512-B+sSdtqm78fmKkBq78/QLKJbu/4Ts4P2KFISdgcuZUPDm9x+N7qgBPIIFUGbaakQh8bzuquiRVbdmvPKqbILRg== + dependencies: + async "^2.0.1" + ethereum-common "0.2.0" + ethereumjs-tx "^1.2.2" + ethereumjs-util "^5.0.0" + merkle-patricia-tree "^2.1.2" + +ethereumjs-blockchain@^4.0.3: + version "4.0.4" + resolved 
"https://registry.yarnpkg.com/ethereumjs-blockchain/-/ethereumjs-blockchain-4.0.4.tgz#30f2228dc35f6dcf94423692a6902604ae34960f" + integrity sha512-zCxaRMUOzzjvX78DTGiKjA+4h2/sF0OYL1QuPux0DHpyq8XiNoF5GYHtb++GUxVlMsMfZV7AVyzbtgcRdIcEPQ== + dependencies: + async "^2.6.1" + ethashjs "~0.0.7" + ethereumjs-block "~2.2.2" + ethereumjs-common "^1.5.0" + ethereumjs-util "^6.1.0" + flow-stoplight "^1.0.0" + level-mem "^3.0.1" + lru-cache "^5.1.1" + rlp "^2.2.2" + semaphore "^1.1.0" + +ethereumjs-common@1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/ethereumjs-common/-/ethereumjs-common-1.5.0.tgz#d3e82fc7c47c0cef95047f431a99485abc9bb1cd" + integrity sha512-SZOjgK1356hIY7MRj3/ma5qtfr/4B5BL+G4rP/XSMYr2z1H5el4RX5GReYCKmQmYI/nSBmRnwrZ17IfHuG0viQ== + +ethereumjs-common@^1.1.0, ethereumjs-common@^1.3.2, ethereumjs-common@^1.5.0: + version "1.5.2" + resolved "https://registry.yarnpkg.com/ethereumjs-common/-/ethereumjs-common-1.5.2.tgz#2065dbe9214e850f2e955a80e650cb6999066979" + integrity sha512-hTfZjwGX52GS2jcVO6E2sx4YuFnf0Fhp5ylo4pEPhEffNln7vS59Hr5sLnp3/QCazFLluuBZ+FZ6J5HTp0EqCA== + +ethereumjs-tx@2.1.2, ethereumjs-tx@^2.1.1, ethereumjs-tx@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ethereumjs-tx/-/ethereumjs-tx-2.1.2.tgz#5dfe7688bf177b45c9a23f86cf9104d47ea35fed" + integrity sha512-zZEK1onCeiORb0wyCXUvg94Ve5It/K6GD1K+26KfFKodiBiS6d9lfCXlUKGBBdQ+bv7Day+JK0tj1K+BeNFRAw== + dependencies: + ethereumjs-common "^1.5.0" + ethereumjs-util "^6.0.0" + +ethereumjs-tx@^1.1.1, ethereumjs-tx@^1.2.0, ethereumjs-tx@^1.2.2, ethereumjs-tx@^1.3.3: + version "1.3.7" + resolved "https://registry.yarnpkg.com/ethereumjs-tx/-/ethereumjs-tx-1.3.7.tgz#88323a2d875b10549b8347e09f4862b546f3d89a" + integrity sha512-wvLMxzt1RPhAQ9Yi3/HKZTn0FZYpnsmQdbKYfUUpi4j1SEIcbkd9tndVjcPrufY3V7j2IebOpC00Zp2P/Ay2kA== + dependencies: + ethereum-common "^0.0.18" + ethereumjs-util "^5.0.0" + +ethereumjs-util@6.2.1, ethereumjs-util@^6.0.0, ethereumjs-util@^6.1.0, ethereumjs-util@^6.2.0, 
ethereumjs-util@^6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-6.2.1.tgz#fcb4e4dd5ceacb9d2305426ab1a5cd93e3163b69" + integrity sha512-W2Ktez4L01Vexijrm5EB6w7dg4n/TgpoYU4avuT5T3Vmnw/eCRtiBrJfQYS/DCSvDIOLn2k57GcHdeBcgVxAqw== + dependencies: + "@types/bn.js" "^4.11.3" + bn.js "^4.11.0" + create-hash "^1.1.2" + elliptic "^6.5.2" + ethereum-cryptography "^0.1.3" + ethjs-util "0.1.6" + rlp "^2.2.3" + +ethereumjs-util@^4.3.0: + version "4.5.1" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-4.5.1.tgz#f4bf9b3b515a484e3cc8781d61d9d980f7c83bd0" + integrity sha512-WrckOZ7uBnei4+AKimpuF1B3Fv25OmoRgmYCpGsP7u8PFxXAmAgiJSYT2kRWnt6fVIlKaQlZvuwXp7PIrmn3/w== + dependencies: + bn.js "^4.8.0" + create-hash "^1.1.2" + elliptic "^6.5.2" + ethereum-cryptography "^0.1.3" + rlp "^2.0.0" + +ethereumjs-util@^5.0.0, ethereumjs-util@^5.0.1, ethereumjs-util@^5.1.1, ethereumjs-util@^5.1.2, ethereumjs-util@^5.1.3, ethereumjs-util@^5.1.5, ethereumjs-util@^5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-5.2.1.tgz#a833f0e5fca7e5b361384dc76301a721f537bf65" + integrity sha512-v3kT+7zdyCm1HIqWlLNrHGqHGLpGYIhjeHxQjnDXjLT2FyGJDsd3LWMYUo7pAFRrk86CR3nUJfhC81CCoJNNGQ== + dependencies: + bn.js "^4.11.0" + create-hash "^1.1.2" + elliptic "^6.5.2" + ethereum-cryptography "^0.1.3" + ethjs-util "^0.1.3" + rlp "^2.0.0" + safe-buffer "^5.1.1" + +ethereumjs-util@^7.0.2, ethereumjs-util@^7.1.0: + version "7.1.5" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-7.1.5.tgz#9ecf04861e4fbbeed7465ece5f23317ad1129181" + integrity sha512-SDl5kKrQAudFBUe5OJM9Ac6WmMyYmXX/6sTmLZ3ffG2eY6ZIGBes3pEDxNN6V72WyOw4CPD5RomKdsa8DAAwLg== + dependencies: + "@types/bn.js" "^5.1.0" + bn.js "^5.1.2" + create-hash "^1.1.2" + ethereum-cryptography "^0.1.3" + rlp "^2.2.4" + +ethereumjs-vm@4.2.0: + version "4.2.0" + resolved 
"https://registry.yarnpkg.com/ethereumjs-vm/-/ethereumjs-vm-4.2.0.tgz#e885e861424e373dbc556278f7259ff3fca5edab" + integrity sha512-X6qqZbsY33p5FTuZqCnQ4+lo957iUJMM6Mpa6bL4UW0dxM6WmDSHuI4j/zOp1E2TDKImBGCJA9QPfc08PaNubA== + dependencies: + async "^2.1.2" + async-eventemitter "^0.2.2" + core-js-pure "^3.0.1" + ethereumjs-account "^3.0.0" + ethereumjs-block "^2.2.2" + ethereumjs-blockchain "^4.0.3" + ethereumjs-common "^1.5.0" + ethereumjs-tx "^2.1.2" + ethereumjs-util "^6.2.0" + fake-merkle-patricia-tree "^1.0.1" + functional-red-black-tree "^1.0.1" + merkle-patricia-tree "^2.3.2" + rustbn.js "~0.2.0" + safe-buffer "^5.1.1" + util.promisify "^1.0.0" + +ethereumjs-vm@^2.1.0, ethereumjs-vm@^2.3.4, ethereumjs-vm@^2.6.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/ethereumjs-vm/-/ethereumjs-vm-2.6.0.tgz#76243ed8de031b408793ac33907fb3407fe400c6" + integrity sha512-r/XIUik/ynGbxS3y+mvGnbOKnuLo40V5Mj1J25+HEO63aWYREIqvWeRO/hnROlMBE5WoniQmPmhiaN0ctiHaXw== + dependencies: + async "^2.1.2" + async-eventemitter "^0.2.2" + ethereumjs-account "^2.0.3" + ethereumjs-block "~2.2.0" + ethereumjs-common "^1.1.0" + ethereumjs-util "^6.0.0" + fake-merkle-patricia-tree "^1.0.1" + functional-red-black-tree "^1.0.1" + merkle-patricia-tree "^2.3.2" + rustbn.js "~0.2.0" + safe-buffer "^5.1.1" + +ethereumjs-wallet@0.6.5: + version "0.6.5" + resolved "https://registry.yarnpkg.com/ethereumjs-wallet/-/ethereumjs-wallet-0.6.5.tgz#685e9091645cee230ad125c007658833991ed474" + integrity sha512-MDwjwB9VQVnpp/Dc1XzA6J1a3wgHQ4hSvA1uWNatdpOrtCbPVuQSKSyRnjLvS0a+KKMw2pvQ9Ybqpb3+eW8oNA== + dependencies: + aes-js "^3.1.1" + bs58check "^2.1.2" + ethereum-cryptography "^0.1.3" + ethereumjs-util "^6.0.0" + randombytes "^2.0.6" + safe-buffer "^5.1.2" + scryptsy "^1.2.1" + utf8 "^3.0.0" + uuid "^3.3.2" + +ethers@^5.0.1, ethers@^5.0.2, ethers@^5.5.2, ethers@^5.7.0, ethers@~5.7.0: + version "5.7.2" + resolved 
"https://registry.yarnpkg.com/ethers/-/ethers-5.7.2.tgz#3a7deeabbb8c030d4126b24f84e525466145872e" + integrity sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg== + dependencies: + "@ethersproject/abi" "5.7.0" + "@ethersproject/abstract-provider" "5.7.0" + "@ethersproject/abstract-signer" "5.7.0" + "@ethersproject/address" "5.7.0" + "@ethersproject/base64" "5.7.0" + "@ethersproject/basex" "5.7.0" + "@ethersproject/bignumber" "5.7.0" + "@ethersproject/bytes" "5.7.0" + "@ethersproject/constants" "5.7.0" + "@ethersproject/contracts" "5.7.0" + "@ethersproject/hash" "5.7.0" + "@ethersproject/hdnode" "5.7.0" + "@ethersproject/json-wallets" "5.7.0" + "@ethersproject/keccak256" "5.7.0" + "@ethersproject/logger" "5.7.0" + "@ethersproject/networks" "5.7.1" + "@ethersproject/pbkdf2" "5.7.0" + "@ethersproject/properties" "5.7.0" + "@ethersproject/providers" "5.7.2" + "@ethersproject/random" "5.7.0" + "@ethersproject/rlp" "5.7.0" + "@ethersproject/sha2" "5.7.0" + "@ethersproject/signing-key" "5.7.0" + "@ethersproject/solidity" "5.7.0" + "@ethersproject/strings" "5.7.0" + "@ethersproject/transactions" "5.7.0" + "@ethersproject/units" "5.7.0" + "@ethersproject/wallet" "5.7.0" + "@ethersproject/web" "5.7.1" + "@ethersproject/wordlists" "5.7.0" + +ethers@~5.5.0: + version "5.5.4" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.5.4.tgz#e1155b73376a2f5da448e4a33351b57a885f4352" + integrity sha512-N9IAXsF8iKhgHIC6pquzRgPBJEzc9auw3JoRkaKe+y4Wl/LFBtDDunNe7YmdomontECAcC5APaAgWZBiu1kirw== + dependencies: + "@ethersproject/abi" "5.5.0" + "@ethersproject/abstract-provider" "5.5.1" + "@ethersproject/abstract-signer" "5.5.0" + "@ethersproject/address" "5.5.0" + "@ethersproject/base64" "5.5.0" + "@ethersproject/basex" "5.5.0" + "@ethersproject/bignumber" "5.5.0" + "@ethersproject/bytes" "5.5.0" + "@ethersproject/constants" "5.5.0" + "@ethersproject/contracts" "5.5.0" + "@ethersproject/hash" "5.5.0" + "@ethersproject/hdnode" "5.5.0" + 
"@ethersproject/json-wallets" "5.5.0" + "@ethersproject/keccak256" "5.5.0" + "@ethersproject/logger" "5.5.0" + "@ethersproject/networks" "5.5.2" + "@ethersproject/pbkdf2" "5.5.0" + "@ethersproject/properties" "5.5.0" + "@ethersproject/providers" "5.5.3" + "@ethersproject/random" "5.5.1" + "@ethersproject/rlp" "5.5.0" + "@ethersproject/sha2" "5.5.0" + "@ethersproject/signing-key" "5.5.0" + "@ethersproject/solidity" "5.5.0" + "@ethersproject/strings" "5.5.0" + "@ethersproject/transactions" "5.5.0" + "@ethersproject/units" "5.5.0" + "@ethersproject/wallet" "5.5.0" + "@ethersproject/web" "5.5.1" + "@ethersproject/wordlists" "5.5.0" + +ethjs-abi@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ethjs-abi/-/ethjs-abi-0.2.0.tgz#d3e2c221011520fc499b71682036c14fcc2f5b25" + integrity sha512-ELExSTNV7rm9WA22pA3kxNmaQz1o/peM8X1pZdWypEhuNzFjOR+hIV09Mcfw69Q0kbdd7JNGAFb5dyoL/bnItA== + dependencies: + bn.js "4.11.6" + js-sha3 "0.5.5" + number-to-bn "1.7.0" + +ethjs-abi@0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/ethjs-abi/-/ethjs-abi-0.2.1.tgz#e0a7a93a7e81163a94477bad56ede524ab6de533" + integrity sha512-g2AULSDYI6nEJyJaEVEXtTimRY2aPC2fi7ddSy0W+LXvEVL8Fe1y76o43ecbgdUKwZD+xsmEgX1yJr1Ia3r1IA== + dependencies: + bn.js "4.11.6" + js-sha3 "0.5.5" + number-to-bn "1.7.0" + +ethjs-contract@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/ethjs-contract/-/ethjs-contract-0.2.3.tgz#f113ced8ed1c9c635b0b7ec71901340b64e8cded" + integrity sha512-fKsHm57wxwHrZhVlD8AHU2lC2G3c1fmvoEz15BpqIkuGWiTbjuvrQo2Avc+3EQpSsTFWNdyxC0h1WKRcn5kkyQ== + dependencies: + babel-runtime "^6.26.0" + ethjs-abi "0.2.0" + ethjs-filter "0.1.8" + ethjs-util "0.1.3" + js-sha3 "0.5.5" + +ethjs-filter@0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/ethjs-filter/-/ethjs-filter-0.1.8.tgz#2b02726b820ed4dd3860614d185c0c0f7ed1747f" + integrity sha512-qTDPskDL2UadHwjvM8A+WG9HwM4/FoSY3p3rMJORkHltYcAuiQZd2otzOYKcL5w2Q3sbAkW/E3yt/FPFL/AVXA== + +ethjs-format@0.2.7: 
+ version "0.2.7" + resolved "https://registry.yarnpkg.com/ethjs-format/-/ethjs-format-0.2.7.tgz#20c92f31c259a381588d069830d838b489774b86" + integrity sha512-uNYAi+r3/mvR3xYu2AfSXx5teP4ovy9z2FrRsblU+h2logsaIKZPi9V3bn3V7wuRcnG0HZ3QydgZuVaRo06C4Q== + dependencies: + bn.js "4.11.6" + ethjs-schema "0.2.1" + ethjs-util "0.1.3" + is-hex-prefixed "1.0.0" + number-to-bn "1.7.0" + strip-hex-prefix "1.0.0" + +ethjs-provider-http@0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/ethjs-provider-http/-/ethjs-provider-http-0.1.6.tgz#1ec5d9b4be257ef1d56a500b22a741985e889420" + integrity sha512-y054N5xyyx43KTQjgdkAEj2uEa/flwpENU5ldx/rmA0Q2yy0vyB2lsOIn/7V0uADMc4iRSHZfnFc9b9YS5Qkdw== + dependencies: + xhr2 "0.1.3" + +ethjs-query@0.3.8: + version "0.3.8" + resolved "https://registry.yarnpkg.com/ethjs-query/-/ethjs-query-0.3.8.tgz#aa5af02887bdd5f3c78b3256d0f22ffd5d357490" + integrity sha512-/J5JydqrOzU8O7VBOwZKUWXxHDGr46VqNjBCJgBVNNda+tv7Xc8Y2uJc6aMHHVbeN3YOQ7YRElgIc0q1CI02lQ== + dependencies: + babel-runtime "^6.26.0" + ethjs-format "0.2.7" + ethjs-rpc "0.2.0" + promise-to-callback "^1.0.0" + +ethjs-rpc@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/ethjs-rpc/-/ethjs-rpc-0.2.0.tgz#3d0011e32cfff156ed6147818c6fb8f801701b4c" + integrity sha512-RINulkNZTKnj4R/cjYYtYMnFFaBcVALzbtEJEONrrka8IeoarNB9Jbzn+2rT00Cv8y/CxAI+GgY1d0/i2iQeOg== + dependencies: + promise-to-callback "^1.0.0" + +ethjs-schema@0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/ethjs-schema/-/ethjs-schema-0.2.1.tgz#47e138920421453617069034684642e26bb310f4" + integrity sha512-DXd8lwNrhT9sjsh/Vd2Z+4pfyGxhc0POVnLBUfwk5udtdoBzADyq+sK39dcb48+ZU+2VgtwHxtGWnLnCfmfW5g== + +ethjs-unit@0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/ethjs-unit/-/ethjs-unit-0.1.6.tgz#c665921e476e87bce2a9d588a6fe0405b2c41699" + integrity sha512-/Sn9Y0oKl0uqQuvgFk/zQgR7aw1g36qX/jzSQ5lSwlO0GigPymk4eGQfeNTD03w1dPOqfz8V77Cy43jH56pagw== + dependencies: + bn.js "4.11.6" + number-to-bn 
"1.7.0" + +ethjs-util@0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/ethjs-util/-/ethjs-util-0.1.3.tgz#dfd5ea4a400dc5e421a889caf47e081ada78bb55" + integrity sha512-QqpX2dsEG2geSMG9dTMJVhfP1kGRdGMNjiHPiTjkju+X5cB0PQIwUzRr5k21pFkgF5zuLccqe83p7Gh5fFM5tQ== + dependencies: + is-hex-prefixed "1.0.0" + strip-hex-prefix "1.0.0" + +ethjs-util@0.1.6, ethjs-util@^0.1.3, ethjs-util@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/ethjs-util/-/ethjs-util-0.1.6.tgz#f308b62f185f9fe6237132fb2a9818866a5cd536" + integrity sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w== + dependencies: + is-hex-prefixed "1.0.0" + strip-hex-prefix "1.0.0" + +ethjs@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/ethjs/-/ethjs-0.4.0.tgz#53e6312b6fe050620bd6c5813cb683191cfd6e4a" + integrity sha512-UnQeRMpQ+JETN2FviexEskUwByid+eO8rybjPnk2DNUzjUn0VKNrUbiCAud7Es6otDFwjUeOS58vMZwkZxIIog== + dependencies: + bn.js "4.11.6" + ethjs-abi "0.2.1" + ethjs-contract "0.2.3" + ethjs-filter "0.1.8" + ethjs-provider-http "0.1.6" + ethjs-query "0.3.8" + ethjs-unit "0.1.6" + ethjs-util "0.1.3" + js-sha3 "0.5.5" + number-to-bn "1.7.0" + +event-target-shim@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789" + integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ== + +eventemitter3@4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384" + integrity sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== + +events@^3.0.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" + integrity 
sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + +evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" + integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== + dependencies: + md5.js "^1.3.4" + safe-buffer "^5.1.1" + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha512-w/ozOKR9Obk3qoWeY/WDi6MFta9AoMR+zud60mdnbniMcBxRuFJyDt2LdX/14A1UABeqk+Uk+LDfUpvoGKppZA== + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +expect@^29.0.0, expect@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/expect/-/expect-29.2.2.tgz#ba2dd0d7e818727710324a6e7f13dd0e6d086106" + integrity sha512-hE09QerxZ5wXiOhqkXy5d2G9ar+EqOyifnCXCpMNu+vZ6DG9TJ6CO2c2kPDSLqERTTWrO7OZj8EkYHQqSd78Yw== + dependencies: + "@jest/expect-utils" "^29.2.2" + jest-get-type "^29.2.0" + 
jest-matcher-utils "^29.2.2" + jest-message-util "^29.2.1" + jest-util "^29.2.1" + +express@^4.14.0: + version "4.18.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.1" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.5.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.2.0" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.1" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.7" + proxy-addr "~2.0.7" + qs "6.11.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.18.0" + serve-static "1.15.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +ext@^1.1.2: + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== + dependencies: + type "^2.7.2" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug== + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q== + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extend@~3.0.2: + version 
"3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== + +extsprintf@^1.2.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" + integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== + +fake-merkle-patricia-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fake-merkle-patricia-tree/-/fake-merkle-patricia-tree-1.0.1.tgz#4b8c3acfb520afadf9860b1f14cd8ce3402cddd3" + integrity sha512-Tgq37lkc9pUIgIKw5uitNUKcgcYL3R6JvXtKQbOf/ZSavXbidsksgp/pAY6p//uhw0I4yoMsvTSovvVIsk/qxA== + dependencies: + checkpoint-store "^1.1.0" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + 
resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-diff@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + +fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +fb-watchman@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +fetch-ponyfill@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/fetch-ponyfill/-/fetch-ponyfill-4.1.0.tgz#ae3ce5f732c645eab87e4ae8793414709b239893" + integrity sha512-knK9sGskIg2T7OnYLdZ2hZXn0CtDrAIBxYQLpmEf0BqfdWnwmM1weccUl5+4EdA44tzNSFAuxITPbXtPehUB3g== + dependencies: + node-fetch "~1.7.1" + +figures@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-2.0.0.tgz#3ab1a2d2a62c8bfb431a0c94cb797a2fce27c962" + integrity sha512-Oa2M9atig69ZkfwiApY8F2Yy+tzMbazyvqv21R0NsSC8floSOC09BbT1ITWAdoMGQvJ/aZnR1KMwdx9tvHnTNA== + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" + integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== + dependencies: + flat-cache "^2.0.1" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha512-VcpLTWqWDiTerugjj8e3+esbg+skS3M9e54UuR3iCeIDMXCLTsAH8hTSzDQU/X6/6t3eYkOKoZSef2PlU6U1XQ== + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +fill-range@^7.0.1: + version "7.0.1" + resolved 
"https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== + dependencies: + debug "2.6.9" + encodeurl "~1.0.2" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + +find-replace@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/find-replace/-/find-replace-1.0.3.tgz#b88e7364d2d9c959559f388c66670d6130441fa0" + integrity sha512-KrUnjzDCD9426YnCP56zGYy/eieTnhtK6Vn++j+JJzmlsWWwEkDnsyVF575spT6HJ6Ow9tlbT3TQTDsa+O4UWA== + dependencies: + array-back "^1.0.4" + test-value "^2.1.0" + +find-up@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +find-up@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-1.1.2.tgz#6b2e9822b1a2ce0a60ab64d610eccad53cb24d0f" + integrity sha512-jvElSjyuo4EMQGoTwo1uJU5pQMwTW5lS1x05zzfJuTIyLR3zwO27LYrxNg+dlvKpGOuGy/MzBdXh80g0ve5+HA== + dependencies: + path-exists "^2.0.0" + pinkie-promise "^2.0.0" + +find-up@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" + integrity sha512-NWzkk0jSJtTt08+FBFMvXoeZnOJD+jTtsRmBYbAIzJdX6l7dLgR7CTubCM5/eDdPUBvLCeVasP1brfVR/9/EZQ== + dependencies: + locate-path "^2.0.0" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + 
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-yarn-workspace-root@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/find-yarn-workspace-root/-/find-yarn-workspace-root-1.2.1.tgz#40eb8e6e7c2502ddfaa2577c176f221422f860db" + integrity sha512-dVtfb0WuQG+8Ag2uWkbG79hOUzEsRrhBzgfn86g2sJPkzmcpGdghbNTfUKGTxymFrY/tLIodDzLoW9nOJ4FY8Q== + dependencies: + fs-extra "^4.0.3" + micromatch "^3.1.4" + +find-yarn-workspace-root@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz#f47fb8d239c900eb78179aa81b66673eac88f7bd" + integrity sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ== + dependencies: + micromatch "^4.0.2" + +flat-cache@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" + integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== + dependencies: + flatted "^2.0.0" + rimraf "2.6.3" + write "1.0.3" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flat@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" + integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== + +flatted@^2.0.0: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" + integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== + +flatted@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +flow-stoplight@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/flow-stoplight/-/flow-stoplight-1.0.0.tgz#4a292c5bcff8b39fa6cc0cb1a853d86f27eeff7b" + integrity sha512-rDjbZUKpN8OYhB0IE/vY/I8UWO/602IIJEU/76Tv4LvYnwHCk0BCsvz4eRr9n+FQcri7L5cyaXOo0+/Kh4HisA== + +follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.9: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +for-each@^0.3.3, for-each@~0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== + dependencies: + is-callable "^1.1.3" + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ== + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== + +form-data@^3.0.0: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +forwarded@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== + +fp-ts@1.19.3: + version "1.19.3" + resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.3.tgz#261a60d1088fbff01f91256f91d21d0caaaaa96f" + integrity sha512-H5KQDspykdHuztLTg+ajGN0Z2qUjcEf3Ybxc6hLt0k7/zPkn29XnKnxlBPyW2XIddWrGaJBzBl4VLYOtk39yZg== + +fp-ts@^1.0.0: + version "1.19.5" + resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.5.tgz#3da865e585dfa1fdfd51785417357ac50afc520a" + integrity sha512-wDNqTimnzs8QqpldiId9OavWK2NptormjXnRJTQecNjzwfyp6P/8s/zG8e4h3ja3oqkKaY72UlTjQYt/1yXf9A== + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity 
sha512-GMBAbW9antB8iZRHLoGw0b3HANt57diZYFO/HL1JGIC1MjKrdmhxvrJbupnVvpys0zsz7yBApXdQyfepKly2kA== + dependencies: + map-cache "^0.2.2" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== + +fs-constants@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" + integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== + +fs-extra@^0.30.0: + version "0.30.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-0.30.0.tgz#f233ffcc08d4da7d432daa449776989db1df93f0" + integrity sha512-UvSPKyhMn6LEd/WpUaV9C9t3zATuqoqfWc3QdPhPLb58prN9tqYPlPWi8Krxi44loBoUzlobqZ3+8tGpxxSzwA== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^2.1.0" + klaw "^1.0.0" + path-is-absolute "^1.0.0" + rimraf "^2.2.8" + +fs-extra@^4.0.2, fs-extra@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" + integrity sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^7.0.0, fs-extra@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" + integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^9.0.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity 
sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-minipass@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7" + integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA== + dependencies: + minipass "^2.6.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fs@^0.0.1-security: + version "0.0.1-security" + resolved "https://registry.yarnpkg.com/fs/-/fs-0.0.1-security.tgz#8a7bd37186b6dddf3813f23858b57ecaaf5e41d4" + integrity sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w== + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + 
functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1, functional-red-black-tree@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +ganache-core@^2.13.2: + version "2.13.2" + resolved "https://registry.yarnpkg.com/ganache-core/-/ganache-core-2.13.2.tgz#27e6fc5417c10e6e76e2e646671869d7665814a3" + integrity sha512-tIF5cR+ANQz0+3pHWxHjIwHqFXcVo0Mb+kcsNhglNFALcYo49aQpnS9dqHartqPfMFjiHh/qFoD3mYK0d/qGgw== + dependencies: + abstract-leveldown "3.0.0" + async "2.6.2" + bip39 "2.5.0" + cachedown "1.0.0" + clone "2.1.2" + debug "3.2.6" + encoding-down "5.0.4" + eth-sig-util "3.0.0" + ethereumjs-abi "0.6.8" + ethereumjs-account "3.0.0" + ethereumjs-block "2.2.2" + ethereumjs-common "1.5.0" + ethereumjs-tx "2.1.2" + ethereumjs-util "6.2.1" + ethereumjs-vm "4.2.0" + heap "0.2.6" + keccak "3.0.1" + level-sublevel "6.6.4" + levelup "3.1.1" + lodash "4.17.20" + lru-cache "5.1.1" + merkle-patricia-tree "3.0.0" + patch-package "6.2.2" + seedrandom "3.0.1" + source-map-support "0.5.12" + tmp "0.1.0" + web3-provider-engine "14.2.1" + websocket "1.0.32" + optionalDependencies: + ethereumjs-wallet "0.6.5" + web3 "1.2.11" + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^1.0.1: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" + integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-func-name@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" + integrity sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stdin@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-5.0.1.tgz#122e161591e21ff4c52530305693f20e6393a398" + integrity sha512-jZV7n6jGE3Gt7fgSTJoz91Ak5MuTLwMwkoYdjxuJ/AmjIsE1UC03y/IWkZCQGEvVNS9qoRNwy5BCqxImv0FVeA== + +get-stream@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" + integrity 
sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== + dependencies: + pump "^3.0.0" + +get-stream@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" + integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== + dependencies: + pump "^3.0.0" + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA== + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== + dependencies: + assert-plus "^1.0.0" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@7.2.0: + version "7.2.0" + resolved 
"https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@~7.2.3: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@~7.1.2: + version "7.1.7" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +global@~4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/global/-/global-4.4.0.tgz#3e7b105179006a323ed71aafca3e9c57a5cc6406" + integrity sha512-wv/LAoHdRE3BeTGz53FAamhGlPLhlssK45usmGFThIi4XqnBmjKQ16u+RNbP7WvigRZDxUsM0J3gcQ5yicaL0w== + dependencies: + min-document "^2.19.0" + process "^0.11.10" + +globals@^11.1.0, globals@^11.7.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity 
sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globals@^9.18.0: + version "9.18.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" + integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ== + +globalthis@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" + integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== + dependencies: + define-properties "^1.1.3" + +globby@^11.0.3: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + +got@9.6.0: + version "9.6.0" + resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" + integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== + dependencies: + "@sindresorhus/is" "^0.14.0" + "@szmarczak/http-timer" "^1.1.2" + cacheable-request "^6.0.0" + decompress-response "^3.3.0" + duplexer3 "^0.1.4" + get-stream "^4.1.0" + lowercase-keys "^1.0.1" + mimic-response "^1.0.1" + p-cancelable "^1.0.0" + to-readable-stream "^1.0.0" + url-parse-lax "^3.0.0" + +got@^11.8.5: + version "11.8.6" + resolved 
"https://registry.yarnpkg.com/got/-/got-11.8.6.tgz#276e827ead8772eddbcfc97170590b841823233a" + integrity sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g== + dependencies: + "@sindresorhus/is" "^4.0.0" + "@szmarczak/http-timer" "^4.0.5" + "@types/cacheable-request" "^6.0.1" + "@types/responselike" "^1.0.0" + cacheable-lookup "^5.0.3" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + http2-wrapper "^1.0.0-beta.5.2" + lowercase-keys "^2.0.0" + p-cancelable "^2.0.0" + responselike "^2.0.0" + +graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +"graceful-readlink@>= 1.0.0": + version "1.0.1" + resolved "https://registry.yarnpkg.com/graceful-readlink/-/graceful-readlink-1.0.1.tgz#4cafad76bc62f02fa039b2f94e9a3dd3a391a725" + integrity sha512-8tLu60LgxF6XpdbK8OW3FA+IfTNBn1ZHGHKF4KQbEeSkajYw5PlYJcKluntgegDPTg8UkHjpet1T82vk6TQ68w== + +growl@1.10.5: + version "1.10.5" + resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e" + integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA== + +handlebars@^4.7.6: + version "4.7.7" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" + integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap "^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +har-schema@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +hardhat-contract-sizer@^2.0.2: + version "2.7.0" + resolved "https://registry.yarnpkg.com/hardhat-contract-sizer/-/hardhat-contract-sizer-2.7.0.tgz#d892a741135628a77d25709a29ae164f2750b7eb" + integrity sha512-QcncKiEku9TInKH++frfCPaYaVvw6OR5C5dNUcb05WozeVOJGspbWbHOkOlfiaZUbEKTvHA6OY9LtMMvja9nzQ== + dependencies: + chalk "^4.0.0" + cli-table3 "^0.6.0" + +hardhat-typechain@^0.3.3: + version "0.3.5" + resolved "https://registry.yarnpkg.com/hardhat-typechain/-/hardhat-typechain-0.3.5.tgz#8e50616a9da348b33bd001168c8fda9c66b7b4af" + integrity sha512-w9lm8sxqTJACY+V7vijiH+NkPExnmtiQEjsV9JKD1KgMdVk2q8y+RhvU/c4B7+7b1+HylRUCxpOIvFuB3rE4+w== + +hardhat@2.12.4, hardhat@=2.12.4, hardhat@^2.12.4: + version "2.12.4" + resolved "https://registry.yarnpkg.com/hardhat/-/hardhat-2.12.4.tgz#e539ba58bee9ba1a1ced823bfdcec0b3c5a3e70f" + integrity sha512-rc9S2U/4M+77LxW1Kg7oqMMmjl81tzn5rNFARhbXKUA1am/nhfMJEujOjuKvt+ZGMiZ11PYSe8gyIpB/aRNDgw== + dependencies: + "@ethersproject/abi" "^5.1.2" + "@metamask/eth-sig-util" "^4.0.0" + "@nomicfoundation/ethereumjs-block" "^4.0.0" + "@nomicfoundation/ethereumjs-blockchain" "^6.0.0" + "@nomicfoundation/ethereumjs-common" "^3.0.0" + "@nomicfoundation/ethereumjs-evm" "^1.0.0" + "@nomicfoundation/ethereumjs-rlp" "^4.0.0" + "@nomicfoundation/ethereumjs-statemanager" "^1.0.0" + "@nomicfoundation/ethereumjs-trie" "^5.0.0" + "@nomicfoundation/ethereumjs-tx" "^4.0.0" + "@nomicfoundation/ethereumjs-util" "^8.0.0" + 
"@nomicfoundation/ethereumjs-vm" "^6.0.0" + "@nomicfoundation/solidity-analyzer" "^0.1.0" + "@sentry/node" "^5.18.1" + "@types/bn.js" "^5.1.0" + "@types/lru-cache" "^5.1.0" + abort-controller "^3.0.0" + adm-zip "^0.4.16" + aggregate-error "^3.0.0" + ansi-escapes "^4.3.0" + chalk "^2.4.2" + chokidar "^3.4.0" + ci-info "^2.0.0" + debug "^4.1.1" + enquirer "^2.3.0" + env-paths "^2.2.0" + ethereum-cryptography "^1.0.3" + ethereumjs-abi "^0.6.8" + find-up "^2.1.0" + fp-ts "1.19.3" + fs-extra "^7.0.1" + glob "7.2.0" + immutable "^4.0.0-rc.12" + io-ts "1.10.4" + keccak "^3.0.2" + lodash "^4.17.11" + mnemonist "^0.38.0" + mocha "^10.0.0" + p-map "^4.0.0" + qs "^6.7.0" + raw-body "^2.4.1" + resolve "1.17.0" + semver "^6.3.0" + solc "0.7.3" + source-map-support "^0.5.13" + stacktrace-parser "^0.1.10" + tsort "0.0.1" + undici "^5.4.0" + uuid "^8.3.2" + ws "^7.4.6" + +has-ansi@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg== + dependencies: + ansi-regex "^2.0.0" + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + 
+has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-proto@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" + integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + +has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha512-gpG936j8/MzaeID5Yif+577c17TxaDmhuyVgSwtnL/q8UUTySg8Mecb+8Cf1otgLoD7DDH75axp86ER7LFsf3Q== + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha512-IBXk4GTsLYdQ7Rvt+GRBrFSVEkmuOUy4re0Xjd9kJSUQpnTrWR4/y9RpfexN9vkAPMFuQoeWKwqzPozRTlasGw== + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha512-J8S0cEdWuQbqD9//tlZxiMuMNmxB8PlEwvYwuxsTmR1G5RXUePEX/SJn7aD0GMLieuZYSwNH0cQuJGwnYunXRQ== + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha512-ODYZC64uqzmtfGMEAX/FvZiRyWLpAC3vYnNunURUnkGVTS+mI0smVsWaPydRBsE3g+ok7h960jChO8mFcWlHaQ== + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has@^1.0.3, has@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hash-base@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" + integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== + dependencies: + inherits "^2.0.4" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" + +hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== + dependencies: + inherits "^2.0.3" + minimalistic-assert "^1.0.1" + +he@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== + +heap@0.2.6: + version "0.2.6" + resolved "https://registry.yarnpkg.com/heap/-/heap-0.2.6.tgz#087e1f10b046932fc8594dd9e6d378afc9d1e5ac" + integrity 
sha512-MzzWcnfB1e4EG2vHi3dXHoBupmuXNZzx6pY6HldVS55JKKBoq3xOyzfSaZRkJp37HIhEYC78knabHff3zc4dQQ== + +hmac-drbg@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + integrity sha512-Tti3gMqLdZfhOQY1Mzf/AanLiqh1WTiJgEj26ZuYQ9fbkLomzGchCws4FyrSd4VkpBfiNhaE1On+lOz894jvXg== + dependencies: + hash.js "^1.0.3" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.1" + +home-or-tmp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/home-or-tmp/-/home-or-tmp-2.0.0.tgz#e36c3f2d2cae7d746a857e38d18d5f32a7882db8" + integrity sha512-ycURW7oUxE2sNiPVw1HVEFsW+ecOpJ5zaj7eC0RlwhibhRBod20muUN8qu/gzx956YrLolVvs1MTXwKgC2rVEg== + dependencies: + os-homedir "^1.0.0" + os-tmpdir "^1.0.1" + +hosted-git-info@^2.1.4, hosted-git-info@^2.6.0: + version "2.8.9" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" + integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +http-cache-semantics@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz#49e91c5cbf36c9b94bcfcd71c23d5249ec74e390" + integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== + +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== + dependencies: + depd "2.0.0" + inherits "2.0.4" + setprototypeof "1.2.0" 
+ statuses "2.0.1" + toidentifier "1.0.1" + +http-https@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/http-https/-/http-https-1.0.0.tgz#2f908dd5f1db4068c058cd6e6d4ce392c913389b" + integrity sha512-o0PWwVCSp3O0wS6FvNr6xfBCHgt0m1tvPLFOCc2iFDKTRAXhB7m8klDf7ErowFH8POa6dVdGatKU5I1YYwzUyg== + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +http2-wrapper@^1.0.0-beta.5.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" + integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.0.0" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@0.4.24, iconv-lite@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +iconv-lite@^0.6.2: + version "0.6.3" + resolved 
"https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +idna-uts46-hx@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/idna-uts46-hx/-/idna-uts46-hx-2.3.1.tgz#a1dc5c4df37eee522bf66d969cc980e00e8711f9" + integrity sha512-PWoF9Keq6laYdIRwwCdhTPl60xRqAloYNMQLiyUnG42VjT53oW07BXIRM+NK7eQjzXjAk2gUvX9caRxlnF9TAA== + dependencies: + punycode "2.1.0" + +ieee754@^1.1.13, ieee754@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.1.8, ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +ignore@~5.1.4: + version "5.1.9" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.9.tgz#9ec1a5cbe8e1446ec60d4420060d43aa6e7382fb" + integrity sha512-2zeMQpbKz5dhZ9IwL0gbxSW5w0NK/MSAMtNuhgIHEPmaU3vPdKPL0UdvUCXs5SS4JAwsBxysK5sFMW8ocFiVjQ== + +immediate@^3.2.3: + version "3.3.0" + resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.3.0.tgz#1aef225517836bcdf7f2a2de2600c79ff0269266" + integrity sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q== + +immediate@~3.2.3: + version "3.2.3" + resolved 
"https://registry.yarnpkg.com/immediate/-/immediate-3.2.3.tgz#d140fa8f614659bd6541233097ddaac25cdd991c" + integrity sha512-RrGCXRm/fRVqMIhqXrGEX9rRADavPiDFSoMb/k64i9XMk8uH4r/Omi5Ctierj6XzNecwDbO4WuFbDD1zmpl3Tg== + +immutable@^4.0.0-rc.12: + version "4.1.0" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.1.0.tgz#f795787f0db780183307b9eb2091fcac1f6fafef" + integrity sha512-oNkuqVTA8jqG1Q6c+UglTOD1xhC1BtjKI7XkCXRkZHrN5m18/XsnUp8Q89GkQO/z+0WjonSvl0FLhDYftp46nQ== + +import-fresh@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" + integrity sha512-eZ5H8rcgYazHbKC3PG4ClHNykCSxtAhxSSEM+2mb+7evD2CKF5V7c0dNum7AdpDh0ZdICwZY9sRSn8f+KH96sg== + dependencies: + caller-path "^2.0.0" + resolve-from "^3.0.0" + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity 
sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3, inherits@~2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +ini@~1.3.0: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + +inquirer@^6.0.0, inquirer@^6.2.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-6.5.2.tgz#ad50942375d036d327ff528c08bd5fab089928ca" + integrity sha512-cntlB5ghuB0iuO65Ovoi8ogLHiWGs/5yNrtUcKjFhSSiVeAIVpD7koaSU9RM8mpXw5YDi9RdYXGQMaOURB7ycQ== + dependencies: + ansi-escapes "^3.2.0" + chalk "^2.4.2" + cli-cursor "^2.1.0" + cli-width "^2.0.0" + external-editor "^3.0.3" + figures "^2.0.0" + lodash "^4.17.12" + mute-stream "0.0.7" + run-async "^2.2.0" + rxjs "^6.4.0" + string-width "^2.1.0" + strip-ansi "^5.1.0" + through "^2.3.6" + +internal-slot@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +internal-slot@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.4.tgz#8551e7baf74a7a6ba5f749cfb16aa60722f0d6f3" + integrity sha512-tA8URYccNzMo94s5MQZgH8NB/XTa6HsOo0MLfXTKKEnHVVdegzaQoFZ7Jp44bdvLvY2waT5dc+j5ICEswhi7UQ== + dependencies: + get-intrinsic "^1.1.3" + has "^1.0.3" + side-channel "^1.0.4" + +invariant@^2.2.2: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +invert-kv@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-1.0.0.tgz#104a8e4aaca6d3d8cd157a8ef8bfab2d7a3ffdb6" + integrity sha512-xgs2NH9AE66ucSq4cNG1nhSFghr5l6tdL15Pk+jl46bmmBapgoaY/AacXyaDznAqmGL99TiLSQgO/XazFSKYeQ== + +io-ts@1.10.4: + version "1.10.4" + resolved "https://registry.yarnpkg.com/io-ts/-/io-ts-1.10.4.tgz#cd5401b138de88e4f920adbcb7026e2d1967e6e2" + integrity sha512-b23PteSnYXSONJ6JQXRAlvJhuw8KOtkqa87W4wDtvMrud/DTJd5X+NpOOI+O/zZwVq6v0VLAaJ+1EDViKEuN9g== + dependencies: + fp-ts "^1.0.0" + +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity 
sha512-e1BM1qnDbMRG3ll2U9dSK0UMHuWOs3pY3AtcFsmvwPtKL3MML/Q86i+GilLfvqEs4GW+ExB91tQ3Ig9noDIZ+A== + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-arguments@^1.0.4: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-array-buffer@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.1.tgz#deb1db4fcae48308d54ef2442706c0393997052a" + integrity sha512-ASfLknmY8Xa2XtB4wmbz13Wu202baeA18cJBCeCy0wXUHZF0IPyVEXqKEcd+t2fNSLLL1vC6k7lxZEojNbISXQ== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-typed-array "^1.1.10" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity 
sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-buffer@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-ci@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== + dependencies: + ci-info "^2.0.0" + +is-core-module@^2.9.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg== + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-directory@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" + integrity sha512-yVChGzahRFvbkscn2MlwGismPO12i9+znNruC5gVEntG3qu0xQMzsGg/JFbrsqDOHtHFPci+V5aP5T9I+yeKqw== + +is-docker@^2.0.0: + version "2.2.1" + resolved 
"https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" + integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw== + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-finite@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-finite/-/is-finite-1.1.0.tgz#904135c77fb42c0641d6aa1bcdbc4daa8da082f3" + integrity sha512-cdyMtqX/BOqqNBBiKlIVkytNHm49MtMlYyn1zxzvJKWmFMlGzm+ry5BBfYyeY9YmNKbRSo/o7OX9w9ale0wg3w== + +is-fn@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fn/-/is-fn-1.0.0.tgz#9543d5de7bcf5b08a22ec8a20bae6e286d510d8c" + integrity sha512-XoFPJQmsAShb3jEQRfzf2rqXavq7fIqF/jOekp308JlThqrODnMpweVSGilKTCXELfLhltGP2AGgbQGVP8F1dg== + +is-fullwidth-code-point@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" + integrity sha512-1pqUqRjkhPJ9miNq9SwMfdvi6lBJcd6eFxvfaivQhaH3SgisfiuudvFntdKOmxuee/77l+FPjKrQjWvmPjWrRw== + dependencies: + number-is-nan "^1.0.0" + +is-fullwidth-code-point@^2.0.0: + version "2.0.0" 
+ resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-function@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-function/-/is-function-1.0.2.tgz#4f097f30abf6efadac9833b17ca5dc03f8144e08" + integrity sha512-lw7DUp0aWXYg+CBCN+JKkcE0Q2RayZnSvnZBlwgxHBQhqt5pZNVy4Ri7H9GmmXkdu7LUthszM+Tor1u/2iBcpQ== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-hex-prefixed@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz#7d8d37e6ad77e5d127148913c573e082d777f554" + integrity sha512-WvtOiug1VFrE9v1Cydwm+FnXd3+w9GaeVUss5W4v/SLy3UW00vP+6iNF2SdnfiBoLy4bTqVdkftNGTUeOFVsbA== + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + 
+is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg== + dependencies: + kind-of "^3.0.2" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-plain-obj@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-regex@^1.0.4, is-regex@^1.1.4, is-regex@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-stream@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" + integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-typed-array@^1.1.10, is-typed-array@^1.1.9: + version "1.1.10" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.10.tgz#36a5b5cb4189b575d1a3e4b08536bfb485801e3f" + integrity sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + +is-typedarray@^1.0.0, is-typedarray@~1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== + +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + +is-url@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-url/-/is-url-1.2.4.tgz#04a4df46d28c4cff3d73d01ff06abeb318a1aa52" + integrity sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww== + +is-utf8@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72" + integrity sha512-rMYPYvCzsXywIsldgLaSoPlw5PfoB/ssr7hY4pLfcodrA5M/eArza1a9VmTiNIBNMjOGr1Ow9mTyU2o69U6U9Q== + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" + integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== + dependencies: + is-docker "^2.0.0" + +isarray@0.0.1: + version "0.0.1" + resolved 
"https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== + +isarray@1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA== + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.1" + resolved 
"https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jest-changed-files@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-29.2.0.tgz#b6598daa9803ea6a4dce7968e20ab380ddbee289" + integrity sha512-qPVmLLyBmvF5HJrY7krDisx6Voi8DmlV3GZYX0aFNbaQsZeoz1hfxcCMbqDGuQCxU1dJy9eYc2xscE8QrCCYaA== + dependencies: + execa "^5.0.0" + p-limit "^3.1.0" + +jest-circus@^29.2.2: + version "29.2.2" + resolved 
"https://registry.yarnpkg.com/jest-circus/-/jest-circus-29.2.2.tgz#1dc4d35fd49bf5e64d3cc505fb2db396237a6dfa" + integrity sha512-upSdWxx+Mh4DV7oueuZndJ1NVdgtTsqM4YgywHEx05UMH5nxxA2Qu9T9T9XVuR021XxqSoaKvSmmpAbjwwwxMw== + dependencies: + "@jest/environment" "^29.2.2" + "@jest/expect" "^29.2.2" + "@jest/test-result" "^29.2.1" + "@jest/types" "^29.2.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + is-generator-fn "^2.0.0" + jest-each "^29.2.1" + jest-matcher-utils "^29.2.2" + jest-message-util "^29.2.1" + jest-runtime "^29.2.2" + jest-snapshot "^29.2.2" + jest-util "^29.2.1" + p-limit "^3.1.0" + pretty-format "^29.2.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-cli@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-29.2.2.tgz#feaf0aa57d327e80d4f2f18d5f8cd2e77cac5371" + integrity sha512-R45ygnnb2CQOfd8rTPFR+/fls0d+1zXS6JPYTBBrnLPrhr58SSuPTiA5Tplv8/PXpz4zXR/AYNxmwIj6J6nrvg== + dependencies: + "@jest/core" "^29.2.2" + "@jest/test-result" "^29.2.1" + "@jest/types" "^29.2.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^29.2.2" + jest-util "^29.2.1" + jest-validate "^29.2.2" + prompts "^2.0.1" + yargs "^17.3.1" + +jest-config@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-29.2.2.tgz#bf98623a46454d644630c1f0de8bba3f495c2d59" + integrity sha512-Q0JX54a5g1lP63keRfKR8EuC7n7wwny2HoTRDb8cx78IwQOiaYUVZAdjViY3WcTxpR02rPUpvNVmZ1fkIlZPcw== + dependencies: + "@babel/core" "^7.11.6" + "@jest/test-sequencer" "^29.2.2" + "@jest/types" "^29.2.1" + babel-jest "^29.2.2" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-circus "^29.2.2" + jest-environment-node "^29.2.2" + jest-get-type "^29.2.0" + jest-regex-util "^29.2.0" + jest-resolve "^29.2.2" + jest-runner "^29.2.2" + jest-util "^29.2.1" + jest-validate "^29.2.2" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format 
"^29.2.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.2.1.tgz#027e42f5a18b693fb2e88f81b0ccab533c08faee" + integrity sha512-gfh/SMNlQmP3MOUgdzxPOd4XETDJifADpT937fN1iUGz+9DgOu2eUPHH25JDkLVcLwwqxv3GzVyK4VBUr9fjfA== + dependencies: + chalk "^4.0.0" + diff-sequences "^29.2.0" + jest-get-type "^29.2.0" + pretty-format "^29.2.1" + +jest-docblock@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.2.0.tgz#307203e20b637d97cee04809efc1d43afc641e82" + integrity sha512-bkxUsxTgWQGbXV5IENmfiIuqZhJcyvF7tU4zJ/7ioTutdz4ToB5Yx6JOFBpgI+TphRY4lhOyCWGNH/QFQh5T6A== + dependencies: + detect-newline "^3.0.0" + +jest-each@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-29.2.1.tgz#6b0a88ee85c2ba27b571a6010c2e0c674f5c9b29" + integrity sha512-sGP86H/CpWHMyK3qGIGFCgP6mt+o5tu9qG4+tobl0LNdgny0aitLXs9/EBacLy3Bwqy+v4uXClqJgASJWcruYw== + dependencies: + "@jest/types" "^29.2.1" + chalk "^4.0.0" + jest-get-type "^29.2.0" + jest-util "^29.2.1" + pretty-format "^29.2.1" + +jest-environment-node@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-29.2.2.tgz#a64b272773870c3a947cd338c25fd34938390bc2" + integrity sha512-B7qDxQjkIakQf+YyrqV5dICNs7tlCO55WJ4OMSXsqz1lpI/0PmeuXdx2F7eU8rnPbRkUR/fItSSUh0jvE2y/tw== + dependencies: + "@jest/environment" "^29.2.2" + "@jest/fake-timers" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/node" "*" + jest-mock "^29.2.2" + jest-util "^29.2.1" + +jest-get-type@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.2.0.tgz#726646f927ef61d583a3b3adb1ab13f3a5036408" + integrity sha512-uXNJlg8hKFEnDgFsrCjznB+sTxdkuqiCL6zMgA75qEbAJjJYTs9XPrvDctrEig2GDow22T/LvHgO57iJhXB/UA== + +jest-haste-map@^29.2.1: + version "29.2.1" + resolved 
"https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-29.2.1.tgz#f803fec57f8075e6c55fb5cd551f99a72471c699" + integrity sha512-wF460rAFmYc6ARcCFNw4MbGYQjYkvjovb9GBT+W10Um8q5nHq98jD6fHZMDMO3tA56S8XnmNkM8GcA8diSZfnA== + dependencies: + "@jest/types" "^29.2.1" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^29.2.0" + jest-util "^29.2.1" + jest-worker "^29.2.1" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-leak-detector@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-29.2.1.tgz#ec551686b7d512ec875616c2c3534298b1ffe2fc" + integrity sha512-1YvSqYoiurxKOJtySc+CGVmw/e1v4yNY27BjWTVzp0aTduQeA7pdieLiW05wTYG/twlKOp2xS/pWuikQEmklug== + dependencies: + jest-get-type "^29.2.0" + pretty-format "^29.2.1" + +jest-matcher-utils@^29.0.3, jest-matcher-utils@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.2.2.tgz#9202f8e8d3a54733266784ce7763e9a08688269c" + integrity sha512-4DkJ1sDPT+UX2MR7Y3od6KtvRi9Im1ZGLGgdLFLm4lPexbTaCgJW5NN3IOXlQHF7NSHY/VHhflQ+WoKtD/vyCw== + dependencies: + chalk "^4.0.0" + jest-diff "^29.2.1" + jest-get-type "^29.2.0" + pretty-format "^29.2.1" + +jest-message-util@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.2.1.tgz#3a51357fbbe0cc34236f17a90d772746cf8d9193" + integrity sha512-Dx5nEjw9V8C1/Yj10S/8ivA8F439VS8vTq1L7hEgwHFn9ovSKNpYW/kwNh7UglaEgXO42XxzKJB+2x0nSglFVw== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^29.2.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^29.2.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^29.2.2: + version "29.2.2" + resolved 
"https://registry.yarnpkg.com/jest-mock/-/jest-mock-29.2.2.tgz#9045618b3f9d27074bbcf2d55bdca6a5e2e8bca7" + integrity sha512-1leySQxNAnivvbcx0sCB37itu8f4OX2S/+gxLAV4Z62shT4r4dTG9tACDywUAEZoLSr36aYUTsVp3WKwWt4PMQ== + dependencies: + "@jest/types" "^29.2.1" + "@types/node" "*" + jest-util "^29.2.1" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^29.2.0: + version "29.2.0" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-29.2.0.tgz#82ef3b587e8c303357728d0322d48bbfd2971f7b" + integrity sha512-6yXn0kg2JXzH30cr2NlThF+70iuO/3irbaB4mh5WyqNIvLLP+B6sFdluO1/1RJmslyh/f9osnefECflHvTbwVA== + +jest-resolve-dependencies@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-29.2.2.tgz#1f444766f37a25f1490b5137408b6ff746a05d64" + integrity sha512-wWOmgbkbIC2NmFsq8Lb+3EkHuW5oZfctffTGvwsA4JcJ1IRk8b2tg+hz44f0lngvRTeHvp3Kyix9ACgudHH9aQ== + dependencies: + jest-regex-util "^29.2.0" + jest-snapshot "^29.2.2" + +jest-resolve@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-29.2.2.tgz#ad6436053b0638b41e12bbddde2b66e1397b35b5" + integrity sha512-3gaLpiC3kr14rJR3w7vWh0CBX2QAhfpfiQTwrFPvVrcHe5VUBtIXaR004aWE/X9B2CFrITOQAp5gxLONGrk6GA== + dependencies: + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^29.2.1" + jest-pnp-resolver "^1.2.2" + jest-util "^29.2.1" + jest-validate "^29.2.2" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-29.2.2.tgz#6b5302ed15eba8bf05e6b14d40f1e8d469564da3" + integrity 
sha512-1CpUxXDrbsfy9Hr9/1zCUUhT813kGGK//58HeIw/t8fa/DmkecEwZSWlb1N/xDKXg3uCFHQp1GCvlSClfImMxg== + dependencies: + "@jest/console" "^29.2.1" + "@jest/environment" "^29.2.2" + "@jest/test-result" "^29.2.1" + "@jest/transform" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.13.1" + graceful-fs "^4.2.9" + jest-docblock "^29.2.0" + jest-environment-node "^29.2.2" + jest-haste-map "^29.2.1" + jest-leak-detector "^29.2.1" + jest-message-util "^29.2.1" + jest-resolve "^29.2.2" + jest-runtime "^29.2.2" + jest-util "^29.2.1" + jest-watcher "^29.2.2" + jest-worker "^29.2.1" + p-limit "^3.1.0" + source-map-support "0.5.13" + +jest-runtime@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-29.2.2.tgz#4068ee82423769a481460efd21d45a8efaa5c179" + integrity sha512-TpR1V6zRdLynckKDIQaY41od4o0xWL+KOPUCZvJK2bu5P1UXhjobt5nJ2ICNeIxgyj9NGkO0aWgDqYPVhDNKjA== + dependencies: + "@jest/environment" "^29.2.2" + "@jest/fake-timers" "^29.2.2" + "@jest/globals" "^29.2.2" + "@jest/source-map" "^29.2.0" + "@jest/test-result" "^29.2.1" + "@jest/transform" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/node" "*" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^29.2.1" + jest-message-util "^29.2.1" + jest-mock "^29.2.2" + jest-regex-util "^29.2.0" + jest-resolve "^29.2.2" + jest-snapshot "^29.2.2" + jest-util "^29.2.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-snapshot@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-29.2.2.tgz#1016ce60297b77382386bad561107174604690c2" + integrity sha512-GfKJrpZ5SMqhli3NJ+mOspDqtZfJBryGA8RIBxF+G+WbDoC7HCqKaeAss4Z/Sab6bAW11ffasx8/vGsj83jyjA== + dependencies: + "@babel/core" "^7.11.6" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-jsx" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.3.3" 
+ "@jest/expect-utils" "^29.2.2" + "@jest/transform" "^29.2.2" + "@jest/types" "^29.2.1" + "@types/babel__traverse" "^7.0.6" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^29.2.2" + graceful-fs "^4.2.9" + jest-diff "^29.2.1" + jest-get-type "^29.2.0" + jest-haste-map "^29.2.1" + jest-matcher-utils "^29.2.2" + jest-message-util "^29.2.1" + jest-util "^29.2.1" + natural-compare "^1.4.0" + pretty-format "^29.2.1" + semver "^7.3.5" + +jest-util@^29.0.0, jest-util@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.2.1.tgz#f26872ba0dc8cbefaba32c34f98935f6cf5fc747" + integrity sha512-P5VWDj25r7kj7kl4pN2rG/RN2c1TLfYYYZYULnS/35nFDjBai+hBeo3MDrYZS7p6IoY3YHZnt2vq4L6mKnLk0g== + dependencies: + "@jest/types" "^29.2.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-29.2.2.tgz#e43ce1931292dfc052562a11bc681af3805eadce" + integrity sha512-eJXATaKaSnOuxNfs8CLHgdABFgUrd0TtWS8QckiJ4L/QVDF4KVbZFBBOwCBZHOS0Rc5fOxqngXeGXE3nGQkpQA== + dependencies: + "@jest/types" "^29.2.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^29.2.0" + leven "^3.1.0" + pretty-format "^29.2.1" + +jest-watcher@^29.2.2: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-29.2.2.tgz#7093d4ea8177e0a0da87681a9e7b09a258b9daf7" + integrity sha512-j2otfqh7mOvMgN2WlJ0n7gIx9XCMWntheYGlBK7+5g3b1Su13/UAK7pdKGyd4kDlrLwtH2QPvRv5oNIxWvsJ1w== + dependencies: + "@jest/test-result" "^29.2.1" + "@jest/types" "^29.2.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.13.1" + jest-util "^29.2.1" + string-length "^4.0.1" + +jest-worker@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.2.1.tgz#8ba68255438252e1674f990f0180c54dfa26a3b1" + integrity 
sha512-ROHTZ+oj7sBrgtv46zZ84uWky71AoYi0vEV9CdEtc1FQunsoAGe5HbQmW76nI5QWdvECVPrSi1MCVUmizSavMg== + dependencies: + "@types/node" "*" + jest-util "^29.2.1" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^29.0.3: + version "29.2.2" + resolved "https://registry.yarnpkg.com/jest/-/jest-29.2.2.tgz#24da83cbbce514718acd698926b7679109630476" + integrity sha512-r+0zCN9kUqoON6IjDdjbrsWobXM/09Nd45kIPRD8kloaRh1z5ZCMdVsgLXGxmlL7UpAJsvCYOQNO+NjvG/gqiQ== + dependencies: + "@jest/core" "^29.2.2" + "@jest/types" "^29.2.1" + import-local "^3.0.2" + jest-cli "^29.2.2" + +js-sha3@0.5.5: + version "0.5.5" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.5.5.tgz#baf0c0e8c54ad5903447df96ade7a4a1bca79a4a" + integrity sha512-yLLwn44IVeunwjpDVTDZmQeVbB0h+dZpY2eO68B/Zik8hu6dH+rKeLxwua79GGIvW6xr8NBAcrtiUbYrTjEFTA== + +js-sha3@0.8.0, js-sha3@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.8.0.tgz#b9b7a5da73afad7dedd0f8c463954cbde6818840" + integrity sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q== + +js-sha3@^0.5.7: + version "0.5.7" + resolved "https://registry.yarnpkg.com/js-sha3/-/js-sha3-0.5.7.tgz#0d4ffd8002d5333aabaf4a23eed2f6374c9f28e7" + integrity sha512-GII20kjaPX0zJ8wzkTbNDYMY7msuZcTWk8S5UOh6806Jq/wz1J8/bnr8uGU0DAUmYDjj2Mr4X1cW8v/GLYnR+g== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-tokens@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" + integrity sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg== + +js-yaml@4.1.0: + version "4.1.0" + resolved 
"https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +js-yaml@^3.12.0, js-yaml@^3.13.0, js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@~3.13.1: + version "3.13.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847" + integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== + +jsesc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" + integrity sha512-Mke0DA0QjUWuJlhsE0ZPPhYiJkRap642SmI/4ztCFaUs6V2AiH1sfecc+57NgaryfAA2VR3v6O+CSjC1jZJKOA== + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== + +json-buffer@3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" + integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== + +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== + +json-parse-better-errors@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-rpc-engine@^3.4.0, json-rpc-engine@^3.6.0: + version "3.8.0" + resolved "https://registry.yarnpkg.com/json-rpc-engine/-/json-rpc-engine-3.8.0.tgz#9d4ff447241792e1d0a232f6ef927302bb0c62a9" + integrity sha512-6QNcvm2gFuuK4TKU1uwfH0Qd/cOSb9c1lls0gbnIhciktIUQJwz6NQNAW4B1KiGPenv7IKu97V222Yo1bNhGuA== + dependencies: + async "^2.0.1" + babel-preset-env "^1.7.0" + babelify "^7.3.0" + json-rpc-error "^2.0.0" + promise-to-callback "^1.0.0" + safe-event-emitter "^1.0.1" + +json-rpc-error@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/json-rpc-error/-/json-rpc-error-2.0.0.tgz#a7af9c202838b5e905c7250e547f1aff77258a02" + integrity sha512-EwUeWP+KgAZ/xqFpaP6YDAXMtCJi+o/QQpCQFIYyxr01AdADi2y413eM8hSqJcoQym9WMePAJWoaODEJufC4Ug== + dependencies: + inherits "^2.0.1" + +json-rpc-random-id@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/json-rpc-random-id/-/json-rpc-random-id-1.0.1.tgz#ba49d96aded1444dbb8da3d203748acbbcdec8c8" + integrity sha512-RJ9YYNCkhVDBuP4zN5BBtYAzEl03yq/jIIsyif0JY9qyJuQQZNeDK7anAPKKlyEtLSj2s8h6hNh2F8zO5q7ScA== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-schema@0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" + integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json-stable-stringify@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.2.tgz#e06f23128e0bbe342dc996ed5a19e28b57b580e0" + integrity sha512-eunSSaEnxV12z+Z73y/j5N37/In40GK4GmsSy+tEHJMxknvqnA7/djeYtAgW0GsWHUfg+847WJjKaEylk2y09g== + dependencies: + jsonify "^0.0.1" + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity 
sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== + +json5@^0.5.1: + version "0.5.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-0.5.1.tgz#1eade7acc012034ad84e2396767ead9fa5495821" + integrity sha512-4xrs1aW+6N5DalkqSVA8fxh458CXvR99WU8WLKmq4v8eWAL86Xo3BVqyd3SkA9wEVjCMqyvvRRkshAdOnBp5rw== + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +jsonc-parser@~2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-2.2.1.tgz#db73cd59d78cce28723199466b2a03d1be1df2bc" + integrity sha512-o6/yDBYccGvTz1+QFevz6l6OBZ2+fMVu2JZ9CIhzsYRX4mjaK5IyX9eldUdCmga16zlgQxyrj5pt9kzuj2C02w== + +jsonfile@^2.1.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-2.4.0.tgz#3736a2b428b87bbda0cc83b53fa3d633a35c2ae8" + integrity sha512-PKllAqbgLgxHaj8TElYymKCAgrASebJrWpTnEkOaTowt23VKXXN0sUeriJ+eh7y6ufb/CC5ap11pz71/cM0hUw== + optionalDependencies: + graceful-fs "^4.1.6" + +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" + integrity sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg== + optionalDependencies: + graceful-fs "^4.1.6" + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonify@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.1.tgz#2aa3111dae3d34a0f151c63f3a45d995d9420978" + integrity 
sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg== + +jsonparse@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" + integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== + +jsonwebtoken@^8.5.1: + version "8.5.1" + resolved "https://registry.yarnpkg.com/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz#00e71e0b8df54c2121a1f26137df2280673bcc0d" + integrity sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w== + dependencies: + jws "^3.2.2" + lodash.includes "^4.3.0" + lodash.isboolean "^3.0.3" + lodash.isinteger "^4.0.4" + lodash.isnumber "^3.0.3" + lodash.isplainobject "^4.0.6" + lodash.isstring "^4.0.1" + lodash.once "^4.0.0" + ms "^2.1.1" + semver "^5.6.0" + +jsprim@^1.2.2: + version "1.4.2" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" + integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.4.0" + verror "1.10.0" + +jwa@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a" + integrity sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA== + dependencies: + buffer-equal-constant-time "1.0.1" + ecdsa-sig-formatter "1.0.11" + safe-buffer "^5.0.1" + +jws@^3.2.2: + version "3.2.2" + resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" + integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== + dependencies: + jwa "^1.4.1" + safe-buffer "^5.0.1" + +keccak@3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/keccak/-/keccak-3.0.1.tgz#ae30a0e94dbe43414f741375cff6d64c8bea0bff" + integrity sha512-epq90L9jlFWCW7+pQa6JOnKn2Xgl2mtI664seYR6MHskvI9agt7AnDqmAlp9TqU4/caMYbA08Hi5DMZAl5zdkA== + dependencies: + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + +keccak@^3.0.0, keccak@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/keccak/-/keccak-3.0.2.tgz#4c2c6e8c54e04f2670ee49fa734eb9da152206e0" + integrity sha512-PyKKjkH53wDMLGrvmRGSNWgmSxZOUqbnXwKL9tmgbFYA1iAYqW21kfR7mZXV0MlESiefxQQE9X9fTa3X+2MPDQ== + dependencies: + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + readable-stream "^3.6.0" + +keyv@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" + integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== + dependencies: + json-buffer "3.0.0" + +keyv@^4.0.0: + version "4.5.2" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.2.tgz#0e310ce73bf7851ec702f2eaf46ec4e3805cce56" + integrity sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g== + dependencies: + json-buffer "3.0.1" + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha512-24XsCxmEbRwEDbz/qz3stgin8TTzZ1ESR56OMCN0ujYg+vRutNSiOj9bHH9u85DKgXguraugV5sFuvbD4FW/hw== + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity 
sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +klaw-sync@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/klaw-sync/-/klaw-sync-6.0.0.tgz#1fd2cfd56ebb6250181114f0a581167099c2b28c" + integrity sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ== + dependencies: + graceful-fs "^4.1.11" + +klaw@^1.0.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/klaw/-/klaw-1.3.1.tgz#4088433b46b3b1ba259d78785d8e96f73ba02439" + integrity sha512-TED5xi9gGQjGpNnvRWknrwAB1eL5GciPfVFOt3Vk1OJCVDQbzuSfrF3hkUQKlsgKrG1F+0t5W0m+Fje1jIt8rw== + optionalDependencies: + graceful-fs "^4.1.9" + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +lcid@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/lcid/-/lcid-1.0.0.tgz#308accafa0bc483a3867b4b6f2b9506251d1b835" + integrity sha512-YiGkH6EnGrDGqLMITnGjXtGmNtjoXw9SVUzcaos8RBi7Ps0VBylkq+vOcY9QE5poLasPCR849ucFUkl0UzUyOw== + dependencies: + invert-kv "^1.0.0" + +level-codec@^9.0.0: + version "9.0.2" + resolved "https://registry.yarnpkg.com/level-codec/-/level-codec-9.0.2.tgz#fd60df8c64786a80d44e63423096ffead63d8cbc" + integrity sha512-UyIwNb1lJBChJnGfjmO0OR+ezh2iVu1Kas3nvBS/BzGnx79dv6g7unpKIDNPMhfdTEGoc7mC8uAu51XEtX+FHQ== + dependencies: + buffer "^5.6.0" + +level-codec@~7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/level-codec/-/level-codec-7.0.1.tgz#341f22f907ce0f16763f24bddd681e395a0fb8a7" + integrity 
sha512-Ua/R9B9r3RasXdRmOtd+t9TCOEIIlts+TN/7XTT2unhDaL6sJn83S3rUyljbr6lVtw49N3/yA0HHjpV6Kzb2aQ== + +level-errors@^1.0.3: + version "1.1.2" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-1.1.2.tgz#4399c2f3d3ab87d0625f7e3676e2d807deff404d" + integrity sha512-Sw/IJwWbPKF5Ai4Wz60B52yj0zYeqzObLh8k1Tk88jVmD51cJSKWSYpRyhVIvFzZdvsPqlH5wfhp/yxdsaQH4w== + dependencies: + errno "~0.1.1" + +level-errors@^2.0.0, level-errors@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-2.0.1.tgz#2132a677bf4e679ce029f517c2f17432800c05c8" + integrity sha512-UVprBJXite4gPS+3VznfgDSU8PTRuVX0NXwoWW50KLxd2yw4Y1t2JUR5In1itQnudZqRMT9DlAM3Q//9NCjCFw== + dependencies: + errno "~0.1.1" + +level-errors@~1.0.3: + version "1.0.5" + resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-1.0.5.tgz#83dbfb12f0b8a2516bdc9a31c4876038e227b859" + integrity sha512-/cLUpQduF6bNrWuAC4pwtUKA5t669pCsCi2XbmojG2tFeOr9j6ShtdDCtFFQO1DRt+EVZhx9gPzP9G2bUaG4ig== + dependencies: + errno "~0.1.1" + +level-iterator-stream@^2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-2.0.3.tgz#ccfff7c046dcf47955ae9a86f46dfa06a31688b4" + integrity sha512-I6Heg70nfF+e5Y3/qfthJFexhRw/Gi3bIymCoXAlijZdAcLaPuWSJs3KXyTYf23ID6g0o2QF62Yh+grOXY3Rig== + dependencies: + inherits "^2.0.1" + readable-stream "^2.0.5" + xtend "^4.0.0" + +level-iterator-stream@~1.3.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-1.3.1.tgz#e43b78b1a8143e6fa97a4f485eb8ea530352f2ed" + integrity sha512-1qua0RHNtr4nrZBgYlpV0qHHeHpcRRWTxEZJ8xsemoHAXNL5tbooh4tPEEqIqsbWCAJBmUmkwYK/sW5OrFjWWw== + dependencies: + inherits "^2.0.1" + level-errors "^1.0.3" + readable-stream "^1.0.33" + xtend "^4.0.0" + +level-iterator-stream@~3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-3.0.1.tgz#2c98a4f8820d87cdacab3132506815419077c730" 
+ integrity sha512-nEIQvxEED9yRThxvOrq8Aqziy4EGzrxSZK+QzEFAVuJvQ8glfyZ96GB6BoI4sBbLfjMXm2w4vu3Tkcm9obcY0g== + dependencies: + inherits "^2.0.1" + readable-stream "^2.3.6" + xtend "^4.0.0" + +level-mem@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/level-mem/-/level-mem-3.0.1.tgz#7ce8cf256eac40f716eb6489654726247f5a89e5" + integrity sha512-LbtfK9+3Ug1UmvvhR2DqLqXiPW1OJ5jEh0a3m9ZgAipiwpSxGj/qaVVy54RG5vAQN1nCuXqjvprCuKSCxcJHBg== + dependencies: + level-packager "~4.0.0" + memdown "~3.0.0" + +level-packager@~4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-4.0.1.tgz#7e7d3016af005be0869bc5fa8de93d2a7f56ffe6" + integrity sha512-svCRKfYLn9/4CoFfi+d8krOtrp6RoX8+xm0Na5cgXMqSyRru0AnDYdLl+YI8u1FyS6gGZ94ILLZDE5dh2but3Q== + dependencies: + encoding-down "~5.0.0" + levelup "^3.0.0" + +level-post@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/level-post/-/level-post-1.0.7.tgz#19ccca9441a7cc527879a0635000f06d5e8f27d0" + integrity sha512-PWYqG4Q00asOrLhX7BejSajByB4EmG2GaKHfj3h5UmmZ2duciXLPGYWIjBzLECFWUGOZWlm5B20h/n3Gs3HKew== + dependencies: + ltgt "^2.1.2" + +level-sublevel@6.6.4: + version "6.6.4" + resolved "https://registry.yarnpkg.com/level-sublevel/-/level-sublevel-6.6.4.tgz#f7844ae893919cd9d69ae19d7159499afd5352ba" + integrity sha512-pcCrTUOiO48+Kp6F1+UAzF/OtWqLcQVTVF39HLdZ3RO8XBoXt+XVPKZO1vVr1aUoxHZA9OtD2e1v7G+3S5KFDA== + dependencies: + bytewise "~1.1.0" + level-codec "^9.0.0" + level-errors "^2.0.0" + level-iterator-stream "^2.0.3" + ltgt "~2.1.1" + pull-defer "^0.2.2" + pull-level "^2.0.3" + pull-stream "^3.6.8" + typewiselite "~1.0.0" + xtend "~4.0.0" + +level-supports@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-4.0.1.tgz#431546f9d81f10ff0fea0e74533a0e875c08c66a" + integrity sha512-PbXpve8rKeNcZ9C1mUicC9auIYFyGpkV9/i6g76tLgANwWhtG2v7I4xNBUlkn3lE2/dZF3Pi0ygYGtLc4RXXdA== + +level-transcoder@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/level-transcoder/-/level-transcoder-1.0.1.tgz#f8cef5990c4f1283d4c86d949e73631b0bc8ba9c" + integrity sha512-t7bFwFtsQeD8cl8NIoQ2iwxA0CL/9IFw7/9gAjOonH0PWTTiRfY7Hq+Ejbsxh86tXobDQ6IOiddjNYIfOBs06w== + dependencies: + buffer "^6.0.3" + module-error "^1.0.1" + +level-ws@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/level-ws/-/level-ws-0.0.0.tgz#372e512177924a00424b0b43aef2bb42496d228b" + integrity sha512-XUTaO/+Db51Uiyp/t7fCMGVFOTdtLS/NIACxE/GHsij15mKzxksZifKVjlXDF41JMUP/oM1Oc4YNGdKnc3dVLw== + dependencies: + readable-stream "~1.0.15" + xtend "~2.1.1" + +level-ws@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/level-ws/-/level-ws-1.0.0.tgz#19a22d2d4ac57b18cc7c6ecc4bd23d899d8f603b" + integrity sha512-RXEfCmkd6WWFlArh3X8ONvQPm8jNpfA0s/36M4QzLqrLEIt1iJE9WBHLZ5vZJK6haMjJPJGJCQWfjMNnRcq/9Q== + dependencies: + inherits "^2.0.3" + readable-stream "^2.2.8" + xtend "^4.0.1" + +level@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/level/-/level-8.0.0.tgz#41b4c515dabe28212a3e881b61c161ffead14394" + integrity sha512-ypf0jjAk2BWI33yzEaaotpq7fkOPALKAgDBxggO6Q9HGX2MRXn0wbP1Jn/tJv1gtL867+YOjOB49WaUF3UoJNQ== + dependencies: + browser-level "^1.0.1" + classic-level "^1.2.0" + +levelup@3.1.1, levelup@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-3.1.1.tgz#c2c0b3be2b4dc316647c53b42e2f559e232d2189" + integrity sha512-9N10xRkUU4dShSRRFTBdNaBxofz+PGaIZO962ckboJZiNmLuhVT6FZ6ZKAsICKfUBO76ySaYU6fJWX/jnj3Lcg== + dependencies: + deferred-leveldown "~4.0.0" + level-errors "~2.0.0" + level-iterator-stream "~3.0.0" + xtend "~4.0.0" + +levelup@^1.2.1: + version "1.3.9" + resolved "https://registry.yarnpkg.com/levelup/-/levelup-1.3.9.tgz#2dbcae845b2bb2b6bea84df334c475533bbd82ab" + integrity sha512-VVGHfKIlmw8w1XqpGOAGwq6sZm2WwWLmlDcULkKWQXEA5EopA8OBNJ2Ck2v6bdk8HeEZSbCSEgzXadyQFm76sQ== + dependencies: + deferred-leveldown "~1.2.1" + level-codec "~7.0.0" + level-errors "~1.0.3" + 
level-iterator-stream "~1.3.0" + prr "~1.0.1" + semver "~5.4.1" + xtend "~4.0.0" + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.3.0, levn@~0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA== + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +linkify-it@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-3.0.3.tgz#a98baf44ce45a550efb4d49c769d07524cc2fa2e" + integrity sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ== + dependencies: + uc.micro "^1.0.1" + +load-json-file@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-1.1.0.tgz#956905708d58b4bab4c2261b04f59f31c99374c0" + integrity sha512-cy7ZdNRXdablkXYNI049pthVeXFurRyb9+hA/dZzerZ0pGTx42z+y+ssxBaVV2l70t1muq5IdKhn4UtcoGUY9A== + dependencies: + graceful-fs "^4.1.2" + parse-json "^2.2.0" + pify "^2.0.0" + pinkie-promise "^2.0.0" + strip-bom "^2.0.0" + +load-json-file@^4.0.0: + version "4.0.0" + 
resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-4.0.0.tgz#2f5f45ab91e33216234fd53adab668eb4ec0993b" + integrity sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw== + dependencies: + graceful-fs "^4.1.2" + parse-json "^4.0.0" + pify "^3.0.0" + strip-bom "^3.0.0" + +locate-path@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e" + integrity sha512-NCI2kiDkyR7VeEKm27Kda/iQHyKJe1Bu0FlTbYp3CqJu+9IFe9bLyAjMxf5ZDDbEg+iMPzB5zYyUTSm8wVTKmA== + dependencies: + p-locate "^2.0.0" + path-exists "^3.0.0" + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.assign@^4.0.3, lodash.assign@^4.0.6: + version "4.2.0" + resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" + integrity sha512-hFuH8TY+Yji7Eja3mGiuAxBqLagejScbG8GbG0j6o9vzn0YL14My+ktnqtZgFTosKymC9/44wP6s7xyuLfnClw== + +lodash.differencewith@~4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.differencewith/-/lodash.differencewith-4.5.0.tgz#bafafbc918b55154e179176a00bb0aefaac854b7" + integrity sha512-/8JFjydAS+4bQuo3CpLMBv7WxGFyk7/etOAsrQUCu0a9QVDemxv0YQ0rFyeZvqlUD314SERfNlgnlqqHmaQ0Cg== + +lodash.flatten@~4.4.0: + version "4.4.0" + resolved 
"https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f" + integrity sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g== + +lodash.includes@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" + integrity sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w== + +lodash.isboolean@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz#6c2e171db2a257cd96802fd43b01b20d5f5870f6" + integrity sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg== + +lodash.isinteger@^4.0.4: + version "4.0.4" + resolved "https://registry.yarnpkg.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz#619c0af3d03f8b04c31f5882840b77b11cd68343" + integrity sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA== + +lodash.isnumber@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz#3ce76810c5928d03352301ac287317f11c0b1ffc" + integrity sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + +lodash.memoize@4.x: + version "4.1.2" + resolved 
"https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.once@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" + integrity sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg== + +lodash.truncate@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" + integrity sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw== + +lodash@4.17.20: + version "4.17.20" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" + integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== + +lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +log-symbols@4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + +looper@^2.0.0: + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/looper/-/looper-2.0.0.tgz#66cd0c774af3d4fedac53794f742db56da8f09ec" + integrity sha512-6DzMHJcjbQX/UPHc1rRCBfKlLwDkvuGZ715cIR36wSdYqWXFT35uLXq5P/2orl3tz+t+VOVPxw4yPinQlUDGDQ== + +looper@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/looper/-/looper-3.0.0.tgz#2efa54c3b1cbaba9b94aee2e5914b0be57fbb749" + integrity sha512-LJ9wplN/uSn72oJRsXTx+snxPet5c8XiZmOKCm906NVYu+ag6SB6vUcnJcWxgnl2NfbIyeobAn7Bwv6xRj2XJg== + +loose-envify@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +loupe@^2.3.1: + version "2.3.4" + resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.4.tgz#7e0b9bffc76f148f9be769cb1321d3dcf3cb25f3" + integrity sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ== + dependencies: + get-func-name "^2.0.0" + +lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f" + integrity sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== + +lowercase-keys@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" + integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== + +lru-cache@5.1.1, lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +lru-cache@^3.2.0: + version 
"3.2.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-3.2.0.tgz#71789b3b7f5399bec8565dda38aa30d2a097efee" + integrity sha512-91gyOKTc2k66UG6kHiH4h3S2eltcPwE1STVfMYC/NG+nZwf8IIuiamfmpGZjpbbxzSyEJaLC0tNSmhjlQUTJow== + dependencies: + pseudomap "^1.0.1" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lru_map@^0.3.3: + version "0.3.3" + resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd" + integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ== + +ltgt@^2.1.2, ltgt@~2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.2.1.tgz#f35ca91c493f7b73da0e07495304f17b31f87ee5" + integrity sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA== + +ltgt@~2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.1.3.tgz#10851a06d9964b971178441c23c9e52698eece34" + integrity sha512-5VjHC5GsENtIi5rbJd+feEpDKhfr7j0odoUR2Uh978g+2p93nd5o34cTjQWohXsPsCZeqoDnIqEf88mPCe0Pfw== + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x, make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +makeerror@1.0.12: + version "1.0.12" + resolved 
"https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha512-4y7uGv8bd2WdM9vpQsiQNo41Ln1NvhvDRuVt0k2JZQ+ezN2uaQes7lZeZ+QQUHOLQAtDaBJ+7wCbi+ab/KFs+w== + dependencies: + object-visit "^1.0.0" + +markdown-it@11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-11.0.0.tgz#dbfc30363e43d756ebc52c38586b91b90046b876" + integrity sha512-+CvOnmbSubmQFSA9dKz1BRiaSMV7rhexl3sngKqFyXSagoA3fBdJQ8oZWtRy2knXdpDXaBw44euz37DeJQ9asg== + dependencies: + argparse "^1.0.7" + entities "~2.0.0" + linkify-it "^3.0.1" + mdurl "^1.0.1" + uc.micro "^1.0.5" + +markdownlint-cli@^0.24.0: + version "0.24.0" + resolved "https://registry.yarnpkg.com/markdownlint-cli/-/markdownlint-cli-0.24.0.tgz#d1c1d43cd53b87aaec93035b3234eef7097139a8" + integrity sha512-AusUxaX4sFayUBFTCKeHc8+fq73KFqIUW+ZZZYyQ/BvY0MoGAnE2C/3xiawSE7WXmpmguaWzhrXRuY6IrOLX7A== + dependencies: + commander "~2.9.0" + deep-extend "~0.5.1" + get-stdin "~5.0.1" + glob "~7.1.2" + ignore "~5.1.4" + js-yaml "~3.13.1" + jsonc-parser "~2.2.0" + lodash.differencewith "~4.5.0" + lodash.flatten "~4.4.0" + markdownlint "~0.21.0" + markdownlint-rule-helpers "~0.12.0" + minimatch "~3.0.4" + minimist "~1.2.5" + rc "~1.2.7" + +markdownlint-rule-helpers@~0.12.0: + version "0.12.0" + resolved 
"https://registry.yarnpkg.com/markdownlint-rule-helpers/-/markdownlint-rule-helpers-0.12.0.tgz#c41d9b990c50911572e8eb2fba3e6975a5514b7e" + integrity sha512-Q7qfAk+AJvx82ZY52OByC4yjoQYryOZt6D8TKrZJIwCfhZvcj8vCQNuwDqILushtDBTvGFmUPq+uhOb1KIMi6A== + +markdownlint@~0.21.0: + version "0.21.1" + resolved "https://registry.yarnpkg.com/markdownlint/-/markdownlint-0.21.1.tgz#9442afcf12bf65ce9d613212028cf85741677421" + integrity sha512-8kc88w5dyEzlmOWIElp8J17qBgzouOQfJ0LhCcpBFrwgyYK6JTKvILsk4FCEkiNqHkTxwxopT2RS2DYb/10qqg== + dependencies: + markdown-it "11.0.0" + +mcl-wasm@^0.7.1: + version "0.7.9" + resolved "https://registry.yarnpkg.com/mcl-wasm/-/mcl-wasm-0.7.9.tgz#c1588ce90042a8700c3b60e40efb339fc07ab87f" + integrity sha512-iJIUcQWA88IJB/5L15GnJVnSQJmf/YaxxV6zRavv83HILHaJQb6y0iFyDMdDO0gN8X37tdxmAOrH/P8B6RB8sQ== + +md5.js@^1.3.4: + version "1.3.5" + resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" + integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +mdurl@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e" + integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g== + +media-typer@0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== + +memdown@^1.0.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/memdown/-/memdown-1.4.1.tgz#b4e4e192174664ffbae41361aa500f3119efe215" + integrity sha512-iVrGHZB8i4OQfM155xx8akvG9FIj+ht14DX5CQkCTG4EHzZ3d3sgckIf/Lm9ivZalEsFuEVnWv2B2WZvbrro2w== + dependencies: + abstract-leveldown "~2.7.1" + functional-red-black-tree 
"^1.0.1" + immediate "^3.2.3" + inherits "~2.0.1" + ltgt "~2.2.0" + safe-buffer "~5.1.1" + +memdown@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/memdown/-/memdown-3.0.0.tgz#93aca055d743b20efc37492e9e399784f2958309" + integrity sha512-tbV02LfZMWLcHcq4tw++NuqMO+FZX8tNJEiD2aNRm48ZZusVg5N8NART+dmBkepJVye986oixErf7jfXboMGMA== + dependencies: + abstract-leveldown "~5.0.0" + functional-red-black-tree "~1.0.1" + immediate "~3.2.3" + inherits "~2.0.1" + ltgt "~2.2.0" + safe-buffer "~5.1.1" + +memory-level@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/memory-level/-/memory-level-1.0.0.tgz#7323c3fd368f9af2f71c3cd76ba403a17ac41692" + integrity sha512-UXzwewuWeHBz5krr7EvehKcmLFNoXxGcvuYhC41tRnkrTbJohtS7kVn9akmgirtRygg+f7Yjsfi8Uu5SGSQ4Og== + dependencies: + abstract-level "^1.0.0" + functional-red-black-tree "^1.0.1" + module-error "^1.0.1" + +memorystream@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" + integrity sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw== + +merge-descriptors@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + 
+merkle-patricia-tree@3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/merkle-patricia-tree/-/merkle-patricia-tree-3.0.0.tgz#448d85415565df72febc33ca362b8b614f5a58f8" + integrity sha512-soRaMuNf/ILmw3KWbybaCjhx86EYeBbD8ph0edQCTed0JN/rxDt1EBN52Ajre3VyGo+91f8+/rfPIRQnnGMqmQ== + dependencies: + async "^2.6.1" + ethereumjs-util "^5.2.0" + level-mem "^3.0.1" + level-ws "^1.0.0" + readable-stream "^3.0.6" + rlp "^2.0.0" + semaphore ">=1.0.1" + +merkle-patricia-tree@^2.1.2, merkle-patricia-tree@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/merkle-patricia-tree/-/merkle-patricia-tree-2.3.2.tgz#982ca1b5a0fde00eed2f6aeed1f9152860b8208a" + integrity sha512-81PW5m8oz/pz3GvsAwbauj7Y00rqm81Tzad77tHBwU7pIAtN+TJnMSOJhxBKflSVYhptMMb9RskhqHqrSm1V+g== + dependencies: + async "^1.4.2" + ethereumjs-util "^5.0.0" + level-ws "0.0.0" + levelup "^1.2.1" + memdown "^1.0.0" + readable-stream "^2.0.0" + rlp "^2.0.0" + semaphore ">=1.0.1" + +merkletreejs@^0.2.32: + version "0.2.32" + resolved "https://registry.yarnpkg.com/merkletreejs/-/merkletreejs-0.2.32.tgz#cf1c0760e2904e4a1cc269108d6009459fd06223" + integrity sha512-TostQBiwYRIwSE5++jGmacu3ODcKAgqb0Y/pnIohXS7sWxh1gCkSptbmF1a43faehRDpcHf7J/kv0Ml2D/zblQ== + dependencies: + bignumber.js "^9.0.1" + buffer-reverse "^1.0.1" + crypto-js "^3.1.9-1" + treeify "^1.1.0" + web3-utils "^1.3.4" + +methods@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== + +micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + 
extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +micromatch@^4.0.2, micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +miller-rabin@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" + integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== + dependencies: + bn.js "^4.0.0" + brorand "^1.0.1" + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@^2.1.16, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity 
sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +mimic-response@^1.0.0, mimic-response@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" + integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== + +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + +min-document@^2.19.0: + version "2.19.0" + resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" + integrity sha512-9Wy1B3m3f66bPPmU5hdA4DR4PB2OfDU/+GS3yAB7IQozE3tqXaVv2zOjgla7MEGSRv95+ILmOuvhLkOK6wJtCQ== + dependencies: + dom-walk "^0.1.0" + +minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimalistic-crypto-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + integrity sha512-JIYlbt6g8i5jKfJ3xz7rF0LXmv2TkDxBLUkiBeZ7bAx4GnnNMr8xFpGnOxn6GhTEHx3SjRrZEoU+j04prX1ktg== + +minimatch@4.2.1: + version "4.2.1" + resolved 
"https://registry.yarnpkg.com/minimatch/-/minimatch-4.2.1.tgz#40d9d511a46bdc4e563c22c3080cde9c0d8299b4" + integrity sha512-9Uq1ChtSZO+Mxa/CL1eGizn2vRn3MlLgzhT0Iz8zaY8NdvxvB0d5QdPFmCKf7JKA9Lerx5vRrnwO03jsSfGG9g== + dependencies: + brace-expansion "^1.1.7" + +minimatch@5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.0.1.tgz#fb9022f7528125187c92bd9e9b6366be1cf3415b" + integrity sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g== + dependencies: + brace-expansion "^2.0.1" + +minimatch@^3.0.4, minimatch@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@~3.0.4: + version "3.0.8" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" + integrity sha512-6FsRAQsxQ61mw+qP1ZzbL9Bc78x2p5OqNgNpnoAFLTrX8n5Kxph0CsnhmKKNXTWjXqU5L0pGPR7hYk+XWZr60Q== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@~1.2.5, minimist@~1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== + +minipass@^2.6.0, minipass@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" + integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg== + dependencies: + safe-buffer "^5.1.2" + yallist "^3.0.0" + +minizlib@^1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d" 
+ integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q== + dependencies: + minipass "^2.9.0" + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp-classic@^0.5.2: + version "0.5.3" + resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" + integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== + +mkdirp-promise@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/mkdirp-promise/-/mkdirp-promise-5.0.1.tgz#e9b8f68e552c68a9c1713b84883f7a1dd039b8a1" + integrity sha512-Hepn5kb1lJPtVW84RFT40YG1OddBNTOVUZR2bzQUHc+Z03en8/3uX0+060JDhcEzyO08HmipsN9DcnFMxhIL9w== + dependencies: + mkdirp "*" + +mkdirp@*: + version "2.1.3" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.3.tgz#b083ff37be046fd3d6552468c1f0ff44c1545d1f" + integrity sha512-sjAkg21peAG9HS+Dkx7hlG9Ztx7HLeKnvB3NQRcu/mltCVmvkF0pisbiTSfDVYTT86XEfZrTUosLdZLStquZUw== + +mkdirp@^0.5.1, mkdirp@^0.5.5: + version "0.5.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +mnemonist@^0.38.0: + version "0.38.5" + resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.5.tgz#4adc7f4200491237fe0fa689ac0b86539685cade" + integrity sha512-bZTFT5rrPKtPJxj8KSV0WkPyNxl72vQepqqVUAW2ARUpUSF2qXMB6jZj7hW5/k7C1rtpzqbD/IIbJwLXUjCHeg== + dependencies: + obliterator "^2.0.0" + +mocha-steps@^1.3.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/mocha-steps/-/mocha-steps-1.3.0.tgz#2449231ec45ec56810f65502cb22e2571862957f" + integrity sha512-KZvpMJTqzLZw3mOb+EEuYi4YZS41C9iTnb7skVFRxHjUd1OYbl64tCMSmpdIRM9LnwIrSOaRfPtNpF5msgv6Eg== + +mocha@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.1.0.tgz#dbf1114b7c3f9d0ca5de3133906aea3dfc89ef7a" + integrity sha512-vUF7IYxEoN7XhQpFLxQAEMtE4W91acW4B6En9l97MwE9stL1A9gusXfoHZCLVHDUJ/7V5+lbCM6yMqzo5vNymg== + dependencies: + ansi-colors "4.1.1" + browser-stdout "1.3.1" + chokidar "3.5.3" + debug "4.3.4" + diff "5.0.0" + escape-string-regexp "4.0.0" + find-up "5.0.0" + glob "7.2.0" + he "1.2.0" + js-yaml "4.1.0" + log-symbols "4.1.0" + minimatch "5.0.1" + ms "2.1.3" + nanoid "3.3.3" + serialize-javascript "6.0.0" + strip-json-comments "3.1.1" + supports-color "8.1.1" + workerpool "6.2.1" + yargs "16.2.0" + yargs-parser "20.2.4" + yargs-unparser "2.0.0" + +mocha@^9.0.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-9.2.2.tgz#d70db46bdb93ca57402c809333e5a84977a88fb9" + integrity sha512-L6XC3EdwT6YrIk0yXpavvLkn8h+EU+Y5UcCHKECyMbdUIxyMuZj4bX4U9e1nvnvUUvQVsV2VHQr5zLdcUkhW/g== + dependencies: + "@ungap/promise-all-settled" "1.1.2" + ansi-colors "4.1.1" + browser-stdout "1.3.1" + chokidar "3.5.3" + debug "4.3.3" + diff "5.0.0" + escape-string-regexp "4.0.0" + find-up "5.0.0" + glob "7.2.0" + growl "1.10.5" + he "1.2.0" + js-yaml "4.1.0" + log-symbols "4.1.0" + minimatch "4.2.1" + ms "2.1.3" + nanoid "3.3.1" + serialize-javascript "6.0.0" + strip-json-comments "3.1.1" + supports-color "8.1.1" + which "2.0.2" + workerpool "6.2.0" + yargs "16.2.0" + yargs-parser "20.2.4" + yargs-unparser "2.0.0" + +mock-fs@^4.1.0: + version "4.14.0" + resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-4.14.0.tgz#ce5124d2c601421255985e6e94da80a7357b1b18" + integrity sha512-qYvlv/exQ4+svI3UOvPUpLDF0OMX5euvUH0Ny4N5QyRyhNdgAgUrVH3iUINSzEPLvx0kbo/Bp28GJKIqvE7URw== + +module-error@^1.0.1, module-error@^1.0.2: 
+ version "1.0.2" + resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86" + integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@2.1.3, ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +multibase@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/multibase/-/multibase-0.7.0.tgz#1adfc1c50abe05eefeb5091ac0c2728d6b84581b" + integrity sha512-TW8q03O0f6PNFTQDvh3xxH03c8CjGaaYrjkl9UQPG6rz53TQzzxJVCIWVjzcbN/Q5Y53Zd0IBQBMVktVgNx4Fg== + dependencies: + base-x "^3.0.8" + buffer "^5.5.0" + +multibase@~0.6.0: + version "0.6.1" + resolved "https://registry.yarnpkg.com/multibase/-/multibase-0.6.1.tgz#b76df6298536cc17b9f6a6db53ec88f85f8cc12b" + integrity sha512-pFfAwyTjbbQgNc3G7D48JkJxWtoJoBMaR4xQUOuB8RnCgRqaYmWNFeJTTvrJ2w51bjLq2zTby6Rqj9TQ9elSUw== + dependencies: + base-x "^3.0.8" + buffer "^5.5.0" + +multicodec@^0.5.5: + version "0.5.7" + resolved "https://registry.yarnpkg.com/multicodec/-/multicodec-0.5.7.tgz#1fb3f9dd866a10a55d226e194abba2dcc1ee9ffd" + integrity sha512-PscoRxm3f+88fAtELwUnZxGDkduE2HD9Q6GHUOywQLjOGT/HAdhjLDYNZ1e7VR0s0TP0EwZ16LNUTFpoBGivOA== + dependencies: + varint "^5.0.0" + +multicodec@^1.0.0: + version "1.0.4" + resolved 
"https://registry.yarnpkg.com/multicodec/-/multicodec-1.0.4.tgz#46ac064657c40380c28367c90304d8ed175a714f" + integrity sha512-NDd7FeS3QamVtbgfvu5h7fd1IlbaC4EQ0/pgU4zqE2vdHCmBGsUa0TiM8/TdSeG6BMPC92OOCf8F1ocE/Wkrrg== + dependencies: + buffer "^5.6.0" + varint "^5.0.0" + +multihashes@^0.4.15, multihashes@~0.4.15: + version "0.4.21" + resolved "https://registry.yarnpkg.com/multihashes/-/multihashes-0.4.21.tgz#dc02d525579f334a7909ade8a122dabb58ccfcb5" + integrity sha512-uVSvmeCWf36pU2nB4/1kzYZjsXD9vofZKpgudqkceYY5g2aZZXJ5r9lxuzoRLl1OAp28XljXsEJ/X/85ZsKmKw== + dependencies: + buffer "^5.5.0" + multibase "^0.7.0" + varint "^5.0.0" + +mute-stream@0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.7.tgz#3075ce93bc21b8fab43e1bc4da7e8115ed1e7bab" + integrity sha512-r65nCZhrbXXb6dXOACihYApHw2Q6pV0M3V0PSxd74N0+D8nzAdEAITq2oAjA1jVnKI+tGvEBUpqiMh0+rW6zDQ== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +nan@^2.15.0, nan@^2.16.0: + version "2.17.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" + integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== + +nano-json-stream-parser@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/nano-json-stream-parser/-/nano-json-stream-parser-0.1.2.tgz#0cc8f6d0e2b622b479c40d499c46d64b755c6f5f" + integrity sha512-9MqxMH/BSJC7dnLsEMPyfN5Dvoo49IsPFYMcHw3Bcfc2kN0lpHRBSzlMSVx4HGyJ7s9B31CyBTVehWJoQ8Ctew== + +nanoid@3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.1.tgz#6347a18cac88af88f58af0b3594b723d5e99bb35" + integrity 
sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw== + +nanoid@3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.3.tgz#fd8e8b7aa761fe807dba2d1b98fb7241bb724a25" + integrity sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +napi-macros@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" + integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +negotiator@0.6.3: + version "0.6.3" + resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== + +neo-async@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +next-tick@^1.1.0: + version "1.1.0" + 
resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.1.0.tgz#1836ee30ad56d67ef281b22bd199f709449b35eb" + integrity sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ== + +nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +node-addon-api@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-2.0.2.tgz#432cfa82962ce494b132e9d72a15b29f71ff5d32" + integrity sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA== + +node-fetch@^2.6.0, node-fetch@^2.6.1: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@^2.6.7: + version "2.6.8" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.8.tgz#a68d30b162bc1d8fd71a367e81b997e1f4d4937e" + integrity sha512-RZ6dBYuj8dRSfxpUSu+NsdF1dpPpluJxwOp+6IoDp/sH2QNDSvurYsAa+F1WxY2RjA1iP93xhcsUoYbF2XBqVg== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@~1.7.1: + version "1.7.3" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-1.7.3.tgz#980f6f72d85211a5347c6b2bc18c5b84c3eb47ef" + integrity sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ== + dependencies: + encoding "^0.1.11" + is-stream "^1.0.1" + +node-gyp-build@^4.2.0, node-gyp-build@^4.3.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" + integrity 
sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +nofilter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/nofilter/-/nofilter-1.0.4.tgz#78d6f4b6a613e7ced8b015cec534625f7667006e" + integrity sha512-N8lidFp+fCz+TD51+haYdbDGrcBWwuHX40F5+z0qkUjMJ5Tp+rdSuAkMJ9N9eoolDlEVTf6u5icM+cNKkKW2mA== + +normalize-package-data@^2.3.2: + version "2.5.0" + resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" + integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== + dependencies: + hosted-git-info "^2.1.4" + resolve "^1.10.0" + semver "2 || 3 || 4 || 5" + validate-npm-package-license "^3.0.1" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-url@^4.1.0: + version "4.5.1" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" + integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== + +normalize-url@^6.0.1: + version "6.1.0" + resolved 
"https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + +npm-run-all@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/npm-run-all/-/npm-run-all-4.1.5.tgz#04476202a15ee0e2e214080861bff12a51d98fba" + integrity sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ== + dependencies: + ansi-styles "^3.2.1" + chalk "^2.4.1" + cross-spawn "^6.0.5" + memorystream "^0.3.1" + minimatch "^3.0.4" + pidtree "^0.3.0" + read-pkg "^3.0.0" + shell-quote "^1.6.1" + string.prototype.padend "^3.0.0" + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +number-is-nan@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d" + integrity sha512-4jbtZXNAsfZbAHiiqjLPBiCl16dES1zI4Hpzzxw61Tk+loF+sBDBKx1ICKKKwIqQ7M0mFn1TmkN7euSncWgHiQ== + +number-to-bn@1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/number-to-bn/-/number-to-bn-1.7.0.tgz#bb3623592f7e5f9e0030b1977bd41a0c53fe1ea0" + integrity sha512-wsJ9gfSz1/s4ZsJN01lyonwuxA1tml6X1yBDnfpMglypcBRFZZkus26EdPSlqS5GJfYddVZa22p3VNb3z5m5Ig== + dependencies: + bn.js "4.11.6" + strip-hex-prefix "1.0.0" + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-assign@^4, object-assign@^4.0.0, object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1: + 
version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha512-79LYn6VAb63zgtmAteVOWo9Vdj71ZVBy3Pbse+VqxDpEP83XuujMrGqHIwAXJ5I/aM0zU7dIyIAhifVTPrNItQ== + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-inspect@~1.12.3: + version "1.12.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" + integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== + +object-is@^1.0.1: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-keys@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336" + integrity 
sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw== + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha512-GBaMwwAVK9qbQN3Scdo0OyvgPW7l3lnaVMj84uTOZlswkX0KpF6fyDBJhtTthf7pymztoN36/KEr1DyhF96zEA== + dependencies: + isobject "^3.0.0" + +object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.getownpropertydescriptors@^2.1.1: + version "2.1.5" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.5.tgz#db5a9002489b64eef903df81d6623c07e5b4b4d3" + integrity sha512-yDNzckpM6ntyQiGTik1fKV1DcVDRS+w8bvpWNCBanvH5LfRX9O8WTHqQzG4RZwRAM4I0oU7TV11Lj5v0g20ibw== + dependencies: + array.prototype.reduce "^1.0.5" + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ== + dependencies: + isobject "^3.0.1" + +obliterator@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/obliterator/-/obliterator-2.0.4.tgz#fa650e019b2d075d745e44f1effeb13a2adbe816" + integrity sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ== + +oboe@2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/oboe/-/oboe-2.1.4.tgz#20c88cdb0c15371bb04119257d4fdd34b0aa49f6" + integrity 
sha512-ymBJ4xSC6GBXLT9Y7lirj+xbqBLa+jADGJldGEYG7u8sZbS9GyG+u1Xk9c5cbriKwSpCg41qUhPjvU5xOpvIyQ== + dependencies: + http-https "^1.0.0" + +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== + dependencies: + ee-first "1.1.1" + +once@^1.3.0, once@^1.3.1, once@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ== + dependencies: + mimic-fn "^1.0.0" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +open@^7.4.2: + version "7.4.2" + resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321" + integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q== + dependencies: + is-docker "^2.0.0" + is-wsl "^2.1.1" + +optionator@^0.8.2: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + 
type-check "~0.3.2" + word-wrap "~1.2.3" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +os-homedir@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ== + +os-locale@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-1.4.0.tgz#20f9f17ae29ed345e8bde583b13d2009803c14d9" + integrity sha512-PRT7ZORmwu2MEFt4/fv3Q+mEfN4zetKxufQrkShY2oGvUms9r8otu5HfdyIFHkYXjO7laNsoVGmM2MANfuTA8g== + dependencies: + lcid "^1.0.0" + +os-tmpdir@^1.0.1, os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== + +p-cancelable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" + integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== + +p-cancelable@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" + integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== + +p-limit@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" + integrity 
sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q== + dependencies: + p-try "^1.0.0" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-limit@^3.0.2, p-limit@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-locate@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43" + integrity sha512-nQja7m7gSKuewoVRen45CtVfODR3crN3goVQ0DDZ9N3yHxgpkuBhZqsaiotSQRrADUrne346peY7kT3TSACykg== + dependencies: + p-limit "^1.1.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-try@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" + integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww== + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-asn1@^5.0.0, parse-asn1@^5.1.5: + version "5.1.6" + resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" + integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== + dependencies: + asn1.js "^5.2.0" + browserify-aes "^1.0.0" + evp_bytestokey "^1.0.0" + pbkdf2 "^3.0.3" + safe-buffer "^5.1.1" + +parse-headers@^2.0.0: + version "2.0.5" + resolved "https://registry.yarnpkg.com/parse-headers/-/parse-headers-2.0.5.tgz#069793f9356a54008571eb7f9761153e6c770da9" + integrity sha512-ft3iAoLOB/MlwbNXgzy43SWGP6sQki2jQvAyBg/zDFAgr9bfNWZIUj42Kw2eJIl8kEi4PbgE6U1Zau/HwI75HA== + +parse-json@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-2.2.0.tgz#f480f40434ef80741f8469099f8dea18f55a4dc9" + integrity sha512-QR/GGaKCkhwk1ePQNYDRKYZ3mwU9ypsKhB0XyFnLQdomyEqk3e8wpW3V5Jp88zbxK4n5ST1nqo+g9juTpownhQ== + dependencies: + error-ex "^1.2.0" + +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + integrity 
sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw== + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + +parse-json@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw== + +patch-package@6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/patch-package/-/patch-package-6.2.2.tgz#71d170d650c65c26556f0d0fbbb48d92b6cc5f39" + integrity sha512-YqScVYkVcClUY0v8fF0kWOjDYopzIM8e3bj/RU1DPeEF14+dCGm6UeOYm4jvCyxqIEQ5/eJzmbWfDWnUleFNMg== + dependencies: + "@yarnpkg/lockfile" "^1.1.0" + chalk "^2.4.2" + cross-spawn "^6.0.5" + find-yarn-workspace-root "^1.2.1" + fs-extra "^7.0.1" + is-ci "^2.0.0" + klaw-sync "^6.0.0" + minimist "^1.2.0" + rimraf "^2.6.3" + semver "^5.6.0" + slash "^2.0.0" + tmp "^0.0.33" + +patch-package@^6.2.2: + version "6.5.1" + resolved "https://registry.yarnpkg.com/patch-package/-/patch-package-6.5.1.tgz#3e5d00c16997e6160291fee06a521c42ac99b621" + integrity sha512-I/4Zsalfhc6bphmJTlrLoOcAF87jcxko4q0qsv4bGcurbr8IskEOtdnt9iCmsQVGL1B+iUhSQqweyTLJfCF9rA== + dependencies: + "@yarnpkg/lockfile" "^1.1.0" + chalk "^4.1.2" + cross-spawn "^6.0.5" + 
find-yarn-workspace-root "^2.0.0" + fs-extra "^9.0.0" + is-ci "^2.0.0" + klaw-sync "^6.0.0" + minimist "^1.2.6" + open "^7.4.2" + rimraf "^2.6.3" + semver "^5.6.0" + slash "^2.0.0" + tmp "^0.0.33" + yaml "^1.10.2" + +path-browserify@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" + integrity sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g== + +path-exists@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-2.1.0.tgz#0feb6c64f0fc518d9a754dd5efb62c7022761f4b" + integrity sha512-yTltuKuhtNeFJKa1PiRzfLAU5182q1y4Eb4XCJ3PBqyzEDkAZRzBrKKBct682ls9reBVHf9udYLN5Nd+K1B9BQ== + dependencies: + pinkie-promise "^2.0.0" + +path-exists@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" + integrity sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ== + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0, path-is-absolute@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-is-inside@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" + integrity sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w== + +path-key@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.6, path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-to-regexp@0.1.7: + version "0.1.7" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== + +path-type@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-1.1.0.tgz#59c44f7ee491da704da415da5a4070ba4f8fe441" + integrity sha512-S4eENJz1pkiQn9Znv33Q+deTOKmbl+jj1Fl+qiP/vYezj+S8x+J3Uo0ISrx/QoEvIlOaDWJhPaRd1flJ9HXZqg== + dependencies: + graceful-fs "^4.1.2" + pify "^2.0.0" + pinkie-promise "^2.0.0" + +path-type@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f" + integrity sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg== + dependencies: + pify "^3.0.0" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +path@^0.12.7: + version "0.12.7" + resolved 
"https://registry.yarnpkg.com/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f" + integrity sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q== + dependencies: + process "^0.11.1" + util "^0.10.3" + +pathval@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" + integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== + +pbkdf2@^3.0.17, pbkdf2@^3.0.3, pbkdf2@^3.0.9: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" + integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== + dependencies: + create-hash "^1.1.2" + create-hmac "^1.1.4" + ripemd160 "^2.0.1" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pidtree@^0.3.0: + version "0.3.1" + resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.3.1.tgz#ef09ac2cc0533df1f3250ccf2c4d366b0d12114a" + integrity 
sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA== + +pify@^2.0.0, pify@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pify@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176" + integrity sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg== + +pinkie-promise@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/pinkie-promise/-/pinkie-promise-2.0.1.tgz#2135d6dfa7a358c069ac9b178776288228450ffa" + integrity sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw== + dependencies: + pinkie "^2.0.0" + +pinkie@^2.0.0: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pinkie/-/pinkie-2.0.4.tgz#72556b80cfa0d48a974e80e77248e80ed4f7f870" + integrity sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg== + +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity 
sha512-xTgYBc3fuo7Yt7JbiuFxSYGToMoz8fLoE6TC9Wx1P/u+LfeThMOAqmuyECnlBaaJb+u1m9hHiXUEtwW4OzfUJg== + +postinstall-postinstall@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/postinstall-postinstall/-/postinstall-postinstall-2.1.0.tgz#4f7f77441ef539d1512c40bd04c71b06a4704ca3" + integrity sha512-7hQX6ZlZXIoRiWNrbMQaLzUUfH+sSx39u8EJ9HYuDc1kLo9IXKWjM5RSquZN1ad5GnH8CGFM78fsAAQi3OKEEQ== + +precond@0.2: + version "0.2.3" + resolved "https://registry.yarnpkg.com/precond/-/precond-0.2.3.tgz#aa9591bcaa24923f1e0f4849d240f47efc1075ac" + integrity sha512-QCYG84SgGyGzqJ/vlMsxeXd/pgL/I94ixdNFyh1PusWmTCyVfPJjZ1K1jvHtsbfnXQs2TSkEP2fR7QiMZAnKFQ== + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== + +prepend-http@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" + integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== + +prettier-plugin-solidity@=1.0.0-dev.22: + version "1.0.0-dev.22" + resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-dev.22.tgz#08c07816884d17bd9171c073c2421559057230c3" + integrity sha512-0v+O2/sqq6WMlZ2TsnRBXaNmKF4zANn0uLLWuvNra4BjmKUtp33EZ4AVKB26fzWy14BkVGeJfPAtKry0x3SFfQ== + dependencies: + "@solidity-parser/parser" "^0.14.2" + emoji-regex "^10.1.0" + escape-string-regexp "^4.0.0" + semver "^7.3.7" + solidity-comments-extractor "^0.0.7" + string-width "^4.2.3" + +prettier@^1.14.3, 
prettier@^1.18.2: + version "1.19.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb" + integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== + +prettier@^2.1.2: + version "2.8.3" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.3.tgz#ab697b1d3dd46fb4626fbe2f543afe0cc98d8632" + integrity sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw== + +prettier@^2.3.2: + version "2.7.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" + integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== + +pretty-format@^29.0.0, pretty-format@^29.2.1: + version "29.2.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.2.1.tgz#86e7748fe8bbc96a6a4e04fa99172630907a9611" + integrity sha512-Y41Sa4aLCtKAXvwuIpTvcFBkyeYp2gdFWzXGA+ZNES3VwURIB165XO/z7CjETwzCCS53MjW/rLMyyqEnTtaOfA== + dependencies: + "@jest/schemas" "^29.0.0" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +private@^0.1.6, private@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" + integrity sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg== + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +process@^0.11.1, process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity 
sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +promise-to-callback@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/promise-to-callback/-/promise-to-callback-1.0.0.tgz#5d2a749010bfb67d963598fcd3960746a68feef7" + integrity sha512-uhMIZmKM5ZteDMfLgJnoSq9GCwsNKrYau73Awf1jIy6/eUcuuZ3P+CD9zUv0kJsIUbU+x6uLNIhXhLHDs1pNPA== + dependencies: + is-fn "^1.0.0" + set-immediate-shim "^1.0.1" + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +proxy-addr@~2.0.7: + version "2.0.7" + resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== + dependencies: + forwarded "0.2.0" + ipaddr.js "1.9.1" + +prr@~1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/prr/-/prr-1.0.1.tgz#d3fc114ba06995a45ec6893f484ceb1d78f5f476" + integrity sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw== + +pseudomap@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" + integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== + +psl@^1.1.28: + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity 
sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== + +public-encrypt@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" + integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== + dependencies: + bn.js "^4.1.0" + browserify-rsa "^4.0.0" + create-hash "^1.1.0" + parse-asn1 "^5.0.0" + randombytes "^2.0.1" + safe-buffer "^5.1.2" + +pull-cat@^1.1.9: + version "1.1.11" + resolved "https://registry.yarnpkg.com/pull-cat/-/pull-cat-1.1.11.tgz#b642dd1255da376a706b6db4fa962f5fdb74c31b" + integrity sha512-i3w+xZ3DCtTVz8S62hBOuNLRHqVDsHMNZmgrZsjPnsxXUgbWtXEee84lo1XswE7W2a3WHyqsNuDJTjVLAQR8xg== + +pull-defer@^0.2.2: + version "0.2.3" + resolved "https://registry.yarnpkg.com/pull-defer/-/pull-defer-0.2.3.tgz#4ee09c6d9e227bede9938db80391c3dac489d113" + integrity sha512-/An3KE7mVjZCqNhZsr22k1Tx8MACnUnHZZNPSJ0S62td8JtYr/AiRG42Vz7Syu31SoTLUzVIe61jtT/pNdjVYA== + +pull-level@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/pull-level/-/pull-level-2.0.4.tgz#4822e61757c10bdcc7cf4a03af04c92734c9afac" + integrity sha512-fW6pljDeUThpq5KXwKbRG3X7Ogk3vc75d5OQU/TvXXui65ykm+Bn+fiktg+MOx2jJ85cd+sheufPL+rw9QSVZg== + dependencies: + level-post "^1.0.7" + pull-cat "^1.1.9" + pull-live "^1.0.1" + pull-pushable "^2.0.0" + pull-stream "^3.4.0" + pull-window "^2.1.4" + stream-to-pull-stream "^1.7.1" + +pull-live@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/pull-live/-/pull-live-1.0.1.tgz#a4ecee01e330155e9124bbbcf4761f21b38f51f5" + integrity sha512-tkNz1QT5gId8aPhV5+dmwoIiA1nmfDOzJDlOOUpU5DNusj6neNd3EePybJ5+sITr2FwyCs/FVpx74YMCfc8YeA== + dependencies: + pull-cat "^1.1.9" + pull-stream "^3.4.0" + +pull-pushable@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/pull-pushable/-/pull-pushable-2.2.0.tgz#5f2f3aed47ad86919f01b12a2e99d6f1bd776581" + integrity 
sha512-M7dp95enQ2kaHvfCt2+DJfyzgCSpWVR2h2kWYnVsW6ZpxQBx5wOu0QWOvQPVoPnBLUZYitYP2y7HyHkLQNeGXg== + +pull-stream@^3.2.3, pull-stream@^3.4.0, pull-stream@^3.6.8: + version "3.7.0" + resolved "https://registry.yarnpkg.com/pull-stream/-/pull-stream-3.7.0.tgz#85de0e44ff38a4d2ad08cc43fc458e1922f9bf0b" + integrity sha512-Eco+/R004UaCK2qEDE8vGklcTG2OeZSVm1kTUQNrykEjDwcFXDZhygFDsW49DbXyJMEhHeRL3z5cRVqPAhXlIw== + +pull-window@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/pull-window/-/pull-window-2.1.4.tgz#fc3b86feebd1920c7ae297691e23f705f88552f0" + integrity sha512-cbDzN76BMlcGG46OImrgpkMf/VkCnupj8JhsrpBw3aWBM9ye345aYnqitmZCgauBkc0HbbRRn9hCnsa3k2FNUg== + dependencies: + looper "^2.0.0" + +pump@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954" + integrity sha512-8k0JupWme55+9tCVE+FS5ULT3K6AbgqrGa58lTT49RpyfwwcGedHqaC5LlQNdEAumn/wFsu6aPwkuPMioy8kqw== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +pump@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" + integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== + dependencies: + end-of-stream "^1.1.0" + once "^1.3.1" + +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw== + +punycode@2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.0.tgz#5f863edc89b96db09074bad7947bf09056ca4e7d" + integrity sha512-Yxz2kRwT90aPiWEMHVYnEf4+rhwF1tBmmZ4KepCP+Wkium9JxtWnUm1nqGwpiAHr/tnTSeHqr3wb++jgSkXjhA== + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity 
sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +punycode@^2.1.1: + version "2.3.0" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== + +qs@6.11.0, qs@^6.7.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + dependencies: + side-channel "^1.0.4" + +qs@~6.5.2: + version "6.5.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== + +query-string@^5.0.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/query-string/-/query-string-5.1.1.tgz#a78c012b71c17e05f2e3fa2319dd330682efb3cb" + integrity sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw== + dependencies: + decode-uri-component "^0.2.0" + object-assign "^4.1.0" + strict-uri-encode "^1.0.0" + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== + +querystring@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd" + integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg== + +queue-microtask@^1.2.2, queue-microtask@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + 
integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + +randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.0.6, randombytes@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +randomfill@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" + integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== + dependencies: + randombytes "^2.0.5" + safe-buffer "^5.1.0" + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +raw-body@2.5.1, raw-body@^2.4.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@~1.2.7: + version "1.2.8" + resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed" + integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== + dependencies: + deep-extend 
"^0.6.0" + ini "~1.3.0" + minimist "^1.2.0" + strip-json-comments "~2.0.1" + +react-is@^18.0.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +read-pkg-up@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-1.0.1.tgz#9d63c13276c065918d57f002a57f40a1b643fb02" + integrity sha512-WD9MTlNtI55IwYUS27iHh9tK3YoIVhxis8yKhLpTqWtml739uXc9NWTpxoHkfZf3+DkCCsXox94/VWZniuZm6A== + dependencies: + find-up "^1.0.0" + read-pkg "^1.0.0" + +read-pkg@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28" + integrity sha512-7BGwRHqt4s/uVbuyoeejRn4YmFnYZiFl4AuaeXHlgZf3sONF0SOGlxs2Pw8g6hCKupo08RafIO5YXFNOKTfwsQ== + dependencies: + load-json-file "^1.0.0" + normalize-package-data "^2.3.2" + path-type "^1.0.0" + +read-pkg@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-3.0.0.tgz#9cbc686978fee65d16c00e2b19c237fcf6e38389" + integrity sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA== + dependencies: + load-json-file "^4.0.0" + normalize-package-data "^2.3.2" + path-type "^3.0.0" + +readable-stream@^1.0.33: + version "1.1.14" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9" + integrity sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.2.2, readable-stream@^2.2.8, readable-stream@^2.2.9, readable-stream@^2.3.0, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6: + version "2.3.7" + 
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readable-stream@~1.0.15, readable-stream@~1.0.26-4: + version "1.0.34" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c" + integrity sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.1" + isarray "0.0.1" + string_decoder "~0.10.x" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +regenerate@^1.2.1: + version "1.4.2" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" + integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== + +regenerator-runtime@^0.11.0: + version "0.11.1" + resolved 
"https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" + integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== + +regenerator-transform@^0.10.0: + version "0.10.1" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.10.1.tgz#1e4996837231da8b7f3cf4114d71b5691a0680dd" + integrity sha512-PJepbvDbuK1xgIgnau7Y90cwaAmO/LCLMI2mPvaXq2heGMR3aWW5/BQvYrhJ8jgmQjXewXvBjzfqKcVOmhjZ6Q== + dependencies: + babel-runtime "^6.18.0" + babel-types "^6.19.0" + private "^0.1.6" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" + integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== + +regexpp@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +regexpu-core@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" + integrity sha512-tJ9+S4oKjxY8IZ9jmjnp/mtytu1u3iyIQAfmI51IKWH6bFf7XR1ybtaO6j7INhZKXOTYADk7V5qxaqLkmNxiZQ== + dependencies: + regenerate "^1.2.1" + regjsgen "^0.2.0" + regjsparser "^0.1.4" + +regjsgen@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.2.0.tgz#6c016adeac554f75823fe37ac05b92d5a4edb1f7" + integrity sha512-x+Y3yA24uF68m5GA+tBjbGYo64xXVJpbToBaWCoSNSc1hdk6dfctaRWrNFTVJZIIhL5GxW8zwjoixbnifnK59g== + +regjsparser@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.1.5.tgz#7ee8f84dc6fa792d3fd0ae228d24bd949ead205c" + integrity sha512-jlQ9gYLfk2p3V5Ag5fYhA7fv7OHzd1KUH0PRP46xc3TgwjwgROIW572AfYg/X9kaNq/LJnu6oJcFRXlIrGoTRw== + dependencies: + jsesc "~0.5.0" + +repeat-element@^1.1.2: + version "1.1.4" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" + integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== + +repeating@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/repeating/-/repeating-2.0.1.tgz#5214c53a926d3552707527fbab415dbc08d06dda" + integrity sha512-ZqtSMuVybkISo2OWvqvm7iHSWngvdaW3IpsT9/uP8v4gMi591LY6h35wdOfvQdWCKFWZWm2Y1Opp4kV7vQKT6A== + dependencies: + is-finite "^1.0.0" + +request@^2.79.0, request@^2.85.0: + version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + 
dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^1.1.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-1.2.1.tgz#529c9ccef27380adfec9a2f965b649bbee636418" + integrity sha512-H7AkJWMobeskkttHyhTVtS0fxpFLjxhbfMa6Bk3wimP7sdPRGL3EyCg3sAQenFfAe+xQ+oAc85Nmtvq0ROM83Q== + +require-from-string@^2.0.0, require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +require-main-filename@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" + integrity sha512-IqSUtOVP4ksd1C/ej5zeEh/BIP2ajqpn8c5x+q99gvcIG/Qf0cud5raVnE/Dwd0ua9TXYDoDc0RE5hBSdz22Ug== + +resolve-alpn@^1.0.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" + integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + integrity sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha512-ZuF55hVUQaaczgOIwqWzkEcEidmlD/xl44x1UZnhOXcYuFN2S6+rcxpG+C1N3So0wvNI3DmJICUFfu2SxhBmvg== + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@1.17.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" + integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== + dependencies: + path-parse "^1.0.6" + +resolve@^1.10.0, resolve@^1.12.0, resolve@^1.20.0, resolve@^1.8.1, resolve@~1.22.1: 
+ version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +responselike@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7" + integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== + dependencies: + lowercase-keys "^1.0.0" + +responselike@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" + integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== + dependencies: + lowercase-keys "^2.0.0" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q== + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +resumer@~0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/resumer/-/resumer-0.0.0.tgz#f1e8f461e4064ba39e82af3cdc2a8c893d076759" + integrity sha512-Fn9X8rX8yYF4m81rZCK/5VmrmsSbqS/i3rDLl6ZZHAXgC2nTAx3dhwG8q8odP/RmdLa2YrybDJaAMg+X1ajY3w== + dependencies: + through "~2.3.4" + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + 
integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@2.6.3: + version "2.6.3" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" + integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== + dependencies: + glob "^7.1.3" + +rimraf@^2.2.8, rimraf@^2.6.3: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +ripemd160@^2.0.0, ripemd160@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" + integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + +rlp@^2.0.0, rlp@^2.2.1, rlp@^2.2.2, rlp@^2.2.3, rlp@^2.2.4: + version "2.2.7" + resolved "https://registry.yarnpkg.com/rlp/-/rlp-2.2.7.tgz#33f31c4afac81124ac4b283e2bd4d9720b30beaf" + integrity sha512-d5gdPmgQ0Z+AklL2NVXr/IoSjNZFfTVvQWzL/AM2AOcSzYP2xjlb0AC8YyCLc41MSNf6P6QVtjgPdmVtzb+4lQ== + dependencies: + bn.js "^5.2.0" + +run-async@^2.2.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + +run-parallel-limit@^1.1.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/run-parallel-limit/-/run-parallel-limit-1.1.0.tgz#be80e936f5768623a38a963262d6bef8ff11e7ba" + integrity sha512-jJA7irRNM91jaKc3Hcl1npHsFLOXOoTkPCUL1JEa1R82O2miplXXRaGdjW/KM/98YQWDhJLiSs793CnXfblJUw== + dependencies: + queue-microtask "^1.2.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rustbn.js@~0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/rustbn.js/-/rustbn.js-0.2.0.tgz#8082cb886e707155fd1cb6f23bd591ab8d55d0ca" + integrity sha512-4VlvkRUuCJvr2J6Y0ImW7NvTCriMi7ErOAqWk1y69vAdoNIzCF3yPmgeNzx+RQTLEDFq5sHfscn1MwHxP9hNfA== + +rxjs@^6.4.0: + version "6.6.7" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" + integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== + dependencies: + tslib "^1.9.0" + +safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@^5.2.1, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-event-emitter@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/safe-event-emitter/-/safe-event-emitter-1.0.1.tgz#5b692ef22329ed8f69fdce607e50ca734f6f20af" + integrity sha512-e1wFe99A91XYYxoQbcq2ZJUWurxEyP8vfz7A7vuUe1s95q8r5ebraVaA1BukYJcpM6V16ugWoD9vngi8Ccu5fg== + dependencies: + events "^3.0.0" + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg== + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3", "safer-buffer@>= 2.1.2 < 3.0.0", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +scrypt-js@3.0.1, scrypt-js@^3.0.0, scrypt-js@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/scrypt-js/-/scrypt-js-3.0.1.tgz#d314a57c2aef69d1ad98a138a21fe9eafa9ee312" + integrity sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA== + +scryptsy@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/scryptsy/-/scryptsy-1.2.1.tgz#a3225fa4b2524f802700761e2855bdf3b2d92163" + integrity sha512-aldIRgMozSJ/Gl6K6qmJZysRP82lz83Wb42vl4PWN8SaLFHIaOzLPc9nUUW2jQN88CuGm5q5HefJ9jZ3nWSmTw== + dependencies: + pbkdf2 "^3.0.3" + +secp256k1@^4.0.1: + version "4.0.3" + resolved 
"https://registry.yarnpkg.com/secp256k1/-/secp256k1-4.0.3.tgz#c4559ecd1b8d3c1827ed2d1b94190d69ce267303" + integrity sha512-NLZVf+ROMxwtEj3Xa562qgv2BK5e2WNmXPiOdVIPLgs6lyTzMvBq0aWTYMI5XCP9jZMVKOcqZLw/Wc4vDkuxhA== + dependencies: + elliptic "^6.5.4" + node-addon-api "^2.0.0" + node-gyp-build "^4.2.0" + +seedrandom@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/seedrandom/-/seedrandom-3.0.1.tgz#eb3dde015bcf55df05a233514e5df44ef9dce083" + integrity sha512-1/02Y/rUeU1CJBAGLebiC5Lbo5FnB22gQbIFFYTLkwvp1xdABZJH1sn4ZT1MzXmPpzv+Rf/Lu2NcsLJiK4rcDg== + +semaphore@>=1.0.1, semaphore@^1.0.3, semaphore@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/semaphore/-/semaphore-1.1.0.tgz#aaad8b86b20fe8e9b32b16dc2ee682a8cd26a8aa" + integrity sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA== + +"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +semver@7.x, semver@^7.2.1, semver@^7.3.5, semver@^7.3.7: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@~5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.4.1.tgz#e059c09d8571f0540823733433505d3a2f00b18e" + integrity 
sha512-WfG/X9+oATh81XtllIo/I8gOiY9EXRdv1cQdyykeXK17YcUW3EXUAi2To4pcH6nZtJPr7ZOpM5OMyWJZm+8Rsg== + +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + +serialize-javascript@6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" + integrity sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag== + dependencies: + randombytes "^2.1.0" + +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.18.0" + +servify@^0.1.12: + version "0.1.12" + resolved "https://registry.yarnpkg.com/servify/-/servify-0.1.12.tgz#142ab7bee1f1d033b66d0707086085b17c06db95" + integrity sha512-/xE6GvsKKqyo1BAY+KxOWXcLpPsUUyji7Qg3bVD7hh1eRze5bR1uYiuDA/k3Gof1s9BTzQZEJK8sNcNGFIzeWw== + dependencies: + body-parser "^1.16.0" + cors "^2.8.1" + express "^4.14.0" + request "^2.79.0" + xhr "^2.3.3" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== + +set-immediate-shim@^1.0.1: + version "1.0.1" + 
resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" + integrity sha512-Li5AOqrZWCVA2n5kryzEmqai6bKSIvpz5oUJHPVj6+dsbD3X1ixtsY5tEnsaNpH3pFAHmG8eIHUrtEtohrg+UQ== + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setimmediate@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== + +setprototypeof@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== + +sha.js@^2.4.0, sha.js@^2.4.8: + version "2.4.11" + resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +shebang-command@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg== + dependencies: + shebang-regex "^1.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.6.1: + version "1.7.4" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.4.tgz#33fe15dee71ab2a81fcbd3a52106c5cfb9fb75d8" + integrity sha512-8o/QEhSSRb1a5i7TFR0iM4G16Z0vYB2OQVs4G3aAFXjn3T6yEx8AZxy1PgDF7I00LZHYA3WxaSYIf5e5sAX8Rw== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +simple-concat@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/simple-concat/-/simple-concat-1.0.1.tgz#f46976082ba35c2263f1c8ab5edfe26c41c9552f" + integrity 
sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== + +simple-get@^2.7.0: + version "2.8.2" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-2.8.2.tgz#5708fb0919d440657326cd5fe7d2599d07705019" + integrity sha512-Ijd/rV5o+mSBBs4F/x9oDPtTx9Zb6X9brmnXvMW4J7IR15ngi9q5xxqWBKU744jTZiaXtxaPL7uHG6vtN8kUkw== + dependencies: + decompress-response "^3.3.0" + once "^1.3.1" + simple-concat "^1.0.0" + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" + integrity sha512-3TYDR7xWt4dIqV2JauJr+EJeW356RXijHeUlO+8djJ+uBXPn8/2dpzBc8yQhh583sVvc9CvFAeQVgijsH+PNNg== + +slash@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" + integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" + integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== + dependencies: + ansi-styles "^3.2.0" + astral-regex "^1.0.0" + is-fullwidth-code-point "^2.0.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity 
sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +solc@0.7.3: + version "0.7.3" + resolved "https://registry.yarnpkg.com/solc/-/solc-0.7.3.tgz#04646961bd867a744f63d2b4e3c0701ffdc7d78a" + integrity sha512-GAsWNAjGzIDg7VxzP6mPjdurby3IkGCjQcM8GFYZT6RyaoUZKmMU6Y7YwG+tFGhv7dwZ8rmR4iwFDrrD99JwqA== + dependencies: + command-exists "^1.2.8" + commander "3.0.2" + follow-redirects "^1.12.1" + fs-extra "^0.30.0" + js-sha3 "0.8.0" + memorystream "^0.3.1" + require-from-string "^2.0.0" + semver "^5.5.0" + tmp "0.0.33" + +solc@0.8.17: + version "0.8.17" + resolved "https://registry.yarnpkg.com/solc/-/solc-0.8.17.tgz#c748fec6a64bf029ec406aa9b37e75938d1115ae" + integrity 
sha512-Dtidk2XtTTmkB3IKdyeg6wLYopJnBVxdoykN8oP8VY3PQjN16BScYoUJTXFm2OP7P0hXNAqWiJNmmfuELtLf8g== + dependencies: + command-exists "^1.2.8" + commander "^8.1.0" + follow-redirects "^1.12.1" + js-sha3 "0.8.0" + memorystream "^0.3.1" + semver "^5.5.0" + tmp "0.0.33" + +solc@^0.4.20: + version "0.4.26" + resolved "https://registry.yarnpkg.com/solc/-/solc-0.4.26.tgz#5390a62a99f40806b86258c737c1cf653cc35cb5" + integrity sha512-o+c6FpkiHd+HPjmjEVpQgH7fqZ14tJpXhho+/bQXlXbliLIS/xjXb42Vxh+qQY1WCSTMQ0+a5vR9vi0MfhU6mA== + dependencies: + fs-extra "^0.30.0" + memorystream "^0.3.1" + require-from-string "^1.1.0" + semver "^5.3.0" + yargs "^4.7.1" + +solc@^0.6.3: + version "0.6.12" + resolved "https://registry.yarnpkg.com/solc/-/solc-0.6.12.tgz#48ac854e0c729361b22a7483645077f58cba080e" + integrity sha512-Lm0Ql2G9Qc7yPP2Ba+WNmzw2jwsrd3u4PobHYlSOxaut3TtUbj9+5ZrT6f4DUpNPEoBaFUOEg9Op9C0mk7ge9g== + dependencies: + command-exists "^1.2.8" + commander "3.0.2" + fs-extra "^0.30.0" + js-sha3 "0.8.0" + memorystream "^0.3.1" + require-from-string "^2.0.0" + semver "^5.5.0" + tmp "0.0.33" + +solhint@^3.3.2: + version "3.3.7" + resolved "https://registry.yarnpkg.com/solhint/-/solhint-3.3.7.tgz#b5da4fedf7a0fee954cb613b6c55a5a2b0063aa7" + integrity sha512-NjjjVmXI3ehKkb3aNtRJWw55SUVJ8HMKKodwe0HnejA+k0d2kmhw7jvpa+MCTbcEgt8IWSwx0Hu6aCo/iYOZzQ== + dependencies: + "@solidity-parser/parser" "^0.14.1" + ajv "^6.6.1" + antlr4 "4.7.1" + ast-parents "0.0.1" + chalk "^2.4.2" + commander "2.18.0" + cosmiconfig "^5.0.7" + eslint "^5.6.0" + fast-diff "^1.1.2" + glob "^7.1.3" + ignore "^4.0.6" + js-yaml "^3.12.0" + lodash "^4.17.11" + semver "^6.3.0" + optionalDependencies: + prettier "^1.14.3" + +solidity-comments-extractor@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/solidity-comments-extractor/-/solidity-comments-extractor-0.0.7.tgz#99d8f1361438f84019795d928b931f4e5c39ca19" + integrity sha512-wciNMLg/Irp8OKGrh3S2tfvZiZ0NEyILfcRCXCD4mp7SgK/i9gzLfhY2hY7VMCQJ3kH9UB9BzNdibIVMchzyYw== + 
+solpp@^0.11.5: + version "0.11.5" + resolved "https://registry.yarnpkg.com/solpp/-/solpp-0.11.5.tgz#e5f38b5acc952e1cc2e3871d490fdbed910938dd" + integrity sha512-LjzCGMrTDXtera2C4mbQGZSpBznP+o3/82L2CneAAMNbm+t4xPsvfrgJkIaY+IZ5YLrB8IXn7cYthwHMKvAWnQ== + dependencies: + antlr4 "~4.8.0" + axios "^0.21.1" + bn-str-256 "^1.9.1" + commander "^2.19.0" + ethereumjs-util "^6.0.0" + lodash "^4.17.11" + mz "^2.7.0" + resolve "^1.10.0" + semver "^5.6.0" + +source-map-resolve@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-support@0.5.12: + version "0.5.12" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.12.tgz#b4f3b10d51857a5af0138d3ce8003b201613d599" + integrity sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-support@0.5.13: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-support@^0.4.15: + version "0.4.18" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f" + integrity sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== + dependencies: + source-map "^0.5.6" + +source-map-support@^0.5.13: + version "0.5.21" + resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.1.tgz#0af66605a745a5a2f91cf1bbf8a7afbc283dec56" + integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== + +source-map@^0.5.6, source-map@^0.5.7: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +spdx-correct@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" + integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== + dependencies: + spdx-expression-parse "^3.0.0" + spdx-license-ids "^3.0.0" + +spdx-exceptions@^2.1.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" + integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== + +spdx-expression-parse@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" + integrity 
sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== + dependencies: + spdx-exceptions "^2.1.0" + spdx-license-ids "^3.0.0" + +spdx-license-ids@^3.0.0: + version "3.0.12" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz#69077835abe2710b65f03969898b6637b505a779" + integrity sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA== + +split-ca@^1.0.0, split-ca@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/split-ca/-/split-ca-1.0.1.tgz#6c83aff3692fa61256e0cd197e05e9de157691a6" + integrity sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ== + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +ssh2@^1.11.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.11.0.tgz#ce60186216971e12f6deb553dcf82322498fe2e4" + integrity sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw== + dependencies: + asn1 "^0.2.4" + bcrypt-pbkdf "^1.0.2" + optionalDependencies: + cpu-features "~0.0.4" + nan "^2.16.0" + +sshpk@^1.7.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" + integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== + dependencies: + asn1 "~0.2.3" + assert-plus 
"^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +stack-utils@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +stacktrace-parser@^0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz#29fb0cae4e0d0b85155879402857a1639eb6051a" + integrity sha512-KJP1OCML99+8fhOHxwwzyWrlUuVX5GQ0ZpJTd1DFXhdkrvg1szxfHhawXUZ3g9TkXORQd4/WG68jMlQZ2p8wlg== + dependencies: + type-fest "^0.7.1" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha512-72E9+uLc27Mt718pMHt9VMNiAL4LMsmDbBva8mxWUCkT07fSzEGMYUCk0XWY6lp0j6RBAG4cJ3mWuZv2OE3s0g== + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +stream-to-pull-stream@^1.7.1: + version "1.7.3" + resolved "https://registry.yarnpkg.com/stream-to-pull-stream/-/stream-to-pull-stream-1.7.3.tgz#4161aa2d2eb9964de60bfa1af7feaf917e874ece" + integrity sha512-6sNyqJpr5dIOQdgNy/xcDWwDuzAsAwVzhzrWlAPAQ7Lkjx/rv0wgvxEyKwTq6FmNd5rjTrELt/CLmaSw7crMGg== + dependencies: + looper "^3.0.0" + pull-stream "^3.2.3" + +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity 
sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + +strict-uri-encode@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" + integrity sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ== + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" + integrity sha512-0XsVpQLnVCXHJfyEs8tC0zpTVIr5PKKsQtkT29IwupnPTjtPmQ3xT/4yCREF9hYkV/3M3kzcUTSAZT6a6h81tw== + dependencies: + code-point-at "^1.0.0" + is-fullwidth-code-point "^1.0.0" + strip-ansi "^3.0.0" + +string-width@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== + dependencies: + is-fullwidth-code-point "^2.0.0" + strip-ansi "^4.0.0" + +string-width@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" + integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== + dependencies: + emoji-regex "^7.0.1" + is-fullwidth-code-point "^2.0.0" + strip-ansi "^5.1.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved 
"https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.padend@^3.0.0: + version "3.1.3" + resolved "https://registry.yarnpkg.com/string.prototype.padend/-/string.prototype.padend-3.1.3.tgz#997a6de12c92c7cb34dc8a201a6c53d9bd88a5f1" + integrity sha512-jNIIeokznm8SD/TZISQsZKYu7RJyheFNt84DUPrh482GC8RVp2MKqm2O5oBRdGxbDQoXrhhWtPIWQOiy20svUg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +string.prototype.trim@~1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" + integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimend@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" + integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved 
"https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" + integrity sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~0.10.x: + version "0.10.31" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-0.10.31.tgz#62e203bc41766c6c28c9fc84301dab1c5310fa94" + integrity sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ== + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0, strip-ansi@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg== + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + 
version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== + dependencies: + ansi-regex "^3.0.0" + +strip-ansi@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" + integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== + dependencies: + ansi-regex "^4.1.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e" + integrity sha512-kwrX1y7czp1E69n2ajbG65mIo9dqvJ+8aBQXOGVxqwvNbsXdFM6Lq37dLAY3mknUwru8CfcCbfOLL/gMo+fi3g== + dependencies: + is-utf8 "^0.2.0" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity 
sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-hex-prefix@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz#0c5f155fef1151373377de9dbb588da05500e36f" + integrity sha512-q8d4ue7JGEiVcypji1bALTos+0pWtyGlivAWyPuTkHzuTCJqrK9sWxYQZUq6Nq3cuyv3bm734IhHvHtGGURU6A== + dependencies: + is-hex-prefixed "1.0.0" + +strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +strip-json-comments@^2.0.1, strip-json-comments@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" + integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== + +supports-color@8.1.1, supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + 
has-flag "^3.0.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +swarm-js@^0.1.40: + version "0.1.42" + resolved "https://registry.yarnpkg.com/swarm-js/-/swarm-js-0.1.42.tgz#497995c62df6696f6e22372f457120e43e727979" + integrity sha512-BV7c/dVlA3R6ya1lMlSSNPLYrntt0LUq4YMgy3iwpCIc6rZnS5W2wUoctarZ5pXlpKtxDDf9hNziEkcfrxdhqQ== + dependencies: + bluebird "^3.5.0" + buffer "^5.0.5" + eth-lib "^0.1.26" + fs-extra "^4.0.2" + got "^11.8.5" + mime-types "^2.1.16" + mkdirp-promise "^5.0.1" + mock-fs "^4.1.0" + setimmediate "^1.0.5" + tar "^4.0.2" + xhr-request "^1.0.1" + +table@^5.2.3: + version "5.4.6" + resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" + integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== + dependencies: + ajv "^6.10.2" + lodash "^4.17.14" + slice-ansi "^2.1.0" + string-width "^3.0.0" + +table@^6.0.9: + version "6.8.1" + resolved "https://registry.yarnpkg.com/table/-/table-6.8.1.tgz#ea2b71359fe03b017a5fbc296204471158080bdf" + integrity sha512-Y4X9zqrCftUhMeH2EptSSERdVKt/nEdijTOacGD/97EKjhQ/Qs8RTlEGABSJNNN8lac9kheH+af7yAkEWlgneA== + dependencies: + ajv "^8.0.1" + lodash.truncate "^4.4.2" + slice-ansi "^4.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + +tabtab@^3.0.2: + version "3.0.2" + resolved 
"https://registry.yarnpkg.com/tabtab/-/tabtab-3.0.2.tgz#a2cea0f1035f88d145d7da77eaabbd3fe03e1ec9" + integrity sha512-jANKmUe0sIQc/zTALTBy186PoM/k6aPrh3A7p6AaAfF6WPSbTx1JYeGIGH162btpH+mmVEXln+UxwViZHO2Jhg== + dependencies: + debug "^4.0.1" + es6-promisify "^6.0.0" + inquirer "^6.0.0" + minimist "^1.2.0" + mkdirp "^0.5.1" + untildify "^3.0.3" + +tape@^4.6.3: + version "4.16.2" + resolved "https://registry.yarnpkg.com/tape/-/tape-4.16.2.tgz#7565e6af20426565557266e9dda7215869b297b6" + integrity sha512-TUChV+q0GxBBCEbfCYkGLkv8hDJYjMdSWdE0/Lr331sB389dsvFUHNV9ph5iQqKzt8Ss9drzcda/YeexclBFqg== + dependencies: + call-bind "~1.0.2" + deep-equal "~1.1.1" + defined "~1.0.1" + dotignore "~0.1.2" + for-each "~0.3.3" + glob "~7.2.3" + has "~1.0.3" + inherits "~2.0.4" + is-regex "~1.1.4" + minimist "~1.2.7" + object-inspect "~1.12.3" + resolve "~1.22.1" + resumer "~0.0.0" + string.prototype.trim "~1.2.7" + through "~2.3.8" + +tar-fs@~1.16.3: + version "1.16.3" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-1.16.3.tgz#966a628841da2c4010406a82167cbd5e0c72d509" + integrity sha512-NvCeXpYx7OsmOh8zIOP/ebG55zZmxLE0etfWRbWok+q2Qo8x/vOR/IJT1taADXPe+jsiu9axDb3X4B+iIgNlKw== + dependencies: + chownr "^1.0.1" + mkdirp "^0.5.1" + pump "^1.0.0" + tar-stream "^1.1.2" + +tar-fs@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.1.tgz#e44086c1c60d31a4f0cf893b1c4e155dabfae9e2" + integrity sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.0.0" + +tar-stream@^1.1.2: + version "1.6.2" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555" + integrity sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A== + dependencies: + bl "^1.0.0" + buffer-alloc "^1.2.0" + end-of-stream "^1.0.0" + fs-constants "^1.0.0" + 
readable-stream "^2.3.0" + to-buffer "^1.1.1" + xtend "^4.0.0" + +tar-stream@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" + integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== + dependencies: + bl "^4.0.3" + end-of-stream "^1.4.1" + fs-constants "^1.0.0" + inherits "^2.0.3" + readable-stream "^3.1.1" + +tar@^4.0.2: + version "4.4.19" + resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.19.tgz#2e4d7263df26f2b914dee10c825ab132123742f3" + integrity sha512-a20gEsvHnWe0ygBY8JbxoM4w3SJdhc7ZAuxkLqh+nvNQN2IOt0B5lLgM490X5Hl8FF0dl0tOf2ewFYAlIFgzVA== + dependencies: + chownr "^1.1.4" + fs-minipass "^1.2.7" + minipass "^2.9.0" + minizlib "^1.3.3" + mkdirp "^0.5.5" + safe-buffer "^5.2.1" + yallist "^3.1.1" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +test-value@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/test-value/-/test-value-2.1.0.tgz#11da6ff670f3471a73b625ca4f3fdcf7bb748291" + integrity sha512-+1epbAxtKeXttkGFMTX9H42oqzOTufR1ceCF+GYA5aOmvaPq9wd4PUS8329fn2RRLGNeUkgRLnVpycjx8DsO2w== + dependencies: + array-back "^1.0.3" + typical "^2.6.0" + +testrpc@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/testrpc/-/testrpc-0.0.1.tgz#83e2195b1f5873aec7be1af8cbe6dcf39edb7aed" + integrity sha512-afH1hO+SQ/VPlmaLUFj2636QMeDvPCeQMc/9RBMW0IfjNe9gFD9Ra3ShqYkB7py0do1ZcCna/9acHyzTJ+GcNA== + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity 
sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +through2@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + +"through@>=2.2.7 <3", through@^2.3.6, through@~2.3.4, through@~2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + +timed-out@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" + integrity sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA== + +tmp@0.0.33, tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +tmp@0.1.0: + version "0.1.0" + resolved 
"https://registry.yarnpkg.com/tmp/-/tmp-0.1.0.tgz#ee434a4e22543082e294ba6201dcc6eafefa2877" + integrity sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw== + dependencies: + rimraf "^2.6.3" + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-buffer@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/to-buffer/-/to-buffer-1.1.1.tgz#493bd48f62d7c43fcded313a03dcadb2e1213a80" + integrity sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg== + +to-fast-properties@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" + integrity sha512-lxrWP8ejsq+7E3nNjwYmUBMAgjMTZoTI+sdBOpvNyijeDLa29LUn9QaoXAHv4+Z578hbmHHJKZknzxVtvo77og== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg== + dependencies: + kind-of "^3.0.2" + +to-readable-stream@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-1.0.0.tgz#ce0aa0c2f3df6adf852efb404a783e77c0475771" + integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== + +to-regex-range@^2.1.0: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha512-ZZWNfCjUokXXDGXFpZehJIkZqq91BcULFq/Pi7M5i4JnxXdhMKAK682z8bCW3o8Hj1wuuzoKcW3DfVzaP6VuNg== + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toidentifier@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== + +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +treeify@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/treeify/-/treeify-1.1.0.tgz#4e31c6a463accd0943879f30667c4fdaff411bb8" + integrity 
sha512-1m4RA7xVAJrSGrrXGs0L3YTwyvBs2S8PbRHaLZAkFw7JR8oIFwYtysxlBZhYIa7xSyiYJKZ3iGrrk55cGA3i9A== + +trim-right@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003" + integrity sha512-WZGXGstmCWgeevgTL54hrCuw1dyMQIzWy7ZfqRJfSmJZBwklI15egmQytFP6bPidmw3M8d5yEowl1niq4vmqZw== + +ts-essentials@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-1.0.4.tgz#ce3b5dade5f5d97cf69889c11bf7d2da8555b15a" + integrity sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ== + +ts-essentials@^6.0.3: + version "6.0.7" + resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-6.0.7.tgz#5f4880911b7581a873783740ce8b94da163d18a6" + integrity sha512-2E4HIIj4tQJlIHuATRHayv0EfMGK3ris/GRk1E3CFnsZzeNV+hUmelbaTZHLtXaZppM5oLhHRtO04gINC4Jusw== + +ts-essentials@^7.0.1: + version "7.0.3" + resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-7.0.3.tgz#686fd155a02133eedcc5362dc8b5056cde3e5a38" + integrity sha512-8+gr5+lqO3G84KdiTSMRLtuyJ+nTBVRKuCrK4lidMPdVeEp0uqC875uE5NMcaA7YYMN7XsNiFQuMvasF8HT/xQ== + +ts-generator@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/ts-generator/-/ts-generator-0.1.1.tgz#af46f2fb88a6db1f9785977e9590e7bcd79220ab" + integrity sha512-N+ahhZxTLYu1HNTQetwWcx3so8hcYbkKBHTr4b4/YgObFTIKkOSSsaa+nal12w8mfrJAyzJfETXawbNjSfP2gQ== + dependencies: + "@types/mkdirp" "^0.5.2" + "@types/prettier" "^2.1.1" + "@types/resolve" "^0.0.8" + chalk "^2.4.1" + glob "^7.1.2" + mkdirp "^0.5.1" + prettier "^2.1.2" + resolve "^1.8.1" + ts-essentials "^1.0.0" + +ts-jest@^29.0.1: + version "29.0.3" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.0.3.tgz#63ea93c5401ab73595440733cefdba31fcf9cb77" + integrity sha512-Ibygvmuyq1qp/z3yTh9QTwVVAbFdDy/+4BtIQR2sp6baF2SJU/8CKK/hhnGIDY2L90Az2jIqTwZPnN2p+BweiQ== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + 
jest-util "^29.0.0" + json5 "^2.2.1" + lodash.memoize "4.x" + make-error "1.x" + semver "7.x" + yargs-parser "^21.0.1" + +ts-node@^10.0.0, ts-node@^10.1.0, ts-node@^10.7.0: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" "^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + +tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tsort@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/tsort/-/tsort-0.0.1.tgz#e2280f5e817f8bf4275657fd0f9aebd44f5a2786" + integrity sha512-Tyrf5mxF8Ofs1tNoxA13lFeZ2Zrbd6cKbuH3V+MQ5sb6DtBj5FjrXVsRWT8YvNAQTqNoz66dz1WsbigI22aEnw== + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== + dependencies: + safe-buffer "^5.0.1" + +tweetnacl-util@^0.15.0, tweetnacl-util@^0.15.1: + version "0.15.1" + resolved 
"https://registry.yarnpkg.com/tweetnacl-util/-/tweetnacl-util-0.15.1.tgz#b80fcdb5c97bcc508be18c44a4be50f022eea00b" + integrity sha512-RKJBIj8lySrShN4w6i/BonWp2Z/uxwC3h4y7xsRrpP59ZboCd0GpEVsOnMDYLMmKBpYhb5TgHzZXy7wTfYFBRw== + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== + +tweetnacl@^1.0.0, tweetnacl@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596" + integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw== + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg== + dependencies: + prelude-ls "~1.1.2" + +type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + 
+type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +type-fest@^0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.7.1.tgz#8dda65feaf03ed78f0a3f9678f1869147f7c5c48" + integrity sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg== + +type-is@~1.6.18: + version "1.6.18" + resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== + dependencies: + media-typer "0.3.0" + mime-types "~2.1.24" + +type@^1.0.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" + integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== + +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + +typechain@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/typechain/-/typechain-3.0.0.tgz#d5a47700831f238e43f7429b987b4bb54849b92e" + integrity sha512-ft4KVmiN3zH4JUFu2WJBrwfHeDf772Tt2d8bssDTo/YcckKW2D+OwFrHXRC6hJvO3mHjFQTihoMV6fJOi0Hngg== + dependencies: + command-line-args "^4.0.7" + debug "^4.1.1" + fs-extra "^7.0.0" + js-sha3 "^0.8.0" + lodash "^4.17.15" + ts-essentials "^6.0.3" + ts-generator "^0.1.1" + +typechain@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/typechain/-/typechain-4.0.3.tgz#e8fcd6c984676858c64eeeb155ea783a10b73779" + integrity 
sha512-tmoHQeXZWHxIdeLK+i6dU0CU0vOd9Cndr3jFTZIMzak5/YpFZ8XoiYpTZcngygGBqZo+Z1EUmttLbW9KkFZLgQ== + dependencies: + command-line-args "^4.0.7" + debug "^4.1.1" + fs-extra "^7.0.0" + js-sha3 "^0.8.0" + lodash "^4.17.15" + ts-essentials "^7.0.1" + ts-generator "^0.1.1" + +typed-array-length@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" + integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== + dependencies: + call-bind "^1.0.2" + for-each "^0.3.3" + is-typed-array "^1.1.9" + +typedarray-to-buffer@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== + dependencies: + is-typedarray "^1.0.0" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== + +typescript@^4.3.5, typescript@^4.5.5: + version "4.8.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" + integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== + +typewise-core@^1.2, typewise-core@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/typewise-core/-/typewise-core-1.2.0.tgz#97eb91805c7f55d2f941748fa50d315d991ef195" + integrity sha512-2SCC/WLzj2SbUwzFOzqMCkz5amXLlxtJqDKTICqg30x+2DZxcfZN2MvQZmGfXWKNWaKK9pBPsvkcwv8bF/gxKg== + +typewise@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/typewise/-/typewise-1.0.3.tgz#1067936540af97937cc5dcf9922486e9fa284651" + integrity 
sha512-aXofE06xGhaQSPzt8hlTY+/YWQhm9P0jYUp1f2XtmW/3Bk0qzXcyFWAtPoo2uTGQj1ZwbDuSyuxicq+aDo8lCQ== + dependencies: + typewise-core "^1.2.0" + +typewiselite@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/typewiselite/-/typewiselite-1.0.0.tgz#c8882fa1bb1092c06005a97f34ef5c8508e3664e" + integrity sha512-J9alhjVHupW3Wfz6qFRGgQw0N3gr8hOkw6zm7FZ6UR1Cse/oD9/JVok7DNE9TT9IbciDHX2Ex9+ksE6cRmtymw== + +typical@^2.6.0, typical@^2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/typical/-/typical-2.6.1.tgz#5c080e5d661cbbe38259d2e70a3c7253e873881d" + integrity sha512-ofhi8kjIje6npGozTip9Fr8iecmYfEbS06i0JnIg+rh51KakryWF4+jX8lLKZVhy6N+ID45WYSFCxPOdTWCzNg== + +uc.micro@^1.0.1, uc.micro@^1.0.5: + version "1.0.6" + resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac" + integrity sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA== + +uglify-js@^3.1.4: + version "3.17.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== + +ultron@~1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" + integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +underscore@1.9.1: + version "1.9.1" + resolved 
"https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961" + integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg== + +undici@^5.4.0: + version "5.12.0" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.12.0.tgz#c758ffa704fbcd40d506e4948860ccaf4099f531" + integrity sha512-zMLamCG62PGjd9HHMpo05bSLvvwWOZgGeiWlN/vlqu3+lRo3elxktVGEyLMX+IO7c2eflLjcW74AlkhEZm15mg== + dependencies: + busboy "^1.6.0" + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +universalify@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" + integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unorm@^1.3.3: + version "1.6.0" + resolved "https://registry.yarnpkg.com/unorm/-/unorm-1.6.0.tgz#029b289661fba714f1a9af439eb51d9b16c205af" + integrity sha512-b2/KCUlYZUeA7JFUuRJZPUtr4gZvBh7tavtv4fvk4+KV9pfGiR6CQAQAWl49ZpR3ts2dk4FYkP7EIgDJoiOLDA== + +unpipe@1.0.0, unpipe@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== + +unset-value@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha512-PcA2tsuGSF9cnySLHTLSh2qrQiJ70mn+r+Glzxv2TWZblxsxCC52BDlZoPCsz7STd9pN7EZetkWZBAvk4cgZdQ== + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +untildify@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/untildify/-/untildify-3.0.3.tgz#1e7b42b140bcfd922b22e70ca1265bfe3634c7c9" + integrity sha512-iSk/J8efr8uPT/Z4eSUywnqyrQU7DSdMfdqK4iWEaUVVmcP5JcnpRqmVMwcwcnmI1ATFNgC5V90u09tBynNFKA== + +update-browserslist-db@^1.0.9: + version "1.0.10" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg== + +url-parse-lax@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" + integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== + dependencies: + prepend-http "^2.0.0" + +url-set-query@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/url-set-query/-/url-set-query-1.0.0.tgz#016e8cfd7c20ee05cafe7795e892bd0702faa339" + integrity 
sha512-3AChu4NiXquPfeckE5R5cGdiHCMWJx1dwCWOmWIL4KHAziJNOFIYJlpGFeKDvwLPHovZRCxK3cYlwzqI9Vp+Gg== + +url@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + integrity sha512-kbailJa29QrtXnxgq+DdCEGlbTeYM2eJUxsz6vjZavrCYPMIFHMKQmSKYAIuUK2i7hgPm28a8piX5NTUtM/LKQ== + dependencies: + punycode "1.3.2" + querystring "0.2.0" + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +utf-8-validate@^5.0.2: + version "5.0.10" + resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.10.tgz#d7d10ea39318171ca982718b6b96a8d2442571a2" + integrity sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ== + dependencies: + node-gyp-build "^4.3.0" + +utf8@3.0.0, utf8@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/utf8/-/utf8-3.0.0.tgz#f052eed1364d696e769ef058b183df88c87f69d1" + integrity sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ== + +util-deprecate@^1.0.1, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util.promisify@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.1.1.tgz#77832f57ced2c9478174149cae9b96e9918cd54b" + integrity sha512-/s3UsZUrIfa6xDhr7zZhnE9SLQ5RIXyYfiVnMMyMDzOc8WhWN4Nbh36H842OyurKbCDAesZOJaVyvmSl6fhGQw== + dependencies: + call-bind "^1.0.0" + define-properties "^1.1.3" + for-each "^0.3.3" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.1" + +util@^0.10.3: + version "0.10.4" + 
resolved "https://registry.yarnpkg.com/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901" + integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A== + dependencies: + inherits "2.0.3" + +utils-merge@1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== + +uuid@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" + integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== + +uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +uuid@^8.3.2: + version "8.3.2" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== + +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^9.0.1: + version "9.0.1" + resolved 
"https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" + integrity sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.12" + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + +validate-npm-package-license@^3.0.1: + version "3.0.4" + resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" + integrity sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== + dependencies: + spdx-correct "^3.0.0" + spdx-expression-parse "^3.0.0" + +varint@^5.0.0: + version "5.0.2" + resolved "https://registry.yarnpkg.com/varint/-/varint-5.0.2.tgz#5b47f8a947eb668b848e034dcfa87d0ff8a7f7a4" + integrity sha512-lKxKYG6H03yCZUpAGOPOsMcGxd1RHCu1iKvEHYDPmTyq2HueGhD73ssNBqqQWfvYs04G9iUFRvmAVLW20Jw6ow== + +vary@^1, vary@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +weak-map@~1.0.x: + version "1.0.8" + resolved 
"https://registry.yarnpkg.com/weak-map/-/weak-map-1.0.8.tgz#394c18a9e8262e790544ed8b55c6a4ddad1cb1a3" + integrity sha512-lNR9aAefbGPpHO7AEnY0hCFjz1eTkWCXYvkTRrTHs9qv8zJp+SkVYpzfLIFXQQiG3tVvbNFQgVg2bQS8YGgxyw== + +web3-bzz@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.2.11.tgz#41bc19a77444bd5365744596d778b811880f707f" + integrity sha512-XGpWUEElGypBjeFyUhTkiPXFbDVD6Nr/S5jznE3t8cWUA0FxRf1n3n/NuIZeb0H9RkN2Ctd/jNma/k8XGa3YKg== + dependencies: + "@types/node" "^12.12.6" + got "9.6.0" + swarm-js "^0.1.40" + underscore "1.9.1" + +web3-core-helpers@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.2.11.tgz#84c681ed0b942c0203f3b324a245a127e8c67a99" + integrity sha512-PEPoAoZd5ME7UfbnCZBdzIerpe74GEvlwT4AjOmHeCVZoIFk7EqvOZDejJHt+feJA6kMVTdd0xzRNN295UhC1A== + dependencies: + underscore "1.9.1" + web3-eth-iban "1.2.11" + web3-utils "1.2.11" + +web3-core-method@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.2.11.tgz#f880137d1507a0124912bf052534f168b8d8fbb6" + integrity sha512-ff0q76Cde94HAxLDZ6DbdmKniYCQVtvuaYh+rtOUMB6kssa5FX0q3vPmixi7NPooFnbKmmZCM6NvXg4IreTPIw== + dependencies: + "@ethersproject/transactions" "^5.0.0-beta.135" + underscore "1.9.1" + web3-core-helpers "1.2.11" + web3-core-promievent "1.2.11" + web3-core-subscriptions "1.2.11" + web3-utils "1.2.11" + +web3-core-promievent@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.2.11.tgz#51fe97ca0ddec2f99bf8c3306a7a8e4b094ea3cf" + integrity sha512-il4McoDa/Ox9Agh4kyfQ8Ak/9ABYpnF8poBLL33R/EnxLsJOGQG2nZhkJa3I067hocrPSjEdlPt/0bHXsln4qA== + dependencies: + eventemitter3 "4.0.4" + +web3-core-requestmanager@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.2.11.tgz#fe6eb603fbaee18530293a91f8cf26d8ae28c45a" + integrity 
sha512-oFhBtLfOiIbmfl6T6gYjjj9igOvtyxJ+fjS+byRxiwFJyJ5BQOz4/9/17gWR1Cq74paTlI7vDGxYfuvfE/mKvA== + dependencies: + underscore "1.9.1" + web3-core-helpers "1.2.11" + web3-providers-http "1.2.11" + web3-providers-ipc "1.2.11" + web3-providers-ws "1.2.11" + +web3-core-subscriptions@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.2.11.tgz#beca908fbfcb050c16f45f3f0f4c205e8505accd" + integrity sha512-qEF/OVqkCvQ7MPs1JylIZCZkin0aKK9lDxpAtQ1F8niEDGFqn7DT8E/vzbIa0GsOjL2fZjDhWJsaW+BSoAW1gg== + dependencies: + eventemitter3 "4.0.4" + underscore "1.9.1" + web3-core-helpers "1.2.11" + +web3-core@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.2.11.tgz#1043cacc1becb80638453cc5b2a14be9050288a7" + integrity sha512-CN7MEYOY5ryo5iVleIWRE3a3cZqVaLlIbIzDPsvQRUfzYnvzZQRZBm9Mq+ttDi2STOOzc1MKylspz/o3yq/LjQ== + dependencies: + "@types/bn.js" "^4.11.5" + "@types/node" "^12.12.6" + bignumber.js "^9.0.0" + web3-core-helpers "1.2.11" + web3-core-method "1.2.11" + web3-core-requestmanager "1.2.11" + web3-utils "1.2.11" + +web3-eth-abi@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.2.11.tgz#a887494e5d447c2926d557a3834edd66e17af9b0" + integrity sha512-PkRYc0+MjuLSgg03QVWqWlQivJqRwKItKtEpRUaxUAeLE7i/uU39gmzm2keHGcQXo3POXAbOnMqkDvOep89Crg== + dependencies: + "@ethersproject/abi" "5.0.0-beta.153" + underscore "1.9.1" + web3-utils "1.2.11" + +web3-eth-accounts@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.2.11.tgz#a9e3044da442d31903a7ce035a86d8fa33f90520" + integrity sha512-6FwPqEpCfKIh3nSSGeo3uBm2iFSnFJDfwL3oS9pyegRBXNsGRVpgiW63yhNzL0796StsvjHWwQnQHsZNxWAkGw== + dependencies: + crypto-browserify "3.12.0" + eth-lib "0.2.8" + ethereumjs-common "^1.3.2" + ethereumjs-tx "^2.1.1" + scrypt-js "^3.0.1" + underscore "1.9.1" + uuid "3.3.2" + web3-core "1.2.11" + web3-core-helpers 
"1.2.11" + web3-core-method "1.2.11" + web3-utils "1.2.11" + +web3-eth-contract@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.2.11.tgz#917065902bc27ce89da9a1da26e62ef663663b90" + integrity sha512-MzYuI/Rq2o6gn7vCGcnQgco63isPNK5lMAan2E51AJLknjSLnOxwNY3gM8BcKoy4Z+v5Dv00a03Xuk78JowFow== + dependencies: + "@types/bn.js" "^4.11.5" + underscore "1.9.1" + web3-core "1.2.11" + web3-core-helpers "1.2.11" + web3-core-method "1.2.11" + web3-core-promievent "1.2.11" + web3-core-subscriptions "1.2.11" + web3-eth-abi "1.2.11" + web3-utils "1.2.11" + +web3-eth-ens@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.2.11.tgz#26d4d7f16d6cbcfff918e39832b939edc3162532" + integrity sha512-dbW7dXP6HqT1EAPvnniZVnmw6TmQEKF6/1KgAxbo8iBBYrVTMDGFQUUnZ+C4VETGrwwaqtX4L9d/FrQhZ6SUiA== + dependencies: + content-hash "^2.5.2" + eth-ens-namehash "2.0.8" + underscore "1.9.1" + web3-core "1.2.11" + web3-core-helpers "1.2.11" + web3-core-promievent "1.2.11" + web3-eth-abi "1.2.11" + web3-eth-contract "1.2.11" + web3-utils "1.2.11" + +web3-eth-iban@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.2.11.tgz#f5f73298305bc7392e2f188bf38a7362b42144ef" + integrity sha512-ozuVlZ5jwFC2hJY4+fH9pIcuH1xP0HEFhtWsR69u9uDIANHLPQQtWYmdj7xQ3p2YT4bQLq/axKhZi7EZVetmxQ== + dependencies: + bn.js "^4.11.9" + web3-utils "1.2.11" + +web3-eth-personal@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.2.11.tgz#a38b3942a1d87a62070ce0622a941553c3d5aa70" + integrity sha512-42IzUtKq9iHZ8K9VN0vAI50iSU9tOA1V7XU2BhF/tb7We2iKBVdkley2fg26TxlOcKNEHm7o6HRtiiFsVK4Ifw== + dependencies: + "@types/node" "^12.12.6" + web3-core "1.2.11" + web3-core-helpers "1.2.11" + web3-core-method "1.2.11" + web3-net "1.2.11" + web3-utils "1.2.11" + +web3-eth@1.2.11: + version "1.2.11" + resolved 
"https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.2.11.tgz#4c81fcb6285b8caf544058fba3ae802968fdc793" + integrity sha512-REvxW1wJ58AgHPcXPJOL49d1K/dPmuw4LjPLBPStOVkQjzDTVmJEIsiLwn2YeuNDd4pfakBwT8L3bz1G1/wVsQ== + dependencies: + underscore "1.9.1" + web3-core "1.2.11" + web3-core-helpers "1.2.11" + web3-core-method "1.2.11" + web3-core-subscriptions "1.2.11" + web3-eth-abi "1.2.11" + web3-eth-accounts "1.2.11" + web3-eth-contract "1.2.11" + web3-eth-ens "1.2.11" + web3-eth-iban "1.2.11" + web3-eth-personal "1.2.11" + web3-net "1.2.11" + web3-utils "1.2.11" + +web3-net@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.2.11.tgz#eda68ef25e5cdb64c96c39085cdb74669aabbe1b" + integrity sha512-sjrSDj0pTfZouR5BSTItCuZ5K/oZPVdVciPQ6981PPPIwJJkCMeVjD7I4zO3qDPCnBjBSbWvVnLdwqUBPtHxyg== + dependencies: + web3-core "1.2.11" + web3-core-method "1.2.11" + web3-utils "1.2.11" + +web3-provider-engine@14.2.1: + version "14.2.1" + resolved "https://registry.yarnpkg.com/web3-provider-engine/-/web3-provider-engine-14.2.1.tgz#ef351578797bf170e08d529cb5b02f8751329b95" + integrity sha512-iSv31h2qXkr9vrL6UZDm4leZMc32SjWJFGOp/D92JXfcEboCqraZyuExDkpxKw8ziTufXieNM7LSXNHzszYdJw== + dependencies: + async "^2.5.0" + backoff "^2.5.0" + clone "^2.0.0" + cross-fetch "^2.1.0" + eth-block-tracker "^3.0.0" + eth-json-rpc-infura "^3.1.0" + eth-sig-util "^1.4.2" + ethereumjs-block "^1.2.2" + ethereumjs-tx "^1.2.0" + ethereumjs-util "^5.1.5" + ethereumjs-vm "^2.3.4" + json-rpc-error "^2.0.0" + json-stable-stringify "^1.0.1" + promise-to-callback "^1.0.0" + readable-stream "^2.2.9" + request "^2.85.0" + semaphore "^1.0.3" + ws "^5.1.1" + xhr "^2.2.0" + xtend "^4.0.1" + +web3-providers-http@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.2.11.tgz#1cd03442c61670572d40e4dcdf1faff8bd91e7c6" + integrity sha512-psh4hYGb1+ijWywfwpB2cvvOIMISlR44F/rJtYkRmQ5jMvG4FOCPlQJPiHQZo+2cc3HbktvvSJzIhkWQJdmvrA== + 
dependencies: + web3-core-helpers "1.2.11" + xhr2-cookies "1.1.0" + +web3-providers-ipc@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.2.11.tgz#d16d6c9be1be6e0b4f4536c4acc16b0f4f27ef21" + integrity sha512-yhc7Y/k8hBV/KlELxynWjJDzmgDEDjIjBzXK+e0rHBsYEhdCNdIH5Psa456c+l0qTEU2YzycF8VAjYpWfPnBpQ== + dependencies: + oboe "2.1.4" + underscore "1.9.1" + web3-core-helpers "1.2.11" + +web3-providers-ws@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.2.11.tgz#a1dfd6d9778d840561d9ec13dd453046451a96bb" + integrity sha512-ZxnjIY1Er8Ty+cE4migzr43zA/+72AF1myzsLaU5eVgdsfV7Jqx7Dix1hbevNZDKFlSoEyq/3j/jYalh3So1Zg== + dependencies: + eventemitter3 "4.0.4" + underscore "1.9.1" + web3-core-helpers "1.2.11" + websocket "^1.0.31" + +web3-shh@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.2.11.tgz#f5d086f9621c9a47e98d438010385b5f059fd88f" + integrity sha512-B3OrO3oG1L+bv3E1sTwCx66injW1A8hhwpknDUbV+sw3fehFazA06z9SGXUefuFI1kVs4q2vRi0n4oCcI4dZDg== + dependencies: + web3-core "1.2.11" + web3-core-method "1.2.11" + web3-core-subscriptions "1.2.11" + web3-net "1.2.11" + +web3-utils@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.2.11.tgz#af1942aead3fb166ae851a985bed8ef2c2d95a82" + integrity sha512-3Tq09izhD+ThqHEaWYX4VOT7dNPdZiO+c/1QMA0s5X2lDFKK/xHJb7cyTRRVzN2LvlHbR7baS1tmQhSua51TcQ== + dependencies: + bn.js "^4.11.9" + eth-lib "0.2.8" + ethereum-bloom-filters "^1.0.6" + ethjs-unit "0.1.6" + number-to-bn "1.7.0" + randombytes "^2.1.0" + underscore "1.9.1" + utf8 "3.0.0" + +web3-utils@^1.0.0-beta.31, web3-utils@^1.3.4: + version "1.8.1" + resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.8.1.tgz#f2f7ca7eb65e6feb9f3d61056d0de6bbd57125ff" + integrity sha512-LgnM9p6V7rHHUGfpMZod+NST8cRfGzJ1BTXAyNo7A9cJX9LczBfSRxJp+U/GInYe9mby40t3v22AJdlELibnsQ== + dependencies: + bn.js "^5.2.1" 
+ ethereum-bloom-filters "^1.0.6" + ethereumjs-util "^7.1.0" + ethjs-unit "0.1.6" + number-to-bn "1.7.0" + randombytes "^2.1.0" + utf8 "3.0.0" + +web3@1.2.11: + version "1.2.11" + resolved "https://registry.yarnpkg.com/web3/-/web3-1.2.11.tgz#50f458b2e8b11aa37302071c170ed61cff332975" + integrity sha512-mjQ8HeU41G6hgOYm1pmeH0mRAeNKJGnJEUzDMoerkpw7QUQT4exVREgF1MYPvL/z6vAshOXei25LE/t/Bxl8yQ== + dependencies: + web3-bzz "1.2.11" + web3-core "1.2.11" + web3-eth "1.2.11" + web3-eth-personal "1.2.11" + web3-net "1.2.11" + web3-shh "1.2.11" + web3-utils "1.2.11" + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +websocket@1.0.32: + version "1.0.32" + resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.32.tgz#1f16ddab3a21a2d929dec1687ab21cfdc6d3dbb1" + integrity sha512-i4yhcllSP4wrpoPMU2N0TQ/q0O94LRG/eUQjEAamRltjQ1oT1PFFKOG4i877OlJgCG8rw6LrrowJp+TYCEWF7Q== + dependencies: + bufferutil "^4.0.1" + debug "^2.2.0" + es5-ext "^0.10.50" + typedarray-to-buffer "^3.1.5" + utf-8-validate "^5.0.2" + yaeti "^0.0.6" + +websocket@^1.0.31: + version "1.0.34" + resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.34.tgz#2bdc2602c08bf2c82253b730655c0ef7dcab3111" + integrity sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ== + dependencies: + bufferutil "^4.0.1" + debug "^2.2.0" + es5-ext "^0.10.50" + typedarray-to-buffer "^3.1.5" + utf-8-validate "^5.0.2" + yaeti "^0.0.6" + +whatwg-fetch@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz#dde6a5df315f9d39991aa17621853d720b85566f" + integrity sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng== + +whatwg-url@^5.0.0: + version "5.0.0" + 
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-1.0.0.tgz#bba63ca861948994ff307736089e3b96026c2a4f" + integrity sha512-F6+WgncZi/mJDrammbTuHe1q0R5hOXv/mBaiNA2TCNT/LTHusX0V+CJnj9XT8ki5ln2UZyyddDgHfCzyrOH7MQ== + +which-typed-array@^1.1.9: + version "1.1.9" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.9.tgz#307cf898025848cf995e795e8423c7f337efbde6" + integrity sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.2" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + is-typed-array "^1.1.10" + +which@2.0.2, which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +which@^1.2.9: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: 
+ isexe "^2.0.0" + +window-size@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/window-size/-/window-size-0.2.0.tgz#b4315bb4214a3d7058ebeee892e13fa24d98b075" + integrity sha512-UD7d8HFA2+PZsbKyaOCEy8gMh1oDtHgJh1LfgjQ4zVXmYjAT/kvz3PueITKuqDiIXQe7yzpPnxX3lNc+AhQMyw== + +word-wrap@^1.2.3, word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + +workerpool@6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.0.tgz#827d93c9ba23ee2019c3ffaff5c27fccea289e8b" + integrity sha512-Rsk5qQHJ9eowMH28Jwhe8HEbmdYDX4lwoMWshiCXugjtHqMD9ZbiqSDLxcsfdqsETPzVUtX5s1Z5kStiIM6l4A== + +workerpool@6.2.1: + version "6.2.1" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.2.1.tgz#46fc150c17d826b86a008e5a4508656777e9c343" + integrity sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw== + +wrap-ansi@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" + integrity sha512-vAaEaDM946gbNpH5pLVNR+vX2ht6n0Bt3GXwVB1AuAqZosOvHNF3P7wDnh8KLkSqgUh0uh77le7Owgoz+Z9XBw== + dependencies: + string-width "^1.0.1" + strip-ansi "^3.0.1" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width 
"^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + +write@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" + integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== + dependencies: + mkdirp "^0.5.1" + +ws@7.4.6: + version "7.4.6" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c" + integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A== + +ws@^3.0.0: + version "3.3.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2" + integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA== + dependencies: + async-limiter "~1.0.0" + safe-buffer "~5.1.0" + ultron "~1.1.0" + +ws@^5.1.1: + version "5.2.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.3.tgz#05541053414921bc29c63bee14b8b0dd50b07b3d" + integrity sha512-jZArVERrMsKUatIdnLzqvcfydI85dvd/Fp1u/VOpfdDWQ4c9qWXe+VIeAbQ5FrDwciAkr+lzofXLz3Kuf26AOA== + dependencies: + async-limiter "~1.0.0" + +ws@^7.4.6: + version "7.5.9" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + 
+xhr-request-promise@^0.1.2: + version "0.1.3" + resolved "https://registry.yarnpkg.com/xhr-request-promise/-/xhr-request-promise-0.1.3.tgz#2d5f4b16d8c6c893be97f1a62b0ed4cf3ca5f96c" + integrity sha512-YUBytBsuwgitWtdRzXDDkWAXzhdGB8bYm0sSzMPZT7Z2MBjMSTHFsyCT1yCRATY+XC69DUrQraRAEgcoCRaIPg== + dependencies: + xhr-request "^1.1.0" + +xhr-request@^1.0.1, xhr-request@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/xhr-request/-/xhr-request-1.1.0.tgz#f4a7c1868b9f198723444d82dcae317643f2e2ed" + integrity sha512-Y7qzEaR3FDtL3fP30k9wO/e+FBnBByZeybKOhASsGP30NIkRAAkKD/sCnLvgEfAIEC1rcmK7YG8f4oEnIrrWzA== + dependencies: + buffer-to-arraybuffer "^0.0.5" + object-assign "^4.1.1" + query-string "^5.0.1" + simple-get "^2.7.0" + timed-out "^4.0.1" + url-set-query "^1.0.0" + xhr "^2.0.4" + +xhr2-cookies@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/xhr2-cookies/-/xhr2-cookies-1.1.0.tgz#7d77449d0999197f155cb73b23df72505ed89d48" + integrity sha512-hjXUA6q+jl/bd8ADHcVfFsSPIf+tyLIjuO9TwJC9WI6JP2zKcS7C+p56I9kCLLsaCiNT035iYvEUUzdEFj/8+g== + dependencies: + cookiejar "^2.1.1" + +xhr2@0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/xhr2/-/xhr2-0.1.3.tgz#cbfc4759a69b4a888e78cf4f20b051038757bd11" + integrity sha512-6RmGK22QwC7yXB1CRwyLWuS2opPcKOlAu0ViAnyZjDlzrEmCKL4kLHkfvB8oMRWeztMsNoDGAjsMZY15w/4tTw== + +xhr@^2.0.4, xhr@^2.2.0, xhr@^2.3.3: + version "2.6.0" + resolved "https://registry.yarnpkg.com/xhr/-/xhr-2.6.0.tgz#b69d4395e792b4173d6b7df077f0fc5e4e2b249d" + integrity sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA== + dependencies: + global "~4.4.0" + is-function "^1.0.1" + parse-headers "^2.0.0" + xtend "^4.0.0" + +xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.0, xtend@~4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity 
sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== + +xtend@~2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.1.2.tgz#6efecc2a4dad8e6962c4901b337ce7ba87b5d28b" + integrity sha512-vMNKzr2rHP9Dp/e1NQFnLQlwlhp9L/LfvnsVdHxN1f+uggyVI3i08uD14GPvCToPkdsRfyPqIyYGmIk58V98ZQ== + dependencies: + object-keys "~0.4.0" + +y18n@^3.2.1: + version "3.2.2" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.2.tgz#85c901bd6470ce71fc4bb723ad209b70f7f28696" + integrity sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yaeti@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/yaeti/-/yaeti-0.0.6.tgz#f26f484d72684cf42bedfb76970aa1608fbf9577" + integrity sha512-MvQa//+KcZCUkBTIC9blM+CU9J2GzuTytsOUwf2lidtvkx/6gnEp1QvJv34t9vdjhFmha/mUiNDbN0D0mJWdug== + +yallist@^3.0.0, yallist@^3.0.2, yallist@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@20.2.4: + version "20.2.4" + resolved 
"https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" + integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== + +yargs-parser@^2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-2.4.1.tgz#85568de3cf150ff49fa51825f03a8c880ddcc5c4" + integrity sha512-9pIKIJhnI5tonzG6OnCFlz/yln8xHYcGl+pn3xR0Vzff0vzN1PbNRaelgfgRUwZ3s4i3jvxT9WhmUGL4whnasA== + dependencies: + camelcase "^3.0.0" + lodash.assign "^4.0.6" + +yargs-parser@^20.2.2: + version "20.2.9" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== + +yargs-parser@^21.0.0, yargs-parser@^21.0.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs-unparser@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-2.0.0.tgz#f131f9226911ae5d9ad38c432fe809366c2325eb" + integrity sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA== + dependencies: + camelcase "^6.0.0" + decamelize "^4.0.0" + flat "^5.0.2" + is-plain-obj "^2.1.0" + +yargs@16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" + +yargs@^17.3.1: + version "17.6.1" + resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-17.6.1.tgz#712508771045019cda059bc1ba3ae091aaa1402e" + integrity sha512-leBuCGrL4dAd6ispNOGsJlhd0uZ6Qehkbu/B9KCR+Pxa/NVdNwi+i31lo0buCm6XxhJQFshXCD0/evfV4xfoUg== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.0.0" + +yargs@^4.7.1: + version "4.8.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-4.8.1.tgz#c0c42924ca4aaa6b0e6da1739dfb216439f9ddc0" + integrity sha512-LqodLrnIDM3IFT+Hf/5sxBnEGECrfdC1uIbgZeJmESCSo4HoCAaKEus8MylXHAkdacGc0ye+Qa+dpkuom8uVYA== + dependencies: + cliui "^3.2.0" + decamelize "^1.1.1" + get-caller-file "^1.0.1" + lodash.assign "^4.0.3" + os-locale "^1.4.0" + read-pkg-up "^1.0.1" + require-directory "^2.1.1" + require-main-filename "^1.0.1" + set-blocking "^2.0.0" + string-width "^1.0.1" + which-module "^1.0.0" + window-size "^0.2.0" + y18n "^3.2.1" + yargs-parser "^2.4.1" + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== + +"zksync-web3@link:sdk/zksync-web3.js": + version "0.12.5"