From b45cc8322b0ca8c82ac1427c7075e674fef15d2b Mon Sep 17 00:00:00 2001
From: Acid Chicken (硫酸鶏)
Date: Fri, 28 Sep 2018 19:11:40 +0900
Subject: [PATCH] Update Travis CI configuration

---
 .travis.yml | 174 +-
 .travis/lint_04_install.sh | 12 +
 .travis/lint_05_before_script.sh | 10 +
 .travis/lint_06_script.sh | 25 +
 .travis/test_03_before_install.sh | 25 +
 .travis/test_04_install.sh | 26 +
 .travis/test_05_before_script.sh | 25 +
 .travis/test_06_script.sh | 68 +
 test/README.md | 12 +-
 test/config.ini.in | 3 +-
 test/functional/README.md | 94 +-
 test/functional/combine_logs.py | 24 +-
 test/functional/create_cache.py | 3 +-
 test/functional/data/rpc_getblockstats.json | 222 ++
 test/functional/data/rpc_psbt.json | 130 +
 test/functional/example_test.py | 77 +-
 test/functional/feature_assumevalid.py | 191 ++
 test/functional/feature_bip68_sequence.py | 402 ++++
 test/functional/feature_block.py | 1322 ++++++++++
 test/functional/feature_blocksdir.py | 36 +
 test/functional/feature_cltv.py | 144 ++
 test/functional/feature_config_args.py | 80 +
 test/functional/feature_csv_activation.py | 485 ++++
 test/functional/feature_dbcrash.py | 285 +++
 test/functional/feature_dersig.py | 136 ++
 test/functional/feature_fee_estimation.py | 247 ++
 test/functional/feature_help.py | 62 +
 test/functional/feature_includeconf.py | 82 +
 test/functional/feature_logging.py | 75 +
 test/functional/feature_maxuploadtarget.py | 164 ++
 test/functional/feature_minchainwork.py | 90 +
 test/functional/feature_notifications.py | 87 +
 test/functional/feature_nulldummy.py | 124 +
 test/functional/feature_proxy.py | 201 ++
 test/functional/feature_pruning.py | 461 ++++
 test/functional/feature_rbf.py | 582 +++++
 test/functional/feature_reindex.py | 37 +
 test/functional/feature_segwit.py | 652 +++++
 test/functional/feature_uacomment.py | 40 +
 .../functional/feature_versionbits_warning.py | 110 +
 test/functional/interface_bitcoin_cli.py | 77 +
 test/functional/interface_http.py | 108 +
 test/functional/interface_rest.py | 303 +++
 test/functional/interface_zmq.py | 121 +
 test/functional/mempool_accept.py | 295 +++
 test/functional/mempool_limit.py | 33 +-
 test/functional/mempool_packages.py | 74 +-
 test/functional/mempool_persist.py | 53 +-
 test/functional/mempool_reorg.py | 25 +-
 test/functional/mempool_resurrect.py | 71 +
 test/functional/mempool_spend_coinbase.py | 56 +
 test/functional/mining_basic.py | 221 ++
 .../mining_getblocktemplate_longpoll.py | 75 +
 .../mining_prioritisetransaction.py | 153 ++
 test/functional/p2p_compactblocks.py | 924 +++++++
 test/functional/p2p_disconnect_ban.py | 107 +
 test/functional/p2p_feefilter.py | 95 +
 test/functional/p2p_fingerprint.py | 148 ++
 test/functional/p2p_invalid_block.py | 108 +
 test/functional/p2p_invalid_locator.py | 43 +
 test/functional/p2p_invalid_tx.py | 140 ++
 test/functional/p2p_leak.py | 126 +
 test/functional/p2p_mempool.py | 34 +
 test/functional/p2p_node_network_limited.py | 114 +
 test/functional/p2p_segwit.py | 2065 ++++++++++++++++
 test/functional/p2p_sendheaders.py | 607 +++++
 test/functional/p2p_timeouts.py | 73 +
 test/functional/p2p_unrequested_blocks.py | 309 +++
 test/functional/rpc_bind.py | 126 +
 test/functional/rpc_blockchain.py | 300 +++
 test/functional/rpc_createmultisig.py | 101 +
 test/functional/rpc_decodescript.py | 234 ++
 test/functional/rpc_deprecated.py | 24 +
 test/functional/rpc_fundrawtransaction.py | 754 ++++++
 test/functional/rpc_getblockstats.py | 180 ++
 test/functional/rpc_getchaintips.py | 62 +
 test/functional/rpc_help.py | 42 +
 test/functional/rpc_invalidateblock.py | 66 +
 test/functional/rpc_named_arguments.py | 34 +
 test/functional/rpc_net.py | 144 ++
 test/functional/rpc_preciousblock.py | 114 +
 test/functional/rpc_psbt.py | 229 ++
 test/functional/rpc_rawtransaction.py | 443 ++++
 test/functional/rpc_scantxoutset.py | 95 +
 test/functional/rpc_signmessage.py | 42 +
 test/functional/rpc_signrawtransaction.py | 153 ++
 test/functional/rpc_txoutproof.py | 114 +
 test/functional/rpc_uptime.py | 30 +
 test/functional/rpc_users.py | 206 ++
 test/functional/rpc_zmq.py | 37 +
 test/functional/test_framework/address.py | 20 +-
 test/functional/test_framework/authproxy.py | 96 +-
 test/functional/test_framework/bignum.py | 39 -
 test/functional/test_framework/blocktools.py | 183 +-
 test/functional/test_framework/coverage.py | 30 +-
 test/functional/test_framework/key.py | 10 +-
 test/functional/test_framework/messages.py | 1442 +++++++++++
 test/functional/test_framework/mininode.py | 2116 ++++-------------
 test/functional/test_framework/netutil.py | 27 +-
 test/functional/test_framework/script.py | 296 +--
 test/functional/test_framework/segwit_addr.py | 107 +
 test/functional/test_framework/siphash.py | 2 +-
 test/functional/test_framework/socks5.py | 24 +-
 .../test_framework/test_framework.py | 312 ++-
 test/functional/test_framework/test_node.py | 329 ++-
 test/functional/test_framework/util.py | 135 +-
 test/functional/test_runner.py | 427 ++--
 test/functional/wallet_abandonconflict.py | 176 ++
 test/functional/wallet_address_types.py | 301 +++
 test/functional/wallet_backup.py | 210 ++
 test/functional/wallet_basic.py | 492 ++++
 test/functional/wallet_bumpfee.py | 311 +++
 test/functional/wallet_disable.py | 34 +
 test/functional/wallet_disableprivatekeys.py | 35 +
 test/functional/wallet_dump.py | 167 ++
 test/functional/wallet_encryption.py | 83 +
 test/functional/wallet_fallbackfee.py | 30 +
 test/functional/wallet_groups.py | 96 +
 test/functional/wallet_hd.py | 159 ++
 test/functional/wallet_import_rescan.py | 189 ++
 test/functional/wallet_importmulti.py | 463 ++++
 test/functional/wallet_importprunedfunds.py | 116 +
 test/functional/wallet_keypool.py | 89 +
 test/functional/wallet_keypool_topup.py | 72 +
 test/functional/wallet_labels.py | 177 ++
 test/functional/wallet_listreceivedby.py | 168 ++
 test/functional/wallet_listsinceblock.py | 285 +++
 test/functional/wallet_listtransactions.py | 209 ++
 test/functional/wallet_multiwallet.py | 305 +++
 .../wallet_resendwallettransactions.py | 32 +
 test/functional/wallet_txn_clone.py | 149 ++
 test/functional/wallet_txn_doublespend.py | 135 ++
 test/functional/wallet_zapwallettxes.py | 82 +
 test/lint/README.md | 29 +
 test/lint/check-doc.py | 48 +
 test/lint/check-rpc-mappings.py | 162 ++
 test/lint/commit-script-check.sh | 47 +
 test/lint/git-subtree-check.sh | 95 +
 test/lint/lint-all.sh | 26 +
 test/lint/lint-assertions.sh | 23 +
 test/lint/lint-circular-dependencies.sh | 83 +
 test/lint/lint-filenames.sh | 24 +
 test/lint/lint-format-strings.py | 291 +++
 test/lint/lint-format-strings.sh | 43 +
 test/lint/lint-include-guards.sh | 30 +
 test/lint/lint-includes.sh | 112 +
 test/lint/lint-locale-dependence.sh | 228 ++
 test/lint/lint-logs.sh | 26 +
 test/lint/lint-python-shebang.sh | 13 +
 test/lint/lint-python-utf8-encoding.sh | 28 +
 test/lint/lint-python.sh | 90 +
 test/lint/lint-qt.sh | 20 +
 test/lint/lint-shell-locale.sh | 25 +
 test/lint/lint-shell.sh | 47 +
 test/lint/lint-spelling.ignore-words.txt | 6 +
 test/lint/lint-spelling.sh | 18 +
 test/lint/lint-tests.sh | 35 +
 test/lint/lint-whitespace.sh | 113 +
 test/util/bitcoin-util-test.py | 20 +-
 test/util/data/bitcoin-util-test.json | 291 ++-
 test/util/data/blanktxv1.json | 1 +
 test/util/data/blanktxv2.json | 1 +
 test/util/data/tt-delin1-out.json | 45 +-
 test/util/data/tt-delout1-out.json | 43 +-
 test/util/data/tt-locktime317000-out.json | 47 +-
 test/util/data/txcreate1.hex | 2 +-
 test/util/data/txcreate1.json | 19 +-
 test/util/data/txcreate2.hex | 2 +-
 test/util/data/txcreate2.json | 9 +-
 test/util/data/txcreatedata1.hex | 2 +-
 test/util/data/txcreatedata1.json | 5 +-
 test/util/data/txcreatedata2.hex | 2 +-
 test/util/data/txcreatedata2.json | 13 +-
 test/util/data/txcreatedata_seq0.hex | 2 +-
 test/util/data/txcreatedata_seq0.json | 11 +-
 test/util/data/txcreatedata_seq1.json | 5 +-
 test/util/data/txcreatemultisig1.json | 7 +-
 test/util/data/txcreatemultisig2.json | 1 +
 test/util/data/txcreatemultisig3.json | 7 +-
 test/util/data/txcreatemultisig4.json | 1 +
 test/util/data/txcreatemultisig5.json | 9 +-
 test/util/data/txcreateoutpubkey1.json | 3 +-
 test/util/data/txcreateoutpubkey2.json | 7 +-
 test/util/data/txcreateoutpubkey3.json | 1 +
 test/util/data/txcreatescript1.json | 1 +
 test/util/data/txcreatescript2.json | 1 +
 test/util/data/txcreatescript3.json | 7 +-
 test/util/data/txcreatescript4.json | 1 +
 test/util/data/txcreatesignv1.hex | 2 +-
 test/util/data/txcreatesignv1.json | 17 +-
 test/util/rpcauth-test.py | 48 +
 191 files changed, 26587 insertions(+), 2981 deletions(-)
 create mode 100755 .travis/lint_04_install.sh
 create mode 100755 .travis/lint_05_before_script.sh
 create mode 100755 .travis/lint_06_script.sh
 create mode 100755 .travis/test_03_before_install.sh
 create mode 100755 .travis/test_04_install.sh
 create mode 100755 .travis/test_05_before_script.sh
 create mode 100755 .travis/test_06_script.sh
 create mode 100644 test/functional/data/rpc_getblockstats.json
 create mode 100644 test/functional/data/rpc_psbt.json
 create mode 100755 test/functional/feature_assumevalid.py
 create mode 100755 test/functional/feature_bip68_sequence.py
 create mode 100755 test/functional/feature_block.py
 create mode 100755 test/functional/feature_blocksdir.py
 create mode 100755 test/functional/feature_cltv.py
 create mode 100755 test/functional/feature_config_args.py
 create mode 100755 test/functional/feature_csv_activation.py
 create mode 100755 test/functional/feature_dbcrash.py
 create mode 100755 test/functional/feature_dersig.py
 create mode 100755 test/functional/feature_fee_estimation.py
 create mode 100755 test/functional/feature_help.py
 create mode 100755 test/functional/feature_includeconf.py
 create mode 100755 test/functional/feature_logging.py
 create mode 100755 test/functional/feature_maxuploadtarget.py
 create mode 100755 test/functional/feature_minchainwork.py
 create mode 100755 test/functional/feature_notifications.py
 create mode 100755 test/functional/feature_nulldummy.py
 create mode 100755 test/functional/feature_proxy.py
 create mode 100755 test/functional/feature_pruning.py
 create mode 100755 test/functional/feature_rbf.py
 create mode 100755 test/functional/feature_reindex.py
 create mode 100755 test/functional/feature_segwit.py
 create mode 100755 test/functional/feature_uacomment.py
 create mode 100755 test/functional/feature_versionbits_warning.py
 create mode 100755 test/functional/interface_bitcoin_cli.py
 create mode 100755 test/functional/interface_http.py
 create mode 100755 test/functional/interface_rest.py
 create mode 100755 test/functional/interface_zmq.py
 create mode 100755 test/functional/mempool_accept.py
 create mode 100755 test/functional/mempool_resurrect.py
 create mode 100755 test/functional/mempool_spend_coinbase.py
 create mode 100755 test/functional/mining_basic.py
 create mode 100755 test/functional/mining_getblocktemplate_longpoll.py
 create mode 100755 test/functional/mining_prioritisetransaction.py
 create mode 100755 test/functional/p2p_compactblocks.py
 create mode 100755 test/functional/p2p_disconnect_ban.py
 create mode 100755 test/functional/p2p_feefilter.py
 create mode 100755 test/functional/p2p_fingerprint.py
 create mode 100755 test/functional/p2p_invalid_block.py
 create mode 100755 test/functional/p2p_invalid_locator.py
 create mode 100755 test/functional/p2p_invalid_tx.py
 create mode 100755 test/functional/p2p_leak.py
 create mode 100755 test/functional/p2p_mempool.py
 create mode 100755 test/functional/p2p_node_network_limited.py
 create mode 100755 test/functional/p2p_segwit.py
 create mode 100755 test/functional/p2p_sendheaders.py
 create mode 100755 test/functional/p2p_timeouts.py
 create mode 100755 test/functional/p2p_unrequested_blocks.py
 create mode 100755 test/functional/rpc_bind.py
 create mode 100755 test/functional/rpc_blockchain.py
 create mode 100755 test/functional/rpc_createmultisig.py
 create mode 100755 test/functional/rpc_decodescript.py
 create mode 100755 test/functional/rpc_deprecated.py
 create mode 100755 test/functional/rpc_fundrawtransaction.py
 create mode 100755 test/functional/rpc_getblockstats.py
 create mode 100755 test/functional/rpc_getchaintips.py
 create mode 100755 test/functional/rpc_help.py
 create mode 100755 test/functional/rpc_invalidateblock.py
 create mode 100755 test/functional/rpc_named_arguments.py
 create mode 100755 test/functional/rpc_net.py
 create mode 100755 test/functional/rpc_preciousblock.py
 create mode 100755 test/functional/rpc_psbt.py
 create mode 100755 test/functional/rpc_rawtransaction.py
 create mode 100755 test/functional/rpc_scantxoutset.py
 create mode 100755 test/functional/rpc_signmessage.py
 create mode 100755 test/functional/rpc_signrawtransaction.py
 create mode 100755 test/functional/rpc_txoutproof.py
 create mode 100755 test/functional/rpc_uptime.py
 create mode 100755 test/functional/rpc_users.py
 create mode 100755 test/functional/rpc_zmq.py
 create mode 100755 test/functional/test_framework/messages.py
 create mode 100644 test/functional/test_framework/segwit_addr.py
 create mode 100755 test/functional/wallet_abandonconflict.py
 create mode 100755 test/functional/wallet_address_types.py
 create mode 100755 test/functional/wallet_backup.py
 create mode 100755 test/functional/wallet_basic.py
 create mode 100755 test/functional/wallet_bumpfee.py
 create mode 100755 test/functional/wallet_disable.py
 create mode 100755 test/functional/wallet_disableprivatekeys.py
 create mode 100755 test/functional/wallet_dump.py
 create mode 100755 test/functional/wallet_encryption.py
 create mode 100755 test/functional/wallet_fallbackfee.py
 create mode 100755 test/functional/wallet_groups.py
 create mode 100755 test/functional/wallet_hd.py
 create mode 100755 test/functional/wallet_import_rescan.py
 create mode 100755 test/functional/wallet_importmulti.py
 create mode 100755 test/functional/wallet_importprunedfunds.py
 create mode 100755 test/functional/wallet_keypool.py
 create mode 100755 test/functional/wallet_keypool_topup.py
 create mode 100755 test/functional/wallet_labels.py
 create mode 100755 test/functional/wallet_listreceivedby.py
 create mode 100755 test/functional/wallet_listsinceblock.py
 create mode 100755 test/functional/wallet_listtransactions.py
 create mode 100755 test/functional/wallet_multiwallet.py
 create mode 100755 test/functional/wallet_resendwallettransactions.py
 create mode 100755 test/functional/wallet_txn_clone.py
 create mode 100755 test/functional/wallet_txn_doublespend.py
 create mode 100755 test/functional/wallet_zapwallettxes.py
 create mode 100644 test/lint/README.md
 create mode 100755 test/lint/check-doc.py
 create mode 100755 test/lint/check-rpc-mappings.py
 create mode 100755 test/lint/commit-script-check.sh
 create mode 100755 test/lint/git-subtree-check.sh
 create mode 100755 test/lint/lint-all.sh
 create mode 100755 test/lint/lint-assertions.sh
 create mode 100755 test/lint/lint-circular-dependencies.sh
 create mode 100755 test/lint/lint-filenames.sh
 create mode 100755 test/lint/lint-format-strings.py
 create mode 100755 test/lint/lint-format-strings.sh
 create mode 100755 test/lint/lint-include-guards.sh
 create mode 100755 test/lint/lint-includes.sh
 create mode 100755 test/lint/lint-locale-dependence.sh
 create mode 100755 test/lint/lint-logs.sh
 create mode 100755 test/lint/lint-python-shebang.sh
 create mode 100755 test/lint/lint-python-utf8-encoding.sh
 create mode 100755 test/lint/lint-python.sh
 create mode 100755 test/lint/lint-qt.sh
 create mode 100755 test/lint/lint-shell-locale.sh
 create mode 100755 test/lint/lint-shell.sh
 create mode 100644 test/lint/lint-spelling.ignore-words.txt
 create mode 100755 test/lint/lint-spelling.sh
 create mode 100755 test/lint/lint-tests.sh
 create mode 100755 test/lint/lint-whitespace.sh
 create mode 100755 test/util/rpcauth-test.py

diff --git a/.travis.yml b/.travis.yml
index 8c8f417c7..a46c6f6bd 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,80 +3,136 @@ dist: trusty
 os: linux
 language: minimal
 cache:
+  ccache: true
   directories:
-  - depends/built
-  - depends/sdk-sources
-  - $HOME/.ccache
+    - depends/built
+    - depends/sdk-sources
+    - $HOME/.ccache
+stages:
+  - lint
+  - test
 env:
   global:
     - MAKEJOBS=-j3
-    - RUN_TESTS=false
-    - CHECK_DOC=0
+    - RUN_UNIT_TESTS=true
+    - RUN_FUNCTIONAL_TESTS=true
+    - RUN_BENCH=false  # Set to true for any one job that has debug enabled, to quickly check bench is not crashing or hitting assertions
+    - DOCKER_NAME_TAG=ubuntu:18.04
    - BOOST_TEST_RANDOM=1$TRAVIS_BUILD_ID
     - CCACHE_SIZE=100M
     - CCACHE_TEMPDIR=/tmp/.ccache-temp
     - CCACHE_COMPRESS=1
+    - CCACHE_DIR=$HOME/.ccache
     - BASE_OUTDIR=$TRAVIS_BUILD_DIR/out
     - SDK_URL=https://bitcoincore.org/depends-sources/sdks
-    - PYTHON_DEBUG=1
     - WINEDEBUG=fixme-all
-  matrix:
+    - DOCKER_PACKAGES="build-essential libtool autotools-dev automake pkg-config bsdmainutils curl git ca-certificates ccache"
+before_install:
+  - set -o errexit; source .travis/test_03_before_install.sh
+install:
+  - set -o errexit; source .travis/test_04_install.sh
+before_script:
+  - set -o errexit; source .travis/test_05_before_script.sh
+script:
+  - set -o errexit; source .travis/test_06_script.sh
+after_script:
+  - echo $TRAVIS_COMMIT_RANGE
+  - echo $TRAVIS_COMMIT_LOG
+jobs:
+  include:
+# lint stage
+    - stage: lint
+      env:
+      sudo: false
+      cache: false
+      language: python
+      python: '3.6'
+      install:
+        - set -o errexit; source .travis/lint_04_install.sh
+      before_script:
+        - set -o errexit; source .travis/lint_05_before_script.sh
+      script:
+        - set -o errexit; source .travis/lint_06_script.sh
 # ARM
-    - HOST=arm-linux-gnueabihf PACKAGES="g++-arm-linux-gnueabihf" DEP_OPTS="NO_QT=1" CHECK_DOC=1 GOAL="install" BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports"
+    - stage: test
+      env: >-
+        HOST=arm-linux-gnueabihf
+        PACKAGES="g++-arm-linux-gnueabihf"
DEP_OPTS="NO_QT=1" + RUN_UNIT_TESTS=false + RUN_FUNCTIONAL_TESTS=false + GOAL="install" + # -Wno-psabi is to disable ABI warnings: "note: parameter passing for argument of type ... changed in GCC 7.1" + # This could be removed once the ABI change warning does not show up by default + BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports CXXFLAGS=-Wno-psabi" # Win32 - - HOST=i686-w64-mingw32 DPKG_ADD_ARCH="i386" DEP_OPTS="NO_QT=1" PACKAGES="python3 nsis g++-mingw-w64-i686 wine1.6 bc" RUN_TESTS=true GOAL="install" BITCOIN_CONFIG="--enable-reduce-exports" -# 32-bit + dash - - HOST=i686-pc-linux-gnu PACKAGES="g++-multilib bc python3-zmq" DEP_OPTS="NO_QT=1" RUN_TESTS=true GOAL="install" BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports LDFLAGS=-static-libstdc++" USE_SHELL="/bin/dash" + - stage: test + env: >- + HOST=i686-w64-mingw32 + DPKG_ADD_ARCH="i386" + DEP_OPTS="NO_QT=1" + PACKAGES="python3 nsis g++-mingw-w64-i686 wine-binfmt wine32" + GOAL="install" + BITCOIN_CONFIG="--enable-reduce-exports" # Win64 - - HOST=x86_64-w64-mingw32 DPKG_ADD_ARCH="i386" DEP_OPTS="NO_QT=1" PACKAGES="python3 nsis g++-mingw-w64-x86-64 wine1.6 bc" RUN_TESTS=true GOAL="install" BITCOIN_CONFIG="--enable-reduce-exports" + - stage: test + env: >- + HOST=x86_64-w64-mingw32 + DEP_OPTS="NO_QT=1" + PACKAGES="python3 nsis g++-mingw-w64-x86-64 wine-binfmt wine64" + GOAL="install" + BITCOIN_CONFIG="--enable-reduce-exports" +# 32-bit + dash + - stage: test + env: >- + HOST=i686-pc-linux-gnu + PACKAGES="g++-multilib python3-zmq" + DEP_OPTS="NO_QT=1" + GOAL="install" + BITCOIN_CONFIG="--enable-zmq --enable-glibc-back-compat --enable-reduce-exports LDFLAGS=-static-libstdc++" + CONFIG_SHELL="/bin/dash" # x86_64 Linux (uses qt5 dev package instead of depends Qt to speed up build and avoid timeout) - - HOST=x86_64-unknown-linux-gnu PACKAGES="python3-zmq qtbase5-dev qttools5-dev-tools protobuf-compiler libdbus-1-dev libharfbuzz-dev" DEP_OPTS="NO_QT=1 NO_UPNP=1 DEBUG=1 ALLOW_HOST_PACKAGES=1" RUN_TESTS=true GOAL="install" BITCOIN_CONFIG="--enable-zmq --with-gui=qt5 --enable-glibc-back-compat --enable-reduce-exports CPPFLAGS=-DDEBUG_LOCKORDER" + - stage: test + env: >- + HOST=x86_64-unknown-linux-gnu + PACKAGES="python3-zmq qtbase5-dev qttools5-dev-tools protobuf-compiler libdbus-1-dev libharfbuzz-dev libprotobuf-dev" + DEP_OPTS="NO_QT=1 NO_UPNP=1 DEBUG=1 ALLOW_HOST_PACKAGES=1" + GOAL="install" + BITCOIN_CONFIG="--enable-zmq --with-gui=qt5 --enable-glibc-back-compat --enable-reduce-exports --enable-debug CXXFLAGS=\"-g0 -O2\"" +# x86_64 Linux (no depends, only system libs) + - stage: test + env: >- + HOST=x86_64-unknown-linux-gnu + PACKAGES="python3-zmq qtbase5-dev qttools5-dev-tools libssl1.0-dev libevent-dev bsdmainutils libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-test-dev libboost-thread-dev libdb5.3++-dev libminiupnpc-dev libzmq3-dev libprotobuf-dev protobuf-compiler libqrencode-dev" + NO_DEPENDS=1 + GOAL="install" + BITCOIN_CONFIG="--enable-zmq --with-incompatible-bdb --enable-glibc-back-compat --enable-reduce-exports --with-gui=qt5 CPPFLAGS=-DDEBUG_LOCKORDER" +# x86_64 Linux (sanitizers) + - stage: test + env: >- + HOST=x86_64-unknown-linux-gnu + PACKAGES="clang python3-zmq qtbase5-dev qttools5-dev-tools libssl1.0-dev libevent-dev bsdmainutils libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-test-dev libboost-thread-dev libdb5.3++-dev libminiupnpc-dev libzmq3-dev libprotobuf-dev protobuf-compiler libqrencode-dev" + NO_DEPENDS=1 + 
+        RUN_BENCH=true
+        RUN_FUNCTIONAL_TESTS=false # Disabled for now, can be combined with the other x86_64 linux NO_DEPENDS job when functional tests pass the sanitizers
+        GOAL="install"
+        BITCOIN_CONFIG="--enable-zmq --with-incompatible-bdb --enable-glibc-back-compat --enable-reduce-exports --with-gui=qt5 CPPFLAGS=-DDEBUG_LOCKORDER --with-sanitizers=undefined CC=clang CXX=clang++"
 # x86_64 Linux, No wallet
-    - HOST=x86_64-unknown-linux-gnu PACKAGES="python3" DEP_OPTS="NO_WALLET=1" RUN_TESTS=true GOAL="install" BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports"
+    - stage: test
+      env: >-
+        HOST=x86_64-unknown-linux-gnu
+        PACKAGES="python3"
+        DEP_OPTS="NO_WALLET=1"
+        GOAL="install"
+        BITCOIN_CONFIG="--enable-glibc-back-compat --enable-reduce-exports"
 # Cross-Mac
-    - HOST=x86_64-apple-darwin11 PACKAGES="cmake imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools python-dev" BITCOIN_CONFIG="--enable-gui --enable-reduce-exports" OSX_SDK=10.11 GOAL="deploy"
+    - stage: test
+      env: >-
+        HOST=x86_64-apple-darwin14
+        PACKAGES="cmake imagemagick libcap-dev librsvg2-bin libz-dev libbz2-dev libtiff-tools python-dev python3-setuptools-git"
+        OSX_SDK=10.11
+        RUN_UNIT_TESTS=false
+        RUN_FUNCTIONAL_TESTS=false
+        GOAL="all deploy"
+        BITCOIN_CONFIG="--enable-gui --enable-reduce-exports --enable-werror"
 
-before_install:
-    - export PATH=$(echo $PATH | tr ':' "\n" | sed '/\/opt\/python/d' | tr "\n" ":" | sed "s|::|:|g")
-install:
-    - if [ -n "$DPKG_ADD_ARCH" ]; then sudo dpkg --add-architecture "$DPKG_ADD_ARCH" ; fi
-    - if [ -n "$PACKAGES" ]; then travis_retry sudo apt-get update; fi
-    - if [ -n "$PACKAGES" ]; then travis_retry sudo apt-get install --no-install-recommends --no-upgrade -qq $PACKAGES; fi
-before_script:
-    - if [ "$TRAVIS_EVENT_TYPE" = "pull_request" ]; then contrib/devtools/commit-script-check.sh $TRAVIS_COMMIT_RANGE; fi
-    - unset CC; unset CXX
-    - if [ "$CHECK_DOC" = 1 ]; then contrib/devtools/check-doc.py; fi
-    - mkdir -p depends/SDKs depends/sdk-sources
-    - if [ -n "$OSX_SDK" -a ! -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]; then curl --location --fail $SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz -o depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz; fi
-    - if [ -n "$OSX_SDK" -a -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]; then tar -C depends/SDKs -xf depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz; fi
-    - make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
-script:
-    - if [ "$CHECK_DOC" = 1 -a "$TRAVIS_REPO_SLUG" = "bitcoin/bitcoin" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then while read LINE; do travis_retry gpg --keyserver hkp://subset.pool.sks-keyservers.net --recv-keys $LINE; done < contrib/verify-commits/trusted-keys; fi
-    - if [ "$CHECK_DOC" = 1 -a "$TRAVIS_REPO_SLUG" = "bitcoin/bitcoin" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then git fetch --unshallow; fi
-    - if [ "$CHECK_DOC" = 1 -a "$TRAVIS_REPO_SLUG" = "bitcoin/bitcoin" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then contrib/verify-commits/verify-commits.sh; fi
-    - export TRAVIS_COMMIT_LOG=`git log --format=fuller -1`
-    - if [ -n "$USE_SHELL" ]; then export CONFIG_SHELL="$USE_SHELL"; fi
-    - OUTDIR=$BASE_OUTDIR/$TRAVIS_PULL_REQUEST/$TRAVIS_JOB_NUMBER-$HOST
-    - BITCOIN_CONFIG_ALL="--disable-dependency-tracking --prefix=$TRAVIS_BUILD_DIR/depends/$HOST --bindir=$OUTDIR/bin --libdir=$OUTDIR/lib"
-    - depends/$HOST/native/bin/ccache --max-size=$CCACHE_SIZE
-    - test -n "$USE_SHELL" && eval '"$USE_SHELL" -c "./autogen.sh"' || ./autogen.sh
-    - mkdir build && cd build
-    - ../configure --cache-file=config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
-    - make distdir VERSION=$HOST
-    - cd bitzeny-$HOST
-    - ./configure --cache-file=../config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
&& make $GOAL V=1 ; false ) - - export LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib - - export RUN_TESTS=false # TODO: remove this line if broken tests are fixed - - if [ "$RUN_TESTS" = "true" ]; then travis_wait 30 make $MAKEJOBS check VERBOSE=1; fi - - if [ "$TRAVIS_EVENT_TYPE" = "cron" ]; then extended="--extended --exclude pruning,dbcrash"; fi - - if [ "$RUN_TESTS" = "true" ]; then test/functional/test_runner.py --coverage --quiet ${extended}; fi -after_success: - - AUTHOR=`git log -1 | grep 'Author:' | awk '{$1="";$NF="";print $0}' | sed -e 's/^[ ]*//g'` - - test -n $DISCORD_WEBHOOK_URL && echo '{"embeds":[{"title":"['$TRAVIS_JOB_NUMBER':'$HOST'] Build '$TRAVIS_BUILD_NUMBER' succeeded","type":"rich","description":"[`'${TRAVIS_COMMIT:0:7}'`](https://github.com/'$TRAVIS_REPO_SLUG'/commit/'$TRAVIS_COMMIT') '$TRAVIS_COMMIT_MESSAGE' - '$AUTHOR'","url":"https://travis-ci.org/'$TRAVIS_REPO_SLUG'/builds/'$TRAVIS_BUILD_ID'","timestamp":"'`date --utc +%FT%TZ`'","color":2932302,"footer":{"text":"Travis CI"}}]}' | curl -A 'Travis CI' -H 'Content-Type:application/json' -d @- $DISCORD_WEBHOOK_URL -after_failure: - - AUTHOR=`git log -1 | grep 'Author:' | awk '{$1="";$NF="";print $0}' | sed -e 's/^[ ]*//g'` - - test -n $DISCORD_WEBHOOK_URL && echo '{"embeds":[{"title":"['$TRAVIS_JOB_NUMBER':'$HOST'] Build '$TRAVIS_BUILD_NUMBER' failed","type":"rich","description":"[`'${TRAVIS_COMMIT:0:7}'`](https://github.com/'$TRAVIS_REPO_SLUG'/commit/'$TRAVIS_COMMIT') '$TRAVIS_COMMIT_MESSAGE' - '$AUTHOR'","url":"https://travis-ci.org/'$TRAVIS_REPO_SLUG'/builds/'$TRAVIS_BUILD_ID'","timestamp":"'`date --utc +%FT%TZ`'","color":13313073,"footer":{"text":"Travis CI"}}]}' | curl -A 'Travis CI' -H 'Content-Type:application/json' -d @- $DISCORD_WEBHOOK_URL -after_script: - - echo $TRAVIS_COMMIT_RANGE - - echo $TRAVIS_COMMIT_LOG diff --git a/.travis/lint_04_install.sh b/.travis/lint_04_install.sh new file mode 100755 index 000000000..e473b9c19 --- /dev/null +++ b/.travis/lint_04_install.sh @@ -0,0 +1,12 @@ + +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +export LC_ALL=C + +travis_retry pip install codespell +travis_retry pip install flake8 + diff --git a/.travis/lint_05_before_script.sh b/.travis/lint_05_before_script.sh new file mode 100755 index 000000000..153a13add --- /dev/null +++ b/.travis/lint_05_before_script.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +export LC_ALL=C + +git fetch --unshallow + diff --git a/.travis/lint_06_script.sh b/.travis/lint_06_script.sh new file mode 100755 index 000000000..c494b21a3 --- /dev/null +++ b/.travis/lint_06_script.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +export LC_ALL=C + +if [ "$TRAVIS_EVENT_TYPE" = "pull_request" ]; then + test/lint/commit-script-check.sh $TRAVIS_COMMIT_RANGE +fi + +test/lint/git-subtree-check.sh src/crypto/ctaes +test/lint/git-subtree-check.sh src/secp256k1 +test/lint/git-subtree-check.sh src/univalue +test/lint/git-subtree-check.sh src/leveldb +test/lint/check-doc.py +test/lint/check-rpc-mappings.py . 
+test/lint/lint-all.sh
+
+if [ "$TRAVIS_REPO_SLUG" = "bitcoin/bitcoin" -a "$TRAVIS_EVENT_TYPE" = "cron" ]; then
+  while read -r LINE; do travis_retry gpg --keyserver hkp://subset.pool.sks-keyservers.net --recv-keys $LINE; done < contrib/verify-commits/trusted-keys &&
+  travis_wait 50 contrib/verify-commits/verify-commits.py;
+fi
+
diff --git a/.travis/test_03_before_install.sh b/.travis/test_03_before_install.sh
new file mode 100755
index 000000000..d091a67ca
--- /dev/null
+++ b/.travis/test_03_before_install.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+export LC_ALL=C.UTF-8
+
+PATH=$(echo $PATH | tr ':' "\n" | sed '/\/opt\/python/d' | tr "\n" ":" | sed "s|::|:|g")
+export PATH
+
+BEGIN_FOLD () {
+  echo ""
+  CURRENT_FOLD_NAME=$1
+  echo "travis_fold:start:${CURRENT_FOLD_NAME}"
+}
+
+END_FOLD () {
+  RET=$?
+  echo "travis_fold:end:${CURRENT_FOLD_NAME}"
+  if [ $RET != 0 ]; then
+    echo "${CURRENT_FOLD_NAME} failed with status code ${RET}"
+  fi
+}
+
diff --git a/.travis/test_04_install.sh b/.travis/test_04_install.sh
new file mode 100755
index 000000000..ef595287b
--- /dev/null
+++ b/.travis/test_04_install.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+export LC_ALL=C.UTF-8
+
+travis_retry docker pull "$DOCKER_NAME_TAG"
+env | grep -E '^(CCACHE_|WINEDEBUG|LC_ALL|BOOST_TEST_RANDOM|CONFIG_SHELL)' | tee /tmp/env
+if [[ $HOST = *-mingw32 ]]; then
+  DOCKER_ADMIN="--cap-add SYS_ADMIN"
+fi
+DOCKER_ID=$(docker run $DOCKER_ADMIN -idt --mount type=bind,src=$TRAVIS_BUILD_DIR,dst=$TRAVIS_BUILD_DIR --mount type=bind,src=$CCACHE_DIR,dst=$CCACHE_DIR -w $TRAVIS_BUILD_DIR --env-file /tmp/env $DOCKER_NAME_TAG)
+
+DOCKER_EXEC () {
+  docker exec $DOCKER_ID bash -c "cd $PWD && $*"
+}
+
+if [ -n "$DPKG_ADD_ARCH" ]; then
+  DOCKER_EXEC dpkg --add-architecture "$DPKG_ADD_ARCH"
+fi
+
+travis_retry DOCKER_EXEC apt-get update
+travis_retry DOCKER_EXEC apt-get install --no-install-recommends --no-upgrade -qq $PACKAGES $DOCKER_PACKAGES
+
diff --git a/.travis/test_05_before_script.sh b/.travis/test_05_before_script.sh
new file mode 100755
index 000000000..70caee979
--- /dev/null
+++ b/.travis/test_05_before_script.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+export LC_ALL=C.UTF-8
+
+DOCKER_EXEC echo \> \$HOME/.bitcoin # Make sure default datadir does not exist and is never read by creating a dummy file
+
+mkdir -p depends/SDKs depends/sdk-sources
+
+if [ -n "$OSX_SDK" -a ! -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]; then
+  curl --location --fail $SDK_URL/MacOSX${OSX_SDK}.sdk.tar.gz -o depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
+fi
+if [ -n "$OSX_SDK" -a -f depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz ]; then
+  tar -C depends/SDKs -xf depends/sdk-sources/MacOSX${OSX_SDK}.sdk.tar.gz
+fi
+if [[ $HOST = *-mingw32 ]]; then
+  DOCKER_EXEC update-alternatives --set $HOST-g++ \$\(which $HOST-g++-posix\)
+fi
+if [ -z "$NO_DEPENDS" ]; then
+  DOCKER_EXEC CONFIG_SHELL= make $MAKEJOBS -C depends HOST=$HOST $DEP_OPTS
+fi
+
diff --git a/.travis/test_06_script.sh b/.travis/test_06_script.sh
new file mode 100755
index 000000000..1344ec2d4
--- /dev/null
+++ b/.travis/test_06_script.sh
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+export LC_ALL=C.UTF-8
+
+TRAVIS_COMMIT_LOG=$(git log --format=fuller -1)
+export TRAVIS_COMMIT_LOG
+
+OUTDIR=$BASE_OUTDIR/$TRAVIS_PULL_REQUEST/$TRAVIS_JOB_NUMBER-$HOST
+BITCOIN_CONFIG_ALL="--disable-dependency-tracking --prefix=$TRAVIS_BUILD_DIR/depends/$HOST --bindir=$OUTDIR/bin --libdir=$OUTDIR/lib"
+if [ -z "$NO_DEPENDS" ]; then
+  DOCKER_EXEC ccache --max-size=$CCACHE_SIZE
+fi
+
+BEGIN_FOLD autogen
+if [ -n "$CONFIG_SHELL" ]; then
+  DOCKER_EXEC "$CONFIG_SHELL" -c "./autogen.sh"
+else
+  DOCKER_EXEC ./autogen.sh
+fi
+END_FOLD
+
+mkdir build
+cd build || (echo "could not enter build directory"; exit 1)
+
+BEGIN_FOLD configure
+DOCKER_EXEC ../configure --cache-file=config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
+END_FOLD
+
+BEGIN_FOLD distdir
+DOCKER_EXEC make distdir VERSION=$HOST
+END_FOLD
+
+cd "bitcoin-$HOST" || (echo "could not enter distdir bitcoin-$HOST"; exit 1)
+
+BEGIN_FOLD configure
+DOCKER_EXEC ./configure --cache-file=../config.cache $BITCOIN_CONFIG_ALL $BITCOIN_CONFIG || ( cat config.log && false)
+END_FOLD
+
+BEGIN_FOLD build
+DOCKER_EXEC make $MAKEJOBS $GOAL || ( echo "Build failure. Verbose build follows." && DOCKER_EXEC make $GOAL V=1 ; false )
+END_FOLD
+
+if [ "$RUN_UNIT_TESTS" = "true" ]; then
+  BEGIN_FOLD unit-tests
+  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1
+  END_FOLD
+fi
+
+if [ "$RUN_BENCH" = "true" ]; then
+  BEGIN_FOLD bench
+  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib $OUTDIR/bin/bench_bitcoin -scaling=0.001
+  END_FOLD
+fi
+
+if [ "$TRAVIS_EVENT_TYPE" = "cron" ]; then
+  extended="--extended --exclude feature_pruning"
+fi
+
+if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
+  BEGIN_FOLD functional-tests
+  DOCKER_EXEC test/functional/test_runner.py --combinedlogslen=4000 --coverage --quiet --failfast ${extended}
+  END_FOLD
+fi
+
diff --git a/test/README.md b/test/README.md
index 868eb667a..a5ba732bc 100644
--- a/test/README.md
+++ b/test/README.md
@@ -5,10 +5,10 @@ etc.
 
 There are currently two sets of tests in this directory:
 
-- [functional](/test/functional) which test the functionality of
+- [functional](/test/functional) which test the functionality of
 bitcoind and bitcoin-qt by interacting with them through the RPC and P2P
 interfaces.
-- [util](test/util) which tests the bitcoin utilities, currently only
+- [util](/test/util) which tests the bitcoin utilities, currently only
 bitcoin-tx.
 
 The util tests are run as part of `make check` target. The functional
@@ -33,13 +33,13 @@ The ZMQ functional test requires a python ZMQ library. To install it:
 
 Individual tests can be run by directly calling the test script, eg:
 
 ```
-test/functional/replace-by-fee.py
+test/functional/feature_rbf.py
 ```
 
 or can be run through the test_runner harness, eg:
 
 ```
-test/functional/test_runner.py replace-by-fee.py
+test/functional/test_runner.py feature_rbf.py
 ```
 
 You can run any combination (incl. duplicates) of tests by calling:
@@ -177,11 +177,11 @@ Note: gdb attach step may require `sudo`
 
 ### Util tests
 
-Util tests can be run locally by running `test/util/bitcoin-util-test.py`.
+Util tests can be run locally by running `test/util/bitcoin-util-test.py`.
 Use the `-v` option for verbose output.
 
 # Writing functional tests
 
 You are encouraged to write functional tests for new or existing features.
-Further information about the functional test framework and individual
+Further information about the functional test framework and individual
 tests is found in [test/functional](/test/functional).
diff --git a/test/config.ini.in b/test/config.ini.in
index 35ee092be..28abee2a3 100644
--- a/test/config.ini.in
+++ b/test/config.ini.in
@@ -9,10 +9,11 @@ SRCDIR=@abs_top_srcdir@
 BUILDDIR=@abs_top_builddir@
 EXEEXT=@EXEEXT@
+RPCAUTH=@abs_top_srcdir@/share/rpcauth/rpcauth.py
 
 [components]
 # Which components are enabled. These are commented out by `configure` if they were disabled when running config.
 @ENABLE_WALLET_TRUE@ENABLE_WALLET=true
-@BUILD_BITCOIN_UTILS_TRUE@ENABLE_UTILS=true
+@BUILD_BITCOIN_CLI_TRUE@ENABLE_CLI=true
 @BUILD_BITCOIND_TRUE@ENABLE_BITCOIND=true
 @ENABLE_ZMQ_TRUE@ENABLE_ZMQ=true
diff --git a/test/functional/README.md b/test/functional/README.md
index 2558bd017..d40052ac9 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -17,15 +17,32 @@ don't have test cases for.
 
 #### Style guidelines
 
-- Where possible, try to adhere to [PEP-8 guidelines]([https://www.python.org/dev/peps/pep-0008/)
+- Where possible, try to adhere to [PEP-8 guidelines](https://www.python.org/dev/peps/pep-0008/)
 - Use a python linter like flake8 before submitting PRs to catch common style
   nits (eg trailing whitespace, unused imports, etc)
+- See [the python lint script](/test/lint/lint-python.sh) that checks for violations that
+  could lead to bugs and issues in the test code.
 - Avoid wildcard imports where possible
 - Use a module-level docstring to describe what the test is testing, and how it
   is testing it.
 - When subclassing the BitcoinTestFramework, place overrides for the
   `set_test_params()`, `add_options()` and `setup_xxxx()` methods at the top of
   the subclass, then locally-defined helper methods, then the `run_test()` method.
+- Use `'{}'.format(x)` for string formatting, not `'%s' % x`.
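+
+A minimal sketch of that layout (a hypothetical test, not one of the tests in
+this tree; the helper and its assertion are illustrative only):
+
+```python
+#!/usr/bin/env python3
+"""Illustrate the documented subclass layout; tests nothing in particular."""
+from test_framework.test_framework import BitcoinTestFramework
+
+class ExampleStructureTest(BitcoinTestFramework):
+    # Overrides for set_test_params(), add_options() and setup_xxxx() first.
+    def set_test_params(self):
+        self.num_nodes = 1
+
+    # Locally-defined helper methods next.
+    def assert_height(self, expected):
+        assert self.nodes[0].getblockcount() == expected
+
+    # The test logic itself last.
+    def run_test(self):
+        self.nodes[0].generate(5)
+        self.assert_height(205)  # the cached regtest chain starts at height 200
+
+if __name__ == '__main__':
+    ExampleStructureTest().main()
+```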
+
+#### Naming guidelines
+
+- Name the test `<area>_<test_name>.py`, where area can be one of the following:
+    - `feature` for tests for full features that aren't wallet/mining/mempool, eg `feature_rbf.py`
+    - `interface` for tests for other interfaces (REST, ZMQ, etc), eg `interface_rest.py`
+    - `mempool` for tests for mempool behaviour, eg `mempool_reorg.py`
+    - `mining` for tests for mining features, eg `mining_prioritisetransaction.py`
+    - `p2p` for tests that explicitly test the p2p interface, eg `p2p_disconnect_ban.py`
+    - `rpc` for tests for individual RPC methods or features, eg `rpc_listtransactions.py`
+    - `wallet` for tests for wallet features, eg `wallet_keypool.py`
+- Use an underscore to separate words
+    - exception: for tests for specific RPCs or command line options which don't include underscores, name the test after the exact RPC or argument name, eg `rpc_decodescript.py`, not `rpc_decode_script.py`
+- Don't use the redundant word `test` in the name, eg `interface_zmq.py`, not `interface_zmq_test.py`
 
 #### General test-writing advice
 
@@ -43,6 +60,11 @@ don't have test cases for.
 - When calling RPCs with lots of arguments, consider using named keyword
   arguments instead of positional arguments to make the intent of the call
   clear to readers.
+- Many of the core test framework classes such as `CBlock` and `CTransaction`
+  don't allow new attributes to be added to their objects at runtime like
+  typical Python objects allow. This helps prevent unpredictable side effects
+  from typographical errors or usage of the objects outside of their intended
+  purpose.
 
 #### RPC and P2P definitions
 
@@ -55,71 +77,21 @@ P2P messages. These can be found in the following source files:
 
 #### Using the P2P interface
 
-- `mininode.py` contains all the definitions for objects that pass
+- `messages.py` contains all the definitions for objects that pass
 over the network (`CBlock`, `CTransaction`, etc, along with the network-level
 wrappers for them, `msg_block`, `msg_tx`, etc).
 
 - P2P tests have two threads. One thread handles all network communication
-with the bitcoind(s) being tested (using python's asyncore package); the other
+with the bitcoind(s) being tested in a callback-based event loop; the other
 implements the test logic.
 
-- `NodeConn` is the class used to connect to a bitcoind. If you implement
-a callback class that derives from `NodeConnCB` and pass that to the
-`NodeConn` object, your code will receive the appropriate callbacks when
-events of interest arrive.
-
-- Call `NetworkThread.start()` after all `NodeConn` objects are created to
-start the networking thread. (Continue with the test logic in your existing
-thread.)
+- `P2PConnection` is the class used to connect to a bitcoind. `P2PInterface`
+contains the higher level logic for processing P2P payloads and connecting to
+the Bitcoin Core node application logic. For custom behaviour, subclass the
+P2PInterface object and override the callback methods.
 
 - Can be used to write tests where specific P2P protocol behavior is tested.
-Examples tests are `p2p-accept-block.py`, `p2p-compactblocks.py`.
-
-#### Comptool
-
-- Comptool is a Testing framework for writing tests that compare the block/tx acceptance
-behavior of a bitcoind against 1 or more other bitcoind instances. It should not be used
-to write static tests with known outcomes, since that type of test is easier to write and
-maintain using the standard BitcoinTestFramework.
-
-- Set the `num_nodes` variable (defined in `ComparisonTestFramework`) to start up
-1 or more nodes.  If using 1 node, then `--testbinary` can be used as a command line
-option to change the bitcoind binary used by the test.  If using 2 or more nodes,
-then `--refbinary` can be optionally used to change the bitcoind that will be used
-on nodes 2 and up.
-
-- Implement a (generator) function called `get_tests()` which yields `TestInstance`s.
-Each `TestInstance` consists of:
-  - A list of `[object, outcome, hash]` entries
-    * `object` is a `CBlock`, `CTransaction`, or
-    `CBlockHeader`.  `CBlock`'s and `CTransaction`'s are tested for
-    acceptance.  `CBlockHeader`s can be used so that the test runner can deliver
-    complete headers-chains when requested from the bitcoind, to allow writing
-    tests where blocks can be delivered out of order but still processed by
-    headers-first bitcoind's.
-    * `outcome` is `True`, `False`, or `None`.  If `True`
-    or `False`, the tip is compared with the expected tip -- either the
-    block passed in, or the hash specified as the optional 3rd entry.  If
-    `None` is specified, then the test will compare all the bitcoind's
-    being tested to see if they all agree on what the best tip is.
-    * `hash` is the block hash of the tip to compare against. Optional to
-    specify; if left out then the hash of the block passed in will be used as
-    the expected tip.  This allows for specifying an expected tip while testing
-    the handling of either invalid blocks or blocks delivered out of order,
-    which complete a longer chain.
-  - `sync_every_block`: `True/False`.  If `False`, then all blocks
-    are inv'ed together, and the test runner waits until the node receives the
-    last one, and tests only the last block for tip acceptance using the
-    outcome and specified tip.  If `True`, then each block is tested in
-    sequence and synced (this is slower when processing many blocks).
-  - `sync_every_transaction`: `True/False`.  Analogous to
-    `sync_every_block`, except if the outcome on the last tx is "None",
-    then the contents of the entire mempool are compared across all bitcoind
-    connections.  If `True` or `False`, then only the last tx's
-    acceptance is tested against the given outcome.
-
-- For examples of tests written in this framework, see
-  `invalidblockrequest.py` and `p2p-fullblocktest.py`.
+Example tests are `p2p_unrequested_blocks.py`, `p2p_compactblocks.py`.
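+
+A rough sketch of that override pattern (hypothetical `InvLogger` and test
+class; `add_p2p_connection()`, `sync_with_ping()` and the `on_*` callbacks are
+the framework entry points assumed here):
+
+```python
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+
+class InvLogger(P2PInterface):
+    """Record every inv announcement the node sends us."""
+    def __init__(self):
+        super().__init__()
+        self.invs_received = []
+
+    def on_inv(self, message):
+        # Run on the network event loop whenever an inv message arrives.
+        self.invs_received.append(message)
+
+class P2PExampleTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.num_nodes = 1
+
+    def run_test(self):
+        p2p = self.nodes[0].add_p2p_connection(InvLogger())
+        self.nodes[0].generate(1)  # trigger a block announcement
+        p2p.sync_with_ping()       # wait until our callbacks have run
+        assert len(p2p.invs_received) > 0
+
+if __name__ == '__main__':
+    P2PExampleTest().main()
+```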
 
 ### test-framework modules
 
@@ -135,15 +107,9 @@ Generally useful functions.
 
 #### [test_framework/mininode.py](test_framework/mininode.py)
 Basic code to support P2P connectivity to a bitcoind.
 
-#### [test_framework/comptool.py](test_framework/comptool.py)
-Framework for comparison-tool style, P2P tests.
-
 #### [test_framework/script.py](test_framework/script.py)
 Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
 
-#### [test_framework/blockstore.py](test_framework/blockstore.py)
-Implements disk-backed block and tx storage.
-
 #### [test_framework/key.py](test_framework/key.py)
 Wrapper around OpenSSL EC_Key (originally from python-bitcoinlib)
diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py
index 3ca74ea35..3230d5cb6 100755
--- a/test/functional/combine_logs.py
+++ b/test/functional/combine_logs.py
@@ -13,7 +13,7 @@ import sys
 
 # Matches on the date format at the start of the log event
-TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}")
+TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?Z")
 
 LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
 
@@ -25,10 +25,6 @@ def main():
     parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2')
     args, unknown_args = parser.parse_known_args()
 
-    if args.color and os.name != 'posix':
-        print("Color output requires posix terminal colors.")
-        sys.exit(1)
-
     if args.html and args.color:
         print("Only one out of --color or --html should be specified")
         sys.exit(1)
@@ -63,7 +59,7 @@ def get_log_events(source, logfile):
     Log events may be split over multiple lines. We use the timestamp
     regex match as the marker for a new log event."""
     try:
-        with open(logfile, 'r') as infile:
+        with open(logfile, 'r', encoding='utf-8') as infile:
             event = ''
             timestamp = ''
             for line in infile:
@@ -75,11 +71,17 @@ def get_log_events(source, logfile):
                 if time_match:
                     if event:
                         yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
-                    event = line
                     timestamp = time_match.group()
+                    if time_match.group(1) is None:
+                        # timestamp does not have microseconds. Add zeroes.
+                        timestamp_micro = timestamp.replace("Z", ".000000Z")
+                        line = line.replace(timestamp, timestamp_micro)
+                        timestamp = timestamp_micro
+                    event = line
                 # if it doesn't have a timestamp, it's a continuation line of the previous log.
                 else:
-                    event += "\n" + line
+                    # Add the line. Prefix with space equivalent to the source + timestamp so log lines are aligned
+                    event += " " + line
             # Flush the final event
             yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
     except FileNotFoundError:
@@ -98,7 +100,11 @@ def print_logs(log_events, color=False, html=False):
             colors["reset"] = "\033[0m"  # Reset font color
 
         for event in log_events:
-            print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, event.event, colors["reset"]))
+            lines = event.event.splitlines()
+            print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, lines[0], colors["reset"]))
+            if len(lines) > 1:
+                for line in lines[1:]:
+                    print("{0}{1}{2}".format(colors[event.source.rstrip()], line, colors["reset"]))
 
     else:
         try:
diff --git a/test/functional/create_cache.py b/test/functional/create_cache.py
index 7d4d1a529..edf16fa47 100755
--- a/test/functional/create_cache.py
+++ b/test/functional/create_cache.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright (c) 2016 The Bitcoin Core developers
+# Copyright (c) 2016-2018 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Create a blockchain cache.
@@ -16,6 +16,7 @@ class CreateCache(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 0 + self.supports_cli = True def setup_network(self): pass diff --git a/test/functional/data/rpc_getblockstats.json b/test/functional/data/rpc_getblockstats.json new file mode 100644 index 000000000..b8cabe1e5 --- /dev/null +++ b/test/functional/data/rpc_getblockstats.json @@ -0,0 +1,222 @@ +{ + "blocks": [ + "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff7f20020000000101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000", + "0000002006226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f1cd16b94f20a8a3dda91027c888025f2ec1a07ddcb2786bdff5916e66c00406f194ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03510101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002014341131c18d3b3aa30056a0f7a97c9ac852d3fd0ec9c76f7a25e83c01e7f821bf83574fb606f25c59200c844443201faf923ef5284fd4401f3104a323c601491a4ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03520101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002078616da95299bd42cd8f813c8043816ec5741de466be3162e16bfff471808461f671e694afaf534d37df484f1990fc19a65fc26964b38141b7f8ecf61b8a50241a4ae75affff7f200500000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03530101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020a08613f37d305835a3a1553e77a479eba0f34c06c52e429ece54f5973cd77a7086a1efcaf75f1cd5be2c9deb6a7850225757a2cfc3031a91cc1330b3af4acc891b4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03540101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000203b304fa1ce0505c2366982939ac148d9124c5ac747cc9aea133cea9916484966305de0e8d049f2be65c68d64d2c45746def5a9b4fcb8e298692b53b83b4690241b4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03550101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020fbdf49978ec4f0b23704b6772a614336872587e29c463f375836ffd775248837fed9f3fdfc33f076c6663ae78070fad7263c1e24161f3ee1a4857b8931815e2c1b4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03560101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020c37548b9ca256b9ff17187d4d4309cf3143845b0a5811d3ca5427b2fddf000731a10985dfd473561c070c3527c3fe3941834cf51b3dfbacb501b44c69c9745ce1b4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03570101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020383dd3766c0675440f26370ad62d687e335ea3a650dec9b02fe544107cc1823a13b98696d41562945457d655f4c6921f736068f7a72afd1ad6b335f2857d16631c4ae75affff7f200400000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03580101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000207476dd96d81f53e63934ce28c9e89022e0f24d040ec3c838443e925fc3a2f230a94d0cbcefb4a151191dd7664153944d9eee3b7b46d4ba997f397ed2b72c3afe1c4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03590101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000209e425c73eea16cce98c0d47d6070aca29f0524eab4b97af84c386aa5322dd43055002f097e929bc6ad88ce869968e1b049aab7f6e45a5b869cf4349afd5d43e01c4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000202090e16a514fad40386413a100bbaa4fc14086a8d3501ac64c91fdef922e834a369e409444d0ec496eb0dd9a47f1fe81a7ab974bab28c50a912b994acf13b5f91c4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020777b767e42624c52775b331f19e81ba03be2f51a0608166cd5388c1a47d5e776473570bb9bba553a7db4a9a3083533027c54af1fea3ef6ef67757ef2255d64631c4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002076bb2bf3251a51ec367a42f8584043171a5d53157394cd776ebd017e2982127653d953aca3e2217f56533c043c07b9a926a30672ebce2562f1d06a6dc5044e7b1c4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020e6d7f02292655b73fc1f1958b09633ba07265d71d2a2784060b354cbbf1900202e9c9b02b63170002a94a0c9d8d787e2faa4c074a1ebdeb2855555347321dd101d4ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020d61f077b0ed326e17f0a3d5af3fa876b72b434a252c9c3248d20130ed744287fcb10da470222dd29c7a07e2da7eb25d6499ed3919676df89cc630bd1b23fbb411d4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff035f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000205efa9741cf51533ed6e07a97c71768372f53ca9c6df83894d64fe94c718eee23a207441e79ecdcf99ef3326385f5f675e2dea84c85ab8973219c63f92847ed5b1d4ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03600101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"00000020368386b0a0f46b2a2c4648eb9cb5dd1380c4f22e437e0bd49420670993361e5b9026632c2ddbb4b31b3c3118c51e43ea4d78e05c0aca0956278ead26a263d1521d4ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401110101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020dd0a1594bbff6345a3e34f326e5ee605c855f5e0a5c363fc39615a8b1539b736200b51297dbee4aadf9b536cd2afe7617651e0a1d0f0610f436518a2a4dc54621d4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401120101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020f302a1092709dc27a32d7229d391b90824a75828692c4bb2ca8f0ca5c88b3613c2e18797ffe8b367336338f90b2cf8c3f66277eb1e1ddbe18c052977294f10691d4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401130101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000203dcc77aac703a8cd0e799384b74383c1d5f236426f77d516694607fc88fe85581276a20ceb98d02e6355c9dba4312e2fdc9832f4302cc307e1263f2df0aadd6a1e4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401140101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020dfe109704a0b2801aee4232c31fb744145a7c80dd91a7727e16d4057719d5c3730f8296243521d82d96ed75c5af800a722fc9dde2e02af95c8c9822190ba07b21e4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401150101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000201ef9ef2699bc36fd646bb9ba8629644bc98396122f6753710caf0315d7539f751382d3d85f17eb8b42cf17e54baa327886dcf6fa63207e097df8f9b84cc5422f1e4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401160101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"00000020834c91f23cd91b727be08b892f1c1a2f33c1e66d66f35607925fa1be4bca2c25b4145a73b1c71b945f5bb9ede3d8d95c9a3b12a0a81b7b14f440ec5146dd4ec71e4ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401170101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000205743202bf1e543a9be2a59b62be6a5a494511fab96968007b8d7199ea60a524697227ba473ceaf48d4f48ee17f8ee6cd2f1f5ddff03a641642ece240e7872f8f1e4ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401180101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000207469f5c1841bf57275d82db23e5a8f0e8512af1eb10119c238519cdd6cdced34fd96dc659a874b3f5d30fbe6ea421a6b9791dfce8450f8851e4b90d80f0f794e1e4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401190101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020b6338f55fcc473b744d53d675b4a83dcd80ddec9d02ad3323cf1ff50ac0412239d986ec20885d772fdc67803273aaec43871426ac93d3815846a8cd13dea5af11f4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020a19d9edf2d22415cf226b4e1416c8a3097e0af222efac2bfeab15fa1f07b3f24c18580c4004de6d6244a30ce431c4be3ca44731509fc6b11710c792efed5e9191f4ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000208f9f29a27424ec01ce77485617088506ca8faeef69300f0a474ad63ca5d32972d6049609fa3588d6ffab4d9d89a90636ac94c0ca1995f7768163abeb25dbf2bc1f4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"00000020fbd87d530a9ec3835ab579337fd16e512cec6c4779ab4d84e7256b3333dece28de1065c8c3d3d166e057139ac59af6f4f2c0d241b6269bbea6f61c5eff3dba431f4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020dc6ef4f436baab2d6880f242a2588313a2739ac694e30319344045ee318c9524d0ec7fbcaca30ce85392cb03b64015ece769afb50fd07db05c15ec49abf7d71c1f4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020386bf4cbb3708ae6345b9f2459bdd99d07422b05f9b005d2d4d1d3bf87d47359ebb22b3a15c8e94ddd8129527873b9bebfa10c54d11196961376efbcaad3c4681f4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04011f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020600e892f12ad82a23ce12684d3ffa0887eab5e3e97804fa651050b23366cc55ef2468e65c3d3cf49650657eb47d0b0b7949c71dcc0922eb824523157b7eff478204ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401200101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002068e4aec52a3f4e44279e3a65cd476237bdfcc2328390bb31b8a903f89ddfa70e8d669f61b469acb31b1d4ecdc238e6616a83a30644a5d06fc2ca1aad6449d09b204ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401210101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020cf2428ae9a5014b910275807f54a8bbbeec47d462f9d284ada60329b4955ff10cf83c44ddde39a709aef54fb302c7f1cb36db8fe7c3befce20dcc3729767518c204ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401220101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000205801eef8cca082407ce4798648c4d3ba0fc4dd2d4459eccfe5300c7960760d16cb3dd78a2f22fb88717a175e45c53d34f970b94ef9f7cd1b6c279294d427d163204ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401230101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002088320983a4bcb95b9f342994c6943c227f3102d3b16282f048ceb8e15748662a52d1207591a0a364bc9245a76e36530f147ec4d1b4e1917676b4071f542c3b19204ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401240101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020acd85b6d8087d3b6bd2a208a2e39b75da459c0e0eb14088075a23a2e043e8a4ed5a1754491f8180d293b42e6c04ec3f82e29c1f2600dc8607616f69a4a464e6e204ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401250101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204873bcb379f78da4497ce1e22f6bfb63537b89c8c522257a7b7bef74e515ed1b6b235faab048fa73a76c68fdbdde6a4ee7ebe0a3b7b23df24ce75dbd2cf49c33214ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401260101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020a87f6d13bae8e2e07996c3316e8e0da6aec7d1aee6b80aa5883018e4d136db3e9a498ff7d322ad93863e0a5318af7e7d0ed683fd2e4ecf523f2b7369106dbe4a214ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401270101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020d9b0618450a51c08f43187c479e20d351f0466464409bb3071dc0af7c51d65498a198cef23dddd2c4b93d9d3288ae922584e221a9ef1ded3dba5a2ad494d9237214ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401280101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"00000020167dff31847b8dcad472bb6bb7d0af53b245f0f1d4c9f83d4ce14a0f05d42d7f0f2638ffc0e6896230f28df1865ef133dccb1f027545c6a1177dffd1fecf8a01214ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401290101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000208236c04424573692504e777e179b9247e54622b118239311413812f13cefbf6e39a639143f599dc76208b2014de12a364716df2918af9186453e3676dca743d7214ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020fab7b55aeb59f63315dbc10784c55e55635a7600cc4f3b94a00003007e7fc90b4af016248e9908882f8a7f0bd8743c8da82da119446e8b02e4d3b8d1d938a3aa214ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020794b4160d8fd4ebe7611d0fc9d3e04f3038a485669f74075aa153852ed181121c577f4c0b7151f6d78a16e3c21ab291b53ced8a5c4f05a22caf24a25ed029d56224ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204a8acdb549d2ed922360ae0e81de6c913c3fd84b0de51abacbf97162a99f7c26c656b252d3259c33ffc7e5d403843accc6ace9b7e60e911e2347a6a0b0abd122224ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020dee6d00c7058b3422e4273c98c16181e04ea93116496a8442de546c2ee9fd86b550013f39e004b3829dc3717b17fcfedc87de9450315fcca540963119cb264c1224ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000202ebca471f5fb2226a790233c3d0bc323f73d935872f3e15b66bd5fdcb822101d7b68788ed61d3fb8cb746f627e09db2fc09d8a07672747709d92ae400e053e78224ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04012f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020ee500470fc1c71a82f2cbb9f8d5723bcdf57b8051fc458a8dbd8d0dbf60d0e40d1a0be0e50f3312d4830e3900186e5a6760d44006c164b4f0758218ef2b2de8e224ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401300101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000205057e8d8a7451d79325851dc8e0f4dfdb1dfaaab637509d9e61f0e064af5ee5c185221c53c0cf43261b3c238c0e8117da5d6ac60085615f7a3d8027726cb2143224ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401310101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000200907f01d9c5c872296796ca77feb62eb414cc080e13a93e59e181528bc19c336eaabacb1ebcbd20b26f6bacdc712ffe17d4c8131e7f99b9cac309c0683737c04234ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401320101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000209bd6a4ec962d9e6199c0a2f39481a7ecc322fedbd2320cbe7dc984c6ab958421fe7a5c7f2513a3c3de9ddf7b211f5bfe85675b31183c4bb98ae79ab28cd055ac234ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401330101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000202b8a6381b7c9476fd77be379a929863db6b7edd9858d6eca0f68a430dc87cc3a9c6c8b34bfadfdacbe95cb1d4ae5ce4e4f4ccf0300171d7a91cdc97f620c7b37234ae75affff7f200400000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401340101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000203d3e9a0b4decf12b4402a2178b60599311c8a9c7d50ece365a61ca29530da7056754ddf7d77a11cc8ce680a74fae01d851bda024fc9c51712c55b4a190caef24234ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401350101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002067fa05082d4f7a29a3985f30798940cd854c76a2b20e9560b2047f7753193f71ac61b8df17a8a63f099c8f55869301fc2a0aa37355a4a2f89078135ee72a1362234ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401360101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020249684272865fec3ad62ecb73dfd930500e32e475306c9e5d4b6d545e3687b0a48deaeebfafa213f0a560994b3f4000d5e2b93951d7e5be40073503877292dc7234ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401370101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204671db1df91098669bb03c3e8504d432da99853601366c7de7585bb8f23a6e1e2996a16c11a0f9ae87d937f566c8bbd919040528c1bf8dae4e22a8f0ac5f935a244ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401380101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000203046500cabfee552ed114c505279cd75c28faa811adcb5010c53c14df5de3216d265321a4168c70fff1c85fd83bd976cc03c8c1fb6567398cc24053e343ad137244ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401390101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020591533cf8dd1fa1872c7e6d62ef714b28886f38f7921ee614e13b748eaf923282a96287277f18b1113a9c3ac384fd7b43bccd0e45114908ca5396a76cbd736fd244ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000205286b5747d0244a55e1dce435fdbb9f300d7138fff3b16767bc09196eb00256c6795e3f372a2d5d137244a4fd72fd799e8de913f868f22269eaa628d7d2970a1244ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000209649b55f6a5ff0d5db73aeb7089fcce605fb9dd23971e577b73747ffab586f4edf84581240223e2d8912a6eca049e06aa46ddbf271af08e9ac9605505311418b244ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020fe7ff82c11ad3a3db3dc11a03a67e0b86b727d232c80f37209b028bc2689296c0da76fb756e1c9833f38b198cfe843ab820fbc0c38e30f8f858f6ffdbd64e834244ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020c02e6109b6aaf6643cac109ad5b5be7f7ec47c7993335bdceea6e0e490ae9067eb1fcee49ecc40a61477f934e3b9821f2cc7ada429fcca2cd645866742854c40254ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020956c46fce0adfc8a30d91c7b7f328f17c2a90771083884c4fdb7f24640598f6fa69c4e5971bd3b6cdc7e3ee98e03a969b28c3220fdc685cc2eb77293763ca4ba254ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04013f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000203eec7d0ea9c539f47e684eec9dba900e27c805b8200f237924c7df8065957726c83caf659fa341bf45f733a92cf76daf2cfb6bccbf969a2753f5f7d9cda4bd1e254ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401400101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000203c022b42283bf651ee4d536fe71b7e5382e9783d4a85f8bc159f00b97f16d82d312ac4f89f1b1496de576811f2ff17de44b512db0beddae59e030e2ce3eba4df254ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401410101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000205da9709dea4bea4f3229d0eed439a4820a0e817f9fbbcd8bb355cd8052702973403358080a2881b823a740f58b6b0c922b42189e06326478cc33390f2c704743254ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401420101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020447c49f664916cc13a839e27f7cbe0b09dea990dec71dd479b537ecfb771c7159ae6241727c2645ce00817909ba97d43447fe2e146bce55b7dccd5bcd27d2e3d254ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401430101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020ae3b214b1495953eea3b99af7ade4b8d22d615d598dd6c790c6576d6453ef35d37fbd3446f8431f00052f278c3e0359beba54a2f5064bc6be4990478fcf4e086264ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401440101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204cc68f9f571f59145e1f7505550d56da13a797fbc7e5a178dd7f6f9241d91f007f2400f7aa1b32b30bf869e4da86f75eaf2baae182efc45c42c6245f06aba675264ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401450101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002096d55714a83c3d030cc72141eac3577b8a394b8366b2c93354fcecdafeab025022f5e26de7e4eba3f8e4a09f243b55ea6f08bfd013e2051cf1d5df20dcb3331b264ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401460101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000202ccf49665bd46e4915dcc9221f5fc72124bb73fb11fed8869dc5862a47950c0d654693a1d86098212d68fa9a27f9df0faafea4aecebd8b203f2ab8f3f0b86196264ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401470101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000205d0be5aea546eab7734cbee757ea5f4983ab3a3f202323c1c88590bdbc8b561974cb0de6549bfdec92322ad53d8a4192433edab0e33a10e7561d2e27c7759f8a264ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401480101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020fc3d70d4f690d7d7da90b1c832a51c2a92940cdd9fccda6a909c7256ad567160550e1089b48fe75f0f1e6f712cc2a1d4aad384a4ceacf1c71576420fe8e7de46264ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401490101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020995ab1f2293e3aa1e7bef418919bdce60032e89e60223b8c12b17488c50af83b3a360b4f89551aa0ced646a6210b0c3d3f7d0464faa248b9d252c89615babc9f274ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020e02dd5e1201ebe7ef453e77df3021cffde1b5447b9eced017963489e005ac247e1f2b80e91180cd9744c3e126eb8a0fb34ced45587da6b0fa9fd1c6946f049bf274ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002081b265800d61feb37525c58aacc7e6d32cfc6b6579784f9952dca51c3addeb37da900ae4ec1b2075056001a361c0fc8f9ac1ca018bde2a2bd8fc587424dfe18f274ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000207daba0dc6fccc067a19dec46770d1443fc715b50540242ff26073a6748bcd9583804c315b783d4f9aac4dc8f109fa60bcbf1c3cf98a7e1e593fcb969e88aafae274ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204a35ab2d9c4656c4992d2445ee566e4f5cd0468358292c94839de2b270ba0628de9148db03da59ca2eb727a4f7a03f2b9ee1e4045f37bdaa4b75a89de56b63ee274ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020215ed7b31d118a45a06407133e41f3f9d25c913ff98f3e798144f981d9145305266efc9274f7c836aa0f8b831732aec2c1d85c5bb9176af4c889a707cca380fc274ae75affff7f200400000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04014f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020425321a1f12ed4613b63b3cc5b4c7674242c3de67ea86984f2d9bb2766f6220a5c460ce6840832b7ef05f206c5d255bc2d8ea83753a7f5ef5c0bf3dbcbc7c74a284ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401500101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204e088cfd312091bf589f0fc24131dba20ed83f716530a7b033356e9ec803be23a73c4af5b64fc631d20aa1f646c7c61c4cc5d4be6e2fa29570030d72219726a0284ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401510101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204287a2bf40300e39b45d6667d6e17abba37d98c41efc7023fd21b643fb2f6b76d915d10e1f9e224d3bfdf5203da57173f0b8d8eedc92ed9ff23372927e86073b284ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401520101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"0000002083137929ca902f409a1f35385b4d31e0f5163d488d71b00ade724d7831986a280a187c13fe0db45aed5fa4f5089d544a617da9f4f80fe6ce1d0711b228f3a6bd284ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401530101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020b38c074d78cbcc8bbbf20c48e30639c2dcf444488efee59f5aa01c48322a30418009c11df7e14de6a6d0d72cc22f20ff5fdba3a7dbe409409ff92b1302340fb8284ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401540101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020a6865148220c7f5ac3259f57fa381676d6be5fdab56eed8e060808fcce4118492476fe6d5ecc681e7a18c20a04239143de1c2cddefbb08f15153b8e9b0c22f62284ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401550101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020ae146263f4087106b8c51ffb5cdddb03a4e593ea1a22a8f6580eaa374185126db2187dbe694a1b17b7c5664115307965407274c25bf6ae02049817685f923256294ae75affff7f200600000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401560101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020718abe22705f33134812b56d04c129d453da890e27caf6e77d2bfb3f9be460083634f35fc8ea4a29b0f30d6c6b4926455a31de62d2f763bc95c8cd1a7a425599294ae75affff7f200300000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401570101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002084537c50520f4e6b0173926320c3193f6b4259c9a724fe202337d3d5cf7da70b1104e1d6f25c411165a856a0bfdd5ddbe168238425c05271962aa6e5ebb676fb294ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401580101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000204bae65b948c9790468cfc8fbf78a81e7aec5407dbab18c18ef1ab37a13f0c257792cd1246dac5ce265a8439e539db1aebe4776535d422315bac8e57a51605aa6294ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401590101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000201e68985b2a15920524c3c2da77354c0ccfb202c3ab4005128eb2743aa33f0a5b0527a624a6d1f808c896d8cd1552c251a4ae5310e6c62493977b0965d17f580d294ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015a0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020ab1d90854505caa1ee748c1987f4cb6674844b84d17224bc3983fdd6e44a930b2edf3e5a90be7a9080ad15014ce796b38b6c19ddd3754443243fc277bcd2368a294ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015b0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002062242f316efa083c17e2173d9f9831afc1465f3fe84431d8e52fec71ae358b243e29246e0db770708987f2cff380920321636926d9c66c2c808d2d5f0ed27d672a4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015c0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020258c1a9b0ca17819d8d7fea5178e9c72c2e7c6769696f70098e0d5d2a9dc0e5cf039f5f686d727c6158ef5405ee8794ec2f89c641093a1dff0f5240263a441bc2a4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015d0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020434ce26cddae99571a1d663419d715f88e6ff0e413c45f21c44b1991d041fd2745534db221054e2233052aaffe7d7232a91b3f0918c14eac74b2f704b37bfb8e2a4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015e0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"00000020ec94475f112ea37dddd84f066e61446b431d28d2d9b6fbb336d16295e7eece52f1dc01abb3b27e71d8a25c3ed2d6fbbc5900bf954738ed63d57689ba5e68f6532a4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff04015f0101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000206188cca18a2be3b7f0b500a724f1b73312873488f04c5082abd81ca0b2250a75ec0efb2fa994ad23e1f39416e69976e66fc4ef9f101bd4f60fcee1f8c45a2b802a4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401600101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000202384b91bd0bfb1385ef42bda9b2fa43930aa8889214bb14f63c61a74f380dc7f9d547f2925ad39b9161fa55b9cd258463fd058234778a7c7b3061113d64179812a4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401610101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204c266a3008c3bd95ae4bc8228bf878595cabf76cc2ef3fed32936777fe37833a1a13ea016868b2352a8d6d1eb35d0aee784cef06ed50655b8d84c089324351df2b4ae75affff7f200200000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401620101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "00000020ab2c78d1249de8482cf26bfb1616ac04f7b8404eafcae3e0654f398943fd41571b4d7549862be0b06ed1408ddc3e7c01b07af24c203a18fe9744e214b14d2d652b4ae75affff7f200000000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401630101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "0000002031aa0fc5e0242a9c9d91024b6b7e67544da72c4773537f881ee5caacd45a3c38c650ff5fa0ffd080b3e5f9752527b718e8c6b731467028e14b0b009d4e7406ee2b4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401640101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + 
"000000200418c57609fc6ea3f24f569dc3afe163538a6aab940fe8c3d73c72cd781221380482c6f8ef2c19429da2f7a842bb811496aa86247c55f7ecb3337718b694d9472b4ae75affff7f200100000001020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401650101ffffffff0200f2052a01000000232103836d350d84c3782a7579c5f542c7e70fe628d3062c6bac563d2264c21e56f70eac0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000", + "000000204f36abaf10d6a8b3113103f3413475db0640c89a39f02a718e8bd2190fe87f1d6ffaf2b9fdbed67ebae4b36c97f9582814c99c90d0b5123f14da4861bf3e4f742b4ae75affff7f200100000002020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401660101ffffffff02b000062a01000000232102f6869601b2b9980b07fc047e309c1fc1433e08c5bf0498115c5c0e117ef59bfdac0000000000000000266a24aa21a9ed5896cd6a40cd126b09e317cbd179f39e2bcef2f9d423751d980258396416e671012000000000000000000000000000000000000000000000000000000000000000000000000002000000011cd16b94f20a8a3dda91027c888025f2ec1a07ddcb2786bdff5916e66c00406f0000000048473044022046d00465c4508cfd02fcb878b19d120e28be28e40658b1f15458828891ed1541022036aac054f36a42666dfb7b42a20506315a0b72232ce7704406e23c7a9515178701feffffff0200286bee0000000017a91481ddd4a9708ba8088cdcfeab9583ede8d83a298c8750bb9a3b0000000017a91484e16967722289584257803688aae36cd64480688765000000", + "00000020cc7c39992f2dae21e0ebd958f6ba77a6d0bcc568e044b9b61ca4d77536a4214e7b4ade79dd733ea72d97993aaac27f2263b91b1500467350ff35ea40c2850d392b4ae75affff7f200100000004020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0401670101ffffffff0230d0062a01000000232102f58ba54b2d51c4e2a6096f9d266261b45f1026a86ba88c29ed8070dfe3f5ec6bac0000000000000000266a24aa21a9edb824d92cb231c2366f0726aedf8bfc2705239a40ae6b10a106534e8b5395a09d01200000000000000000000000000000000000000000000000000000000000000000000000000200000000010196bac2a0f6212b8e5710f8c7214dd32c393d38d20b7703cf0b7b25276bb6ab8001000000171600144f8a6c8d4c6c309b1e2b725b7496859e172ea367feffffff02781ef5050000000017a9149f00bafb542049fe32d532b0ea7494ebb7ae41398750daa4350000000017a9147794e6dc43de332ca7a095e582478c331446686d8702483045022100bd85ed3954f1151c2fde32c4021a32c96d7defa4a57c14b1a056be9b361a8e49022054947bf6fdb535c46cbee62efc1262cc0389a6eaa19afbcfa98fb1fb30c3ef230121031b2371df07fb88dddf590b246dc74defc265fdbfe258e4b168250859b806f5ce660000000200000001bf83574fb606f25c59200c844443201faf923ef5284fd4401f3104a323c601490000000049483045022100d1c4b09b488f6375ee4540a531a13b5549e88e2459bd88c84867e293c54862740220222e8af70c8d8b1139c2a7b616d9132bde94244edca7eee3c7a783b12839dadc01feffffff0250196bee0000000017a91490e5e33cfedf18d5cf911d6f853770c62e1f5d028700ca9a3b0000000017a91422311ee58518edf3a2289012461dc66bc8739d2687660000000200000000010196bac2a0f6212b8e5710f8c7214dd32c393d38d20b7703cf0b7b25276bb6ab800000000017160014b81faaafa52f7723f539f8d595147b1a112b38ecfeffffff0208bd9a3b0000000017a9146b2e611708a94d9c674dd08c7c4c1fbb97bcdba987005ed0b20000000017a914933a20f07bab1d8f341f391819466a271f0cfd648702483045022100dfa8b0052c7825e6abcea05d10fc82550a8d6538681ffc8188d48ba789e4d9b40220697371cdf527a6ecd1887f87da3c87dca3419e4a1aa2683e5c4e035199084d100121021cc37c2ec090f30ea0b49508ae8a7d65d9759903932666da56671c1faa445d5d53000000" + ], + "mocktime": 1525107225, + "stats": [ + { + "avgfee": 0, + "avgfeerate": 0, + "avgtxsize": 0, + "blockhash": "1d7fe80f19d28b8e712af0399ac84006db753441f3033111b3a8d610afab364f", + "feerate_percentiles": [ + 0, + 0, 
+ 0, + 0, + 0 + ], + "height": 101, + "ins": 0, + "maxfee": 0, + "maxfeerate": 0, + "maxtxsize": 0, + "medianfee": 0, + "mediantime": 1525107242, + "mediantxsize": 0, + "minfee": 0, + "minfeerate": 0, + "mintxsize": 0, + "outs": 2, + "subsidy": 5000000000, + "swtotal_size": 0, + "swtotal_weight": 0, + "swtxs": 0, + "time": 1525107243, + "total_out": 0, + "total_size": 0, + "total_weight": 0, + "totalfee": 0, + "txs": 1, + "utxo_increase": 2, + "utxo_size_inc": 173 + }, + { + "avgfee": 3760, + "avgfeerate": 20, + "avgtxsize": 187, + "blockhash": "4e21a43675d7a41cb6b944e068c5bcd0a677baf658d9ebe021ae2d2f99397ccc", + "height": 102, + "feerate_percentiles": [ + 20, + 20, + 20, + 20, + 20 + ], + "ins": 1, + "maxfee": 3760, + "maxfeerate": 20, + "maxtxsize": 187, + "medianfee": 3760, + "mediantime": 1525107242, + "mediantxsize": 187, + "minfee": 3760, + "minfeerate": 20, + "mintxsize": 187, + "outs": 4, + "subsidy": 5000000000, + "swtotal_size": 0, + "swtotal_weight": 0, + "swtxs": 0, + "time": 1525107243, + "total_out": 4999996240, + "total_size": 187, + "total_weight": 748, + "totalfee": 3760, + "txs": 2, + "utxo_increase": 3, + "utxo_size_inc": 234 + }, + { + "avgfee": 18960, + "avgfeerate": 109, + "avgtxsize": 228, + "blockhash": "22d9b8b9c2a37c81515f3fc84f7241f6c07dbcea85ef16b00bcc33ae400a030f", + "feerate_percentiles": [ + 20, + 20, + 20, + 300, + 300 + ], + "height": 103, + "ins": 3, + "maxfee": 49800, + "maxfeerate": 300, + "maxtxsize": 248, + "medianfee": 3760, + "mediantime": 1525107243, + "mediantxsize": 248, + "minfee": 3320, + "minfeerate": 20, + "mintxsize": 188, + "outs": 8, + "subsidy": 5000000000, + "swtotal_size": 496, + "swtotal_weight": 1324, + "swtxs": 2, + "time": 1525107243, + "total_out": 9999939360, + "total_size": 684, + "total_weight": 2076, + "totalfee": 56880, + "txs": 4, + "utxo_increase": 5, + "utxo_size_inc": 380 + } + ] +} \ No newline at end of file diff --git a/test/functional/data/rpc_psbt.json b/test/functional/data/rpc_psbt.json new file mode 100644 index 000000000..dd40a096d --- /dev/null +++ b/test/functional/data/rpc_psbt.json @@ -0,0 +1,130 @@ +{ + "invalid" : [ + "AgAAAAEmgXE3Ht/yhek3re6ks3t4AAwFZsuzrWRkFxPKQhcb9gAAAABqRzBEAiBwsiRRI+a/R01gxbUMBD1MaRpdJDXwmjSnZiqdwlF5CgIgATKcqdrPKAvfMHQOwDkEIkIsgctFg5RXrrdvwS7dlbMBIQJlfRGNM1e44PTCzUbbezn22cONmnCry5st5dyNv+TOMf7///8C09/1BQAAAAAZdqkU0MWZA8W6woaHYOkP1SGkZlqnZSCIrADh9QUAAAAAF6kUNUXm4zuDLEcFDyTT7rk8nAOUi8eHsy4TAA==", + "cHNidP8BAHUCAAAAASaBcTce3/KF6Tet7qSze3gADAVmy7OtZGQXE8pCFxv2AAAAAAD+////AtPf9QUAAAAAGXapFNDFmQPFusKGh2DpD9UhpGZap2UgiKwA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHh7MuEwAAAQD9pQEBAAAAAAECiaPHHqtNIOA3G7ukzGmPopXJRjr6Ljl/hTPMti+VZ+UBAAAAFxYAFL4Y0VKpsBIDna89p95PUzSe7LmF/////4b4qkOnHf8USIk6UwpyN+9rRgi7st0tAXHmOuxqSJC0AQAAABcWABT+Pp7xp0XpdNkCxDVZQ6vLNL1TU/////8CAMLrCwAAAAAZdqkUhc/xCX/Z4Ai7NK9wnGIZeziXikiIrHL++E4sAAAAF6kUM5cluiHv1irHU6m80GfWx6ajnQWHAkcwRAIgJxK+IuAnDzlPVoMR3HyppolwuAJf3TskAinwf4pfOiQCIAGLONfc0xTnNMkna9b7QPZzMlvEuqFEyADS8vAtsnZcASED0uFWdJQbrUqZY3LLh+GFbTZSYG2YVi/jnF6efkE/IQUCSDBFAiEA0SuFLYXc2WHS9fSrZgZU327tzHlMDDPOXMMJ/7X85Y0CIGczio4OFyXBl/saiK9Z9R5E5CVbIBZ8hoQDHAXR8lkqASECI7cr7vCWXRC+B3jv7NYfysb3mk6haTkzgHNEZPhPKrMAAAAAAA==", + 
"cHNidP8BAP0KAQIAAAACqwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QAAAAAakcwRAIgR1lmF5fAGwNrJZKJSGhiGDR9iYZLcZ4ff89X0eURZYcCIFMJ6r9Wqk2Ikf/REf3xM286KdqGbX+EhtdVRs7tr5MZASEDXNxh/HupccC1AaZGoqg7ECy0OIEhfKaC3Ibi1z+ogpL+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAABASAA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHhwEEFgAUhdE1N/LiZUBaNNuvqePdoB+4IwgAAAA=", + "cHNidP8AAQD9pQEBAAAAAAECiaPHHqtNIOA3G7ukzGmPopXJRjr6Ljl/hTPMti+VZ+UBAAAAFxYAFL4Y0VKpsBIDna89p95PUzSe7LmF/////4b4qkOnHf8USIk6UwpyN+9rRgi7st0tAXHmOuxqSJC0AQAAABcWABT+Pp7xp0XpdNkCxDVZQ6vLNL1TU/////8CAMLrCwAAAAAZdqkUhc/xCX/Z4Ai7NK9wnGIZeziXikiIrHL++E4sAAAAF6kUM5cluiHv1irHU6m80GfWx6ajnQWHAkcwRAIgJxK+IuAnDzlPVoMR3HyppolwuAJf3TskAinwf4pfOiQCIAGLONfc0xTnNMkna9b7QPZzMlvEuqFEyADS8vAtsnZcASED0uFWdJQbrUqZY3LLh+GFbTZSYG2YVi/jnF6efkE/IQUCSDBFAiEA0SuFLYXc2WHS9fSrZgZU327tzHlMDDPOXMMJ/7X85Y0CIGczio4OFyXBl/saiK9Z9R5E5CVbIBZ8hoQDHAXR8lkqASECI7cr7vCWXRC+B3jv7NYfysb3mk6haTkzgHNEZPhPKrMAAAAAAA==", + "cHNidP8BAHUCAAAAASaBcTce3/KF6Tet7qSze3gADAVmy7OtZGQXE8pCFxv2AAAAAAD+////AtPf9QUAAAAAGXapFNDFmQPFusKGh2DpD9UhpGZap2UgiKwA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHh7MuEwAAAQD9pQEBAAAAAAECiaPHHqtNIOA3G7ukzGmPopXJRjr6Ljl/hTPMti+VZ+UBAAAAFxYAFL4Y0VKpsBIDna89p95PUzSe7LmF/////4b4qkOnHf8USIk6UwpyN+9rRgi7st0tAXHmOuxqSJC0AQAAABcWABT+Pp7xp0XpdNkCxDVZQ6vLNL1TU/////8CAMLrCwAAAAAZdqkUhc/xCX/Z4Ai7NK9wnGIZeziXikiIrHL++E4sAAAAF6kUM5cluiHv1irHU6m80GfWx6ajnQWHAkcwRAIgJxK+IuAnDzlPVoMR3HyppolwuAJf3TskAinwf4pfOiQCIAGLONfc0xTnNMkna9b7QPZzMlvEuqFEyADS8vAtsnZcASED0uFWdJQbrUqZY3LLh+GFbTZSYG2YVi/jnF6efkE/IQUCSDBFAiEA0SuFLYXc2WHS9fSrZgZU327tzHlMDDPOXMMJ/7X85Y0CIGczio4OFyXBl/saiK9Z9R5E5CVbIBZ8hoQDHAXR8lkqASECI7cr7vCWXRC+B3jv7NYfysb3mk6haTkzgHNEZPhPKrMAAAAAAQA/AgAAAAH//////////////////////////////////////////wAAAAAA/////wEAAAAAAAAAAANqAQAAAAAAAAAA", + "cHNidP8CAAFVAgAAAAEnmiMjpd+1H8RfIg+liw/BPh4zQnkqhdfjbNYzO1y8OQAAAAAA/////wGgWuoLAAAAABl2qRT/6cAGEJfMO2NvLLBGD6T8Qn0rRYisAAAAAAABASCVXuoLAAAAABepFGNFIA9o0YnhrcDfHE0W6o8UwNvrhyICA7E0HMunaDtq9PEjjNbpfnFn1Wn6xH8eSNR1QYRDVb1GRjBDAiAEJLWO/6qmlOFVnqXJO7/UqJBkIkBVzfBwtncUaUQtBwIfXI6w/qZRbWC4rLM61k7eYOh4W/s6qUuZvfhhUduamgEBBCIAIHcf0YrUWWZt1J89Vk49vEL0yEd042CtoWgWqO1IjVaBAQVHUiEDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYhA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9Uq4iBgOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RhC0prpnAAAAgAAAAIAEAACAIgYD3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg70QtKa6ZwAAAIAAAACABQAAgAAA", + "cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAIBACCVXuoLAAAAABepFGNFIA9o0YnhrcDfHE0W6o8UwNvrhyICA7E0HMunaDtq9PEjjNbpfnFn1Wn6xH8eSNR1QYRDVb1GRjBDAiAEJLWO/6qmlOFVnqXJO7/UqJBkIkBVzfBwtncUaUQtBwIfXI6w/qZRbWC4rLM61k7eYOh4W/s6qUuZvfhhUduamgEBBCIAIHcf0YrUWWZt1J89Vk49vEL0yEd042CtoWgWqO1IjVaBAQVHUiEDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYhA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9Uq4iBgOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RhC0prpnAAAAgAAAAIAEAACAIgYD3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg70QtKa6ZwAAAIAAAACABQAAgAAA", + 
"cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAEBIJVe6gsAAAAAF6kUY0UgD2jRieGtwN8cTRbqjxTA2+uHIQIDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYwQwIgBCS1jv+qppThVZ6lyTu/1KiQZCJAVc3wcLZ3FGlELQcCH1yOsP6mUW1guKyzOtZO3mDoeFv7OqlLmb34YVHbmpoBAQQiACB3H9GK1FlmbdSfPVZOPbxC9MhHdONgraFoFqjtSI1WgQEFR1IhA7E0HMunaDtq9PEjjNbpfnFn1Wn6xH8eSNR1QYRDVb1GIQPeVdHh2sgF4/iljB+/m5TALz26r+En/vykmV8m+CCDvVKuIgYDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYQtKa6ZwAAAIAAAACABAAAgCIGA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9ELSmumcAAACAAAAAgAUAAIAAAA==", + "cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAEBIJVe6gsAAAAAF6kUY0UgD2jRieGtwN8cTRbqjxTA2+uHIgIDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUZGMEMCIAQktY7/qqaU4VWepck7v9SokGQiQFXN8HC2dxRpRC0HAh9cjrD+plFtYLisszrWTt5g6Hhb+zqpS5m9+GFR25qaAQIEACIAIHcf0YrUWWZt1J89Vk49vEL0yEd042CtoWgWqO1IjVaBAQVHUiEDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYhA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9Uq4iBgOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RhC0prpnAAAAgAAAAIAEAACAIgYD3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg70QtKa6ZwAAAIAAAACABQAAgAAA", + "cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAEBIJVe6gsAAAAAF6kUY0UgD2jRieGtwN8cTRbqjxTA2+uHIgIDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUZGMEMCIAQktY7/qqaU4VWepck7v9SokGQiQFXN8HC2dxRpRC0HAh9cjrD+plFtYLisszrWTt5g6Hhb+zqpS5m9+GFR25qaAQEEIgAgdx/RitRZZm3Unz1WTj28QvTIR3TjYK2haBao7UiNVoECBQBHUiEDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUYhA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9Uq4iBgOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RhC0prpnAAAAgAAAAIAEAACAIgYD3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg70QtKa6ZwAAAIAAAACABQAAgAAA", + "cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAEBIJVe6gsAAAAAF6kUY0UgD2jRieGtwN8cTRbqjxTA2+uHIgIDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUZGMEMCIAQktY7/qqaU4VWepck7v9SokGQiQFXN8HC2dxRpRC0HAh9cjrD+plFtYLisszrWTt5g6Hhb+zqpS5m9+GFR25qaAQEEIgAgdx/RitRZZm3Unz1WTj28QvTIR3TjYK2haBao7UiNVoEBBUdSIQOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RiED3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg71SriEGA7E0HMunaDtq9PEjjNbpfnFn1Wn6xH8eSNR1QYRDVb0QtKa6ZwAAAIAAAACABAAAgCIGA95V0eHayAXj+KWMH7+blMAvPbqv4Sf+/KSZXyb4IIO9ELSmumcAAACAAAAAgAUAAIAAAA==", + 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAIAALsCAAAAAarXOTEBi9JfhK5AC2iEi+CdtwbqwqwYKYur7nGrZW+LAAAAAEhHMEQCIFj2/HxqM+GzFUjUgcgmwBW9MBNarULNZ3kNq2bSrSQ7AiBKHO0mBMZzW2OT5bQWkd14sA8MWUL7n3UYVvqpOBV9ugH+////AoDw+gIAAAAAF6kUD7lGNCFpa4LIM68kHHjBfdveSTSH0PIKJwEAAAAXqRQpynT4oI+BmZQoGFyXtdhS5AY/YYdlAAAAAQfaAEcwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMAUgwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gFHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4AAQEgAMLrCwAAAAAXqRS39fr0Dj1ApaRZsds1NfK3L6kh6IcBByMiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEI2gQARzBEAiBi63pVYQenxz9FrEq1od3fb3B1+xJ1lpp/OD7/94S8sgIgDAXbt0cNvy8IVX3TVscyXB7TCRPpls04QJRdsSIo2l8BRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAACBwDaAEcwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMAUgwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gFHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4AAQEgAMLrCwAAAAAXqRS39fr0Dj1ApaRZsds1NfK3L6kh6IcBByMiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEI2gQARzBEAiBi63pVYQenxz9FrEq1od3fb3B1+xJ1lpp/OD7/94S8sgIgDAXbt0cNvy8IVX3TVscyXB7TCRPpls04QJRdsSIo2l8BRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABB9oARzBEAiB0AYrUGACXuHMyPAAVcgs2hMyBI4kQSOfbzZtVrWecmQIgc9Npt0Dj61Pc76M4I8gHBRTKVafdlUTxV8FnkTJhEYwBSDBFAiEA9hA4swjcHahlo0hSdG8BV3KTQgjG0kRUOTzZm98iF3cCIAVuZ1pnWm0KArhbFOXikHTYolqbV2C+ooFvZhkQoAbqAUdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSrgABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEHIyIAIIwjUxc3Q7WV37Sge3K6jkLjeX2nTof+fZ10l+OyAokDAggA2gQARzBEAiBi63pVYQenxz9FrEq1od3fb3B1+xJ1lpp/OD7/94S8sgIgDAXbt0cNvy8IVX3TVscyXB7TCRPpls04QJRdsSIo2l8BRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABB9oARzBEAiB0AYrUGACXuHMyPAAVcgs2hMyBI4kQSOfbzZtVrWecmQIgc9Npt0Dj61Pc76M4I8gHBRTKVafdlUTxV8FnkTJhEYwBSDBFAiEA9hA4swjcHahlo0hSdG8BV3KTQgjG0kRUOTzZm98iF3cCIAVuZ1pnWm0KArhbFOXikHTYolqbV2C+ooFvZhkQoAbqAUdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSrgABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEHIyIAIIwjUxc3Q7WV37Sge3K6jkLjeX2nTof+fZ10l+OyAokDAQjaBABHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwFHMEQCIGX0W6WZi1mif/4ae+0BavHx+Q1Us6qPdFCqX1aiUQO9AiB/ckcDrR7blmgLKEtW1P/LiPf7dZ6rvgiqMPKbhROD0gFHUiEDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtwhAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zUq4AIQIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1PtnuylhxDZDGpPAAAAgAAAAIAEAACAACICAn9jmXV9Lv9VoTatAsaEsYOLZVbl8bazQoKpS2tQBRCWENkMak8AAACAAAAAgAUAAIAA", + "cHNidP8BAHMCAAAAATAa6YblFqHsisW0vGVz0y+DtGXiOtdhZ9aLOOcwtNvbAAAAAAD/////AnR7AQAAAAAAF6kUA6oXrogrXQ1Usl1jEE5P/s57nqKHYEOZOwAAAAAXqRS5IbG6b3IuS/qDtlV6MTmYakLsg4cAAAAAAAEBHwDKmjsAAAAAFgAU0tlLZK4IWH7vyO6xh8YB6Tn5A3wCAwABAAAAAAEAFgAUYunpgv/zTdgjlhAxawkM0qO3R8sAAQAiACCHa62DLx0WgBXtQSMqnqZaGBXZ7xPA74dZ9ktbKyeKZQEBJVEhA7fOI6AcW0vwCmQlN836uzFbZoMyhnR471EwnSvVf4qHUa4A", + "cHNidP8BAHMCAAAAATAa6YblFqHsisW0vGVz0y+DtGXiOtdhZ9aLOOcwtNvbAAAAAAD/////AnR7AQAAAAAAF6kUA6oXrogrXQ1Usl1jEE5P/s57nqKHYEOZOwAAAAAXqRS5IbG6b3IuS/qDtlV6MTmYakLsg4cAAAAAAAEBHwDKmjsAAAAAFgAU0tlLZK4IWH7vyO6xh8YB6Tn5A3wAAgAAFgAUYunpgv/zTdgjlhAxawkM0qO3R8sAAQAiACCHa62DLx0WgBXtQSMqnqZaGBXZ7xPA74dZ9ktbKyeKZQEBJVEhA7fOI6AcW0vwCmQlN836uzFbZoMyhnR471EwnSvVf4qHUa4A", + 
"cHNidP8BAHMCAAAAATAa6YblFqHsisW0vGVz0y+DtGXiOtdhZ9aLOOcwtNvbAAAAAAD/////AnR7AQAAAAAAF6kUA6oXrogrXQ1Usl1jEE5P/s57nqKHYEOZOwAAAAAXqRS5IbG6b3IuS/qDtlV6MTmYakLsg4cAAAAAAAEBHwDKmjsAAAAAFgAU0tlLZK4IWH7vyO6xh8YB6Tn5A3wAAQAWABRi6emC//NN2COWEDFrCQzSo7dHywABACIAIIdrrYMvHRaAFe1BIyqeploYFdnvE8Dvh1n2S1srJ4plIQEAJVEhA7fOI6AcW0vwCmQlN836uzFbZoMyhnR471EwnSvVf4qHUa4A" + ], + "valid" : [ + "cHNidP8BAHUCAAAAASaBcTce3/KF6Tet7qSze3gADAVmy7OtZGQXE8pCFxv2AAAAAAD+////AtPf9QUAAAAAGXapFNDFmQPFusKGh2DpD9UhpGZap2UgiKwA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHh7MuEwAAAQD9pQEBAAAAAAECiaPHHqtNIOA3G7ukzGmPopXJRjr6Ljl/hTPMti+VZ+UBAAAAFxYAFL4Y0VKpsBIDna89p95PUzSe7LmF/////4b4qkOnHf8USIk6UwpyN+9rRgi7st0tAXHmOuxqSJC0AQAAABcWABT+Pp7xp0XpdNkCxDVZQ6vLNL1TU/////8CAMLrCwAAAAAZdqkUhc/xCX/Z4Ai7NK9wnGIZeziXikiIrHL++E4sAAAAF6kUM5cluiHv1irHU6m80GfWx6ajnQWHAkcwRAIgJxK+IuAnDzlPVoMR3HyppolwuAJf3TskAinwf4pfOiQCIAGLONfc0xTnNMkna9b7QPZzMlvEuqFEyADS8vAtsnZcASED0uFWdJQbrUqZY3LLh+GFbTZSYG2YVi/jnF6efkE/IQUCSDBFAiEA0SuFLYXc2WHS9fSrZgZU327tzHlMDDPOXMMJ/7X85Y0CIGczio4OFyXBl/saiK9Z9R5E5CVbIBZ8hoQDHAXR8lkqASECI7cr7vCWXRC+B3jv7NYfysb3mk6haTkzgHNEZPhPKrMAAAAAAAAA", + "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEHakcwRAIgR1lmF5fAGwNrJZKJSGhiGDR9iYZLcZ4ff89X0eURZYcCIFMJ6r9Wqk2Ikf/REf3xM286KdqGbX+EhtdVRs7tr5MZASEDXNxh/HupccC1AaZGoqg7ECy0OIEhfKaC3Ibi1z+ogpIAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIAAAA", + "cHNidP8BAHUCAAAAASaBcTce3/KF6Tet7qSze3gADAVmy7OtZGQXE8pCFxv2AAAAAAD+////AtPf9QUAAAAAGXapFNDFmQPFusKGh2DpD9UhpGZap2UgiKwA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHh7MuEwAAAQD9pQEBAAAAAAECiaPHHqtNIOA3G7ukzGmPopXJRjr6Ljl/hTPMti+VZ+UBAAAAFxYAFL4Y0VKpsBIDna89p95PUzSe7LmF/////4b4qkOnHf8USIk6UwpyN+9rRgi7st0tAXHmOuxqSJC0AQAAABcWABT+Pp7xp0XpdNkCxDVZQ6vLNL1TU/////8CAMLrCwAAAAAZdqkUhc/xCX/Z4Ai7NK9wnGIZeziXikiIrHL++E4sAAAAF6kUM5cluiHv1irHU6m80GfWx6ajnQWHAkcwRAIgJxK+IuAnDzlPVoMR3HyppolwuAJf3TskAinwf4pfOiQCIAGLONfc0xTnNMkna9b7QPZzMlvEuqFEyADS8vAtsnZcASED0uFWdJQbrUqZY3LLh+GFbTZSYG2YVi/jnF6efkE/IQUCSDBFAiEA0SuFLYXc2WHS9fSrZgZU327tzHlMDDPOXMMJ/7X85Y0CIGczio4OFyXBl/saiK9Z9R5E5CVbIBZ8hoQDHAXR8lkqASECI7cr7vCWXRC+B3jv7NYfysb3mk6haTkzgHNEZPhPKrMAAAAAAQMEAQAAAAAAAA==", + "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEA3wIAAAABJoFxNx7f8oXpN63upLN7eAAMBWbLs61kZBcTykIXG/YAAAAAakcwRAIgcLIkUSPmv0dNYMW1DAQ9TGkaXSQ18Jo0p2YqncJReQoCIAEynKnazygL3zB0DsA5BCJCLIHLRYOUV663b8Eu3ZWzASECZX0RjTNXuOD0ws1G23s59tnDjZpwq8ubLeXcjb/kzjH+////AtPf9QUAAAAAGXapFNDFmQPFusKGh2DpD9UhpGZap2UgiKwA4fUFAAAAABepFDVF5uM7gyxHBQ8k0+65PJwDlIvHh7MuEwAAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIACICAurVlmh8qAYEPtw94RbN8p1eklfBls0FXPaYyNAr8k6ZELSmumcAAACAAAAAgAIAAIAAIgIDlPYr6d8ZlSxVh3aK63aYBhrSxKJciU9H2MFitNchPQUQtKa6ZwAAAIABAACAAgAAgAA=", + 
"cHNidP8BAFUCAAAAASeaIyOl37UfxF8iD6WLD8E+HjNCeSqF1+Ns1jM7XLw5AAAAAAD/////AaBa6gsAAAAAGXapFP/pwAYQl8w7Y28ssEYPpPxCfStFiKwAAAAAAAEBIJVe6gsAAAAAF6kUY0UgD2jRieGtwN8cTRbqjxTA2+uHIgIDsTQcy6doO2r08SOM1ul+cWfVafrEfx5I1HVBhENVvUZGMEMCIAQktY7/qqaU4VWepck7v9SokGQiQFXN8HC2dxRpRC0HAh9cjrD+plFtYLisszrWTt5g6Hhb+zqpS5m9+GFR25qaAQEEIgAgdx/RitRZZm3Unz1WTj28QvTIR3TjYK2haBao7UiNVoEBBUdSIQOxNBzLp2g7avTxI4zW6X5xZ9Vp+sR/HkjUdUGEQ1W9RiED3lXR4drIBeP4pYwfv5uUwC89uq/hJ/78pJlfJvggg71SriIGA7E0HMunaDtq9PEjjNbpfnFn1Wn6xH8eSNR1QYRDVb1GELSmumcAAACAAAAAgAQAAIAiBgPeVdHh2sgF4/iljB+/m5TALz26r+En/vykmV8m+CCDvRC0prpnAAAAgAAAAIAFAACAAAA=", + "cHNidP8BACoCAAAAAAFAQg8AAAAAABepFG6Rty1Vk+fUOR4v9E6R6YXDFkHwhwAAAAAAAA==" + ], + "creator" : [ + { + "inputs" : [ + { + "txid":"75ddabb27b8845f5247975c8a5ba7c6f336c4570708ebe230caf6db5217ae858", + "vout":0 + }, + { + "txid":"1dea7cd05979072a3578cab271c02244ea8a090bbb46aa680a65ecd027048d83", + "vout":1 + } + ], + "outputs" : [ + { + "bcrt1qmpwzkuwsqc9snjvgdt4czhjsnywa5yjdqpxskv":1.49990000 + }, + { + "bcrt1qqzh2ngh97ru8dfvgma25d6r595wcwqy0cee4cc": 1 + } + ], + "result" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAAAAAA=" + } + ], + "signer" : [ + { + "privkeys" : [ + "cP53pDbR5WtAD8dYAW9hhTjuvvTVaEiQBdrz9XPrgLBeRFiyCbQr", + "cR6SXDoyfQrcp4piaiHE97Rsgta9mNhGTen9XeonVgwsh4iSgw6d" + ], + "psbt" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAQMEAQAAAAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAAQMEAQAAAAAiAgOppMN/WZbTqiXbrGtXCvBlA5RJKUJGCzVHU+2e7KWHcRDZDGpPAAAAgAAAAIAEAACAACICAn9jmXV9Lv9VoTatAsaEsYOLZVbl8bazQoKpS2tQBRCWENkMak8AAACAAAAAgAUAAIAA", + "result" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgf0cwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMAQEDBAEAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAAEBIADC6wsAAAAAF6kUt/X69A49QKWkWbHbNTXyty+pIeiHIgIDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtxHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwEBAwQBAAAAAQQiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEFR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuIgYCOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnMQ2QxqTwAAAIAAAACAAwAAgCIGAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcENkMak8AAACAAAAAgAIAAIAAIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==" + }, + { + "privkeys" : [ + "cT7J9YpCwY3AVRFSjN6ukeEeWY6mhpbJPxRaDaP5QTdygQRxP9Au", + "cNBc3SWUip9PPm1GjRoLEJT6T41iNzCYtD7qro84FMnM5zEqeJsE" + ], + "psbt" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAQMEAQAAAAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAAQMEAQAAAAAiAgOppMN/WZbTqiXbrGtXCvBlA5RJKUJGCzVHU+2e7KWHcRDZDGpPAAAAgAAAAIAEAACAACICAn9jmXV9Lv9VoTatAsaEsYOLZVbl8bazQoKpS2tQBRCWENkMak8AAACAAAAAgAUAAIAA", + "result" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210cwRAIgYxqYn+c4qSrQGYYCMxLBkhT+KAKznly8GsNniAbGksMCIDnbbDh70mdxbf2z1NjaULjoXSEzJrp8faqkwM5B65IjAQEDBAEAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAAEBIADC6wsAAAAAF6kUt/X69A49QKWkWbHbNTXyty+pIeiHIgICOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNHMEQCIGX0W6WZi1mif/4ae+0BavHx+Q1Us6qPdFCqX1aiUQO9AiB/ckcDrR7blmgLKEtW1P/LiPf7dZ6rvgiqMPKbhROD0gEBAwQBAAAAAQQiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEFR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuIgYCOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnMQ2QxqTwAAAIAAAACAAwAAgCIGAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcENkMak8AAACAAAAAgAIAAIAAIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==" + }, + { + "privkeys" : [ + "cNBc3SWUip9PPm1GjRoLEJT6T41iNzCYtD7qro84FMnM5zEqeJsE" + ], + "psbt" : "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEBItPf9QUAAAAAGXapFNSO0xELlAFMsRS9Mtb00GbcdCVriKwAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIACICAurVlmh8qAYEPtw94RbN8p1eklfBls0FXPaYyNAr8k6ZELSmumcAAACAAAAAgAIAAIAAIgIDlPYr6d8ZlSxVh3aK63aYBhrSxKJciU9H2MFitNchPQUQtKa6ZwAAAIABAACAAgAAgAA=", + "result" : "cHNidP8BAKACAAAAAqsJSaCMWvfEm4IS9Bfi8Vqz9cM9zxU4IagTn4d6W3vkAAAAAAD+////qwlJoIxa98SbghL0F+LxWrP1wz3PFTghqBOfh3pbe+QBAAAAAP7///8CYDvqCwAAAAAZdqkUdopAu9dAy+gdmI5x3ipNXHE5ax2IrI4kAAAAAAAAGXapFG9GILVT+glechue4O/p+gOcykWXiKwAAAAAAAEBItPf9QUAAAAAGXapFNSO0xELlAFMsRS9Mtb00GbcdCVriKwAAQEgAOH1BQAAAAAXqRQ1RebjO4MsRwUPJNPuuTycA5SLx4cBBBYAFIXRNTfy4mVAWjTbr6nj3aAfuCMIACICAurVlmh8qAYEPtw94RbN8p1eklfBls0FXPaYyNAr8k6ZELSmumcAAACAAAAAgAIAAIAAIgIDlPYr6d8ZlSxVh3aK63aYBhrSxKJciU9H2MFitNchPQUQtKa6ZwAAAIABAACAAgAAgAA=" + }, + { + "privkeys" : [ + "cT7J9YpCwY3AVRFSjN6ukeEeWY6mhpbJPxRaDaP5QTdygQRxP9Au", + "cNBc3SWUip9PPm1GjRoLEJT6T41iNzCYtD7qro84FMnM5zEqeJsE" + ], + "psbt" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq8iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + "result" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq8iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=" + }, + { + "privkeys" : [ + "cT7J9YpCwY3AVRFSjN6ukeEeWY6mhpbJPxRaDaP5QTdygQRxP9Au", + "cNBc3SWUip9PPm1GjRoLEJT6T41iNzCYtD7qro84FMnM5zEqeJsE" + ], + "psbt" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQABBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + "result" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQABBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=" + }, + { + "privkeys" : [ + "cT7J9YpCwY3AVRFSjN6ukeEeWY6mhpbJPxRaDaP5QTdygQRxP9Au", + "cNBc3SWUip9PPm1GjRoLEJT6T41iNzCYtD7qro84FMnM5zEqeJsE" + ], + "psbt" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSrSIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=", + "result" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210gwRQIhAPYQOLMI3B2oZaNIUnRvAVdyk0IIxtJEVDk82ZvfIhd3AiAFbmdaZ1ptCgK4WxTl4pB02KJam1dgvqKBb2YZEKAG6gEBAwQBAAAAAQRHUiEClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8hAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXUq4iBgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfxDZDGpPAAAAgAAAAIAAAACAIgYC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtcQ2QxqTwAAAIAAAACAAQAAgAABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohyICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSrSIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=" + } + ], + "combiner" : [ + { + "combine" : [ + 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgf0cwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMAQEDBAEAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAAEBIADC6wsAAAAAF6kUt/X69A49QKWkWbHbNTXyty+pIeiHIgIDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtxHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwEBAwQBAAAAAQQiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEFR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuIgYCOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnMQ2QxqTwAAAIAAAACAAwAAgCIGAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcENkMak8AAACAAAAAgAIAAIAAIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==", + "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU210cwRAIgYxqYn+c4qSrQGYYCMxLBkhT+KAKznly8GsNniAbGksMCIDnbbDh70mdxbf2z1NjaULjoXSEzJrp8faqkwM5B65IjAQEDBAEAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAAEBIADC6wsAAAAAF6kUt/X69A49QKWkWbHbNTXyty+pIeiHIgICOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNHMEQCIGX0W6WZi1mif/4ae+0BavHx+Q1Us6qPdFCqX1aiUQO9AiB/ckcDrR7blmgLKEtW1P/LiPf7dZ6rvgiqMPKbhROD0gEBAwQBAAAAAQQiACCMI1MXN0O1ld+0oHtyuo5C43l9p06H/n2ddJfjsgKJAwEFR1IhAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcIQI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc1KuIgYCOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnMQ2QxqTwAAAIAAAACAAwAAgCIGAwidwQx6xttU+RMpr2FzM9s4jOrQwjH3IzedG5kDCwLcENkMak8AAACAAAAAgAIAAIAAIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==" + ], + "result" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgf0cwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMASICAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXRzBEAiBjGpif5zipKtAZhgIzEsGSFP4oArOeXLwaw2eIBsaSwwIgOdtsOHvSZ3Ft/bPU2NpQuOhdITMmunx9qqTAzkHrkiMBAQMEAQAAAAEER1IhApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/IQLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU211KuIgYClYO/Oa4KYJdHrRma3dY0+mEIVZ1sXNObTCGD8auW4H8Q2QxqTwAAAIAAAACAAAAAgCIGAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXENkMak8AAACAAAAAgAEAAIAAAQEgAMLrCwAAAAAXqRS39fr0Dj1ApaRZsds1NfK3L6kh6IciAgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3EcwRAIgYut6VWEHp8c/RaxKtaHd329wdfsSdZaafzg+//eEvLICIAwF27dHDb8vCFV901bHMlwe0wkT6ZbNOECUXbEiKNpfASICAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zRzBEAiBl9FulmYtZon/+GnvtAWrx8fkNVLOqj3RQql9WolEDvQIgf3JHA60e25ZoCyhLVtT/y4j3+3Weq74IqjDym4UTg9IBAQMEAQAAAAEEIgAgjCNTFzdDtZXftKB7crqOQuN5fadOh/59nXSX47ICiQMBBUdSIQMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3CECOt2QTz1tz1nduQaw3uI1Kbf/ue1Q5ehhUZJoYCIfDnNSriIGAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zENkMak8AAACAAAAAgAMAAIAiBgMIncEMesbbVPkTKa9hczPbOIzq0MIx9yM3nRuZAwsC3BDZDGpPAAAAgAAAAIACAACAACICA6mkw39ZltOqJdusa1cK8GUDlEkpQkYLNUdT7Z7spYdxENkMak8AAACAAAAAgAQAAIAAIgICf2OZdX0u/1WhNq0CxoSxg4tlVuXxtrNCgqlLa1AFEJYQ2QxqTwAAAIAAAACABQAAgAA=" + }, + { + "combine" : [ + "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAKDwECAwQFBgcICQ8BAgMEBQYHCAkKCwwNDg8ACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAoPAQIDBAUGBwgJDwECAwQFBgcICQoLDA0ODwA=", + "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAKDwECAwQFBgcIEA8BAgMEBQYHCAkKCwwNDg8ACg8BAgMEBQYHCBAPAQIDBAUGBwgJCgsMDQ4PAAoPAQIDBAUGBwgQDwECAwQFBgcICQoLDA0ODwA=" + ], + "result" : "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAKDwECAwQFBgcICQ8BAgMEBQYHCAkKCwwNDg8KDwECAwQFBgcIEA8BAgMEBQYHCAkKCwwNDg8ACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PCg8BAgMEBQYHCBAPAQIDBAUGBwgJCgsMDQ4PAAoPAQIDBAUGBwgJDwECAwQFBgcICQoLDA0ODwoPAQIDBAUGBwgQDwECAwQFBgcICQoLDA0ODwA=" + } + ], + "finalizer" : [ + { + "finalize" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAAiAgKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgf0cwRAIgdAGK1BgAl7hzMjwAFXILNoTMgSOJEEjn282bVa1nnJkCIHPTabdA4+tT3O+jOCPIBwUUylWn3ZVE8VfBZ5EyYRGMASICAtq2H/SaFNtqfQKwzR+7ePxLGDErW05U2uTbovv+9TbXSDBFAiEA9hA4swjcHahlo0hSdG8BV3KTQgjG0kRUOTzZm98iF3cCIAVuZ1pnWm0KArhbFOXikHTYolqbV2C+ooFvZhkQoAbqAQEDBAEAAAABBEdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSriIGApWDvzmuCmCXR60Zmt3WNPphCFWdbFzTm0whg/GrluB/ENkMak8AAACAAAAAgAAAAIAiBgLath/0mhTban0CsM0fu3j8SxgxK1tOVNrk26L7/vU21xDZDGpPAAAAgAAAAIABAACAAAEBIADC6wsAAAAAF6kUt/X69A49QKWkWbHbNTXyty+pIeiHIgIDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtxHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwEiAgI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8Oc0cwRAIgZfRbpZmLWaJ//hp77QFq8fH5DVSzqo90UKpfVqJRA70CIH9yRwOtHtuWaAsoS1bU/8uI9/t1nqu+CKow8puFE4PSAQEDBAEAAAABBCIAIIwjUxc3Q7WV37Sge3K6jkLjeX2nTof+fZ10l+OyAokDAQVHUiEDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtwhAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zUq4iBgI63ZBPPW3PWd25BrDe4jUpt/+57VDl6GFRkmhgIh8OcxDZDGpPAAAAgAAAAIADAACAIgYDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtwQ2QxqTwAAAIAAAACAAgAAgAAiAgOppMN/WZbTqiXbrGtXCvBlA5RJKUJGCzVHU+2e7KWHcRDZDGpPAAAAgAAAAIAEAACAACICAn9jmXV9Lv9VoTatAsaEsYOLZVbl8bazQoKpS2tQBRCWENkMak8AAACAAAAAgAUAAIAA", + "result" : "cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABB9oARzBEAiB0AYrUGACXuHMyPAAVcgs2hMyBI4kQSOfbzZtVrWecmQIgc9Npt0Dj61Pc76M4I8gHBRTKVafdlUTxV8FnkTJhEYwBSDBFAiEA9hA4swjcHahlo0hSdG8BV3KTQgjG0kRUOTzZm98iF3cCIAVuZ1pnWm0KArhbFOXikHTYolqbV2C+ooFvZhkQoAbqAUdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSrgABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEHIyIAIIwjUxc3Q7WV37Sge3K6jkLjeX2nTof+fZ10l+OyAokDAQjaBABHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwFHMEQCIGX0W6WZi1mif/4ae+0BavHx+Q1Us6qPdFCqX1aiUQO9AiB/ckcDrR7blmgLKEtW1P/LiPf7dZ6rvgiqMPKbhROD0gFHUiEDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtwhAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zUq4AIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==" + } + ], + "extractor" : [ + { + "extract" : 
"cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWABTYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFgAUAK6pouXw+HaliN9VRuh0LR2HAI8AAAAAAAEAuwIAAAABqtc5MQGL0l+ErkALaISL4J23BurCrBgpi6vucatlb4sAAAAASEcwRAIgWPb8fGoz4bMVSNSByCbAFb0wE1qtQs1neQ2rZtKtJDsCIEoc7SYExnNbY5PltBaR3XiwDwxZQvufdRhW+qk4FX26Af7///8CgPD6AgAAAAAXqRQPuUY0IWlrgsgzryQceMF9295JNIfQ8gonAQAAABepFCnKdPigj4GZlCgYXJe12FLkBj9hh2UAAAABB9oARzBEAiB0AYrUGACXuHMyPAAVcgs2hMyBI4kQSOfbzZtVrWecmQIgc9Npt0Dj61Pc76M4I8gHBRTKVafdlUTxV8FnkTJhEYwBSDBFAiEA9hA4swjcHahlo0hSdG8BV3KTQgjG0kRUOTzZm98iF3cCIAVuZ1pnWm0KArhbFOXikHTYolqbV2C+ooFvZhkQoAbqAUdSIQKVg785rgpgl0etGZrd1jT6YQhVnWxc05tMIYPxq5bgfyEC2rYf9JoU22p9ArDNH7t4/EsYMStbTlTa5Nui+/71NtdSrgABASAAwusLAAAAABepFLf1+vQOPUClpFmx2zU18rcvqSHohwEHIyIAIIwjUxc3Q7WV37Sge3K6jkLjeX2nTof+fZ10l+OyAokDAQjaBABHMEQCIGLrelVhB6fHP0WsSrWh3d9vcHX7EnWWmn84Pv/3hLyyAiAMBdu3Rw2/LwhVfdNWxzJcHtMJE+mWzThAlF2xIijaXwFHMEQCIGX0W6WZi1mif/4ae+0BavHx+Q1Us6qPdFCqX1aiUQO9AiB/ckcDrR7blmgLKEtW1P/LiPf7dZ6rvgiqMPKbhROD0gFHUiEDCJ3BDHrG21T5EymvYXMz2ziM6tDCMfcjN50bmQMLAtwhAjrdkE89bc9Z3bkGsN7iNSm3/7ntUOXoYVGSaGAiHw5zUq4AIgIDqaTDf1mW06ol26xrVwrwZQOUSSlCRgs1R1Ptnuylh3EQ2QxqTwAAAIAAAACABAAAgAAiAgJ/Y5l1fS7/VaE2rQLGhLGDi2VW5fG2s0KCqUtrUAUQlhDZDGpPAAAAgAAAAIAFAACAAA==", + "result" : "0200000000010258e87a21b56daf0c23be8e7070456c336f7cbaa5c8757924f545887bb2abdd7500000000da00473044022074018ad4180097b873323c0015720b3684cc8123891048e7dbcd9b55ad679c99022073d369b740e3eb53dcefa33823c8070514ca55a7dd9544f157c167913261118c01483045022100f61038b308dc1da865a34852746f015772934208c6d24454393cd99bdf2217770220056e675a675a6d0a02b85b14e5e29074d8a25a9b5760bea2816f661910a006ea01475221029583bf39ae0a609747ad199addd634fa6108559d6c5cd39b4c2183f1ab96e07f2102dab61ff49a14db6a7d02b0cd1fbb78fc4b18312b5b4e54dae4dba2fbfef536d752aeffffffff838d0427d0ec650a68aa46bb0b098aea4422c071b2ca78352a077959d07cea1d01000000232200208c2353173743b595dfb4a07b72ba8e42e3797da74e87fe7d9d7497e3b2028903ffffffff0270aaf00800000000160014d85c2b71d0060b09c9886aeb815e50991dda124d00e1f5050000000016001400aea9a2e5f0f876a588df5546e8742d1d87008f000400473044022062eb7a556107a7c73f45ac4ab5a1dddf6f7075fb1275969a7f383efff784bcb202200c05dbb7470dbf2f08557dd356c7325c1ed30913e996cd3840945db12228da5f01473044022065f45ba5998b59a27ffe1a7bed016af1f1f90d54b3aa8f7450aa5f56a25103bd02207f724703ad1edb96680b284b56d4ffcb88f7fb759eabbe08aa30f29b851383d20147522103089dc10c7ac6db54f91329af617333db388cead0c231f723379d1b99030b02dc21023add904f3d6dcf59ddb906b0dee23529b7ffb9ed50e5e86151926860221f0e7352ae00000000" + } + ] +} \ No newline at end of file diff --git a/test/functional/example_test.py b/test/functional/example_test.py index 228e15059..3f15367a7 100755 --- a/test/functional/example_test.py +++ b/test/functional/example_test.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2017 The Bitcoin Core developers +# Copyright (c) 2017-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""An example functional test @@ -15,11 +15,9 @@ # Avoid wildcard * imports if possible from test_framework.blocktools import (create_block, create_coinbase) +from test_framework.messages import CInv from test_framework.mininode import ( - CInv, - NetworkThread, - NodeConn, - NodeConnCB, + P2PInterface, mininode_lock, msg_block, msg_getdata, @@ -28,19 +26,18 @@ from test_framework.util import ( assert_equal, connect_nodes, - p2p_port, wait_until, ) -# NodeConnCB is a class containing callbacks to be executed when a P2P -# message is received from the node-under-test. Subclass NodeConnCB and +# P2PInterface is a class containing callbacks to be executed when a P2P +# message is received from the node-under-test. Subclass P2PInterface and # override the on_*() methods if you need custom behaviour. -class BaseNode(NodeConnCB): +class BaseNode(P2PInterface): def __init__(self): - """Initialize the NodeConnCB + """Initialize the P2PInterface - Used to inialize custom properties for the Node that aren't - included by default in the base class. Be aware that the NodeConnCB + Used to initialize custom properties for the Node that aren't + included by default in the base class. Be aware that the P2PInterface base class already stores a counter for each P2P message type and the last received message of each type, which should be sufficient for the needs of most tests. @@ -51,14 +48,14 @@ def __init__(self): # Stores a dictionary of all blocks received self.block_receive_map = defaultdict(int) - def on_block(self, conn, message): + def on_block(self, message): """Override the standard on_block callback Store the hash of a received block in the dictionary.""" message.block.calc_sha256() self.block_receive_map[message.block.sha256] += 1 - def on_inv(self, conn, message): + def on_inv(self, message): """Override the standard on_inv callback""" pass @@ -70,16 +67,17 @@ def custom_function(): # self.log.info("running custom_function") # Oops! Can't run self.log outside the BitcoinTestFramework pass + class ExampleTest(BitcoinTestFramework): # Each functional test is a subclass of the BitcoinTestFramework class. - # Override the set_test_params(), add_options(), setup_chain(), setup_network() + # Override the set_test_params(), skip_test_if_missing_module(), add_options(), setup_chain(), setup_network() # and setup_nodes() methods to customize the test setup as required. def set_test_params(self): """Override test parameters for your individual test. - This method must be overridden and num_nodes must be exlicitly set.""" + This method must be overridden and num_nodes must be explicitly set.""" self.setup_clean_chain = True self.num_nodes = 3 # Use self.extra_args to change command-line arguments for the nodes @@ -87,6 +85,11 @@ def set_test_params(self): # self.log.info("I've finished set_test_params") # Oops! Can't run self.log before run_test() + # Use skip_test_if_missing_module() to skip the test if your test requires certain modules to be present. + # This test uses generate which requires wallet to be compiled + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + # Use add_options() to add specific command-line options for your test. # In practice this is not used very much, since the tests are mostly written # to be run in automated environments without command-line options. @@ -114,7 +117,7 @@ def setup_network(self): # sync_all() should not include node2, since we're not expecting it to # sync. 
connect_nodes(self.nodes[0], 1) - self.sync_all([self.nodes[0:1]]) + self.sync_all([self.nodes[0:2]]) # Use setup_nodes() to customize the node start behaviour (for example if # you don't want to start all nodes at the start of the test). @@ -133,21 +136,12 @@ def custom_method(self): def run_test(self): """Main test logic""" - # Create a P2P connection to one of the nodes - node0 = BaseNode() - connections = [] - connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0)) - node0.add_connection(connections[0]) - - # Start up network handling in another thread. This needs to be called - # after the P2P connections have been created. - NetworkThread().start() - # wait_for_verack ensures that the P2P connection is fully up. - node0.wait_for_verack() + # Create a P2P connection; add_p2p_connection() waits for a verack to make sure the connection is fully up + self.nodes[0].add_p2p_connection(BaseNode()) # Generating a block on one of the nodes will get us out of IBD blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)] - self.sync_all([self.nodes[0:1]]) + self.sync_all([self.nodes[0:2]]) # Notice above how we called an RPC by calling a method with the same # name on the node object. Notice also how we used a keyword argument @@ -179,8 +173,8 @@ def run_test(self): block = create_block(self.tip, create_coinbase(height), self.block_time) block.solve() block_message = msg_block(block) - # Send message is used to send a P2P message to the node over our NodeConn connection - node0.send_message(block_message) + # send_message() is used to send a P2P message to the node over our P2PInterface + self.nodes[0].p2p.send_message(block_message) self.tip = block.sha256 blocks.append(self.tip) self.block_time += 1 @@ -193,28 +187,27 @@ def run_test(self): connect_nodes(self.nodes[1], 2) self.log.info("Add P2P connection to node2") - node2 = BaseNode() - connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2)) - node2.add_connection(connections[1]) - node2.wait_for_verack() + self.nodes[0].disconnect_p2ps() + + self.nodes[2].add_p2p_connection(BaseNode()) - self.log.info("Wait for node2 reach current tip. Test that it has propogated all the blocks to us") + self.log.info("Wait for node2 to reach the current tip. Test that it has propagated all the blocks to us") getdata_request = msg_getdata() for block in blocks: getdata_request.inv.append(CInv(2, block)) - node2.send_message(getdata_request) + self.nodes[2].p2p.send_message(getdata_request) # wait_until() will loop until a predicate condition is met. Use it to test properties of the - # NodeConnCB objects. - wait_until(lambda: sorted(blocks) == sorted(list(node2.block_receive_map.keys())), timeout=5, lock=mininode_lock) + # P2PInterface objects. + wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock) self.log.info("Check that each block was received only once") - # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving - # messages. The test thread should acquire the global lock before accessing any NodeConn data to avoid locking + # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving + # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking # and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
with mininode_lock: - for block in node2.block_receive_map.values(): + for block in self.nodes[2].p2p.block_receive_map.values(): assert_equal(block, 1) if __name__ == '__main__': ExampleTest().main() diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py new file mode 100755 index 000000000..3d0467038 --- /dev/null +++ b/test/functional/feature_assumevalid.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test logic for skipping signature validation on old blocks. + +Test logic for skipping signature validation on blocks which we've assumed +valid (https://github.com/bitcoin/bitcoin/pull/9484) + +We build a chain that includes an invalid signature for one of the +transactions: + + 0: genesis block + 1: block 1 with coinbase transaction output. + 2-101: bury that block with 100 blocks so the coinbase transaction + output can be spent + 102: a block containing a transaction spending the coinbase + transaction output. The transaction has an invalid signature. + 103-2202: bury the bad block with just over two weeks' worth of blocks + (2100 blocks) + +Start three nodes: + + - node0 has no -assumevalid parameter. Try to sync to block 2202. It will + reject block 102 and only sync as far as block 101 + - node1 has -assumevalid set to the hash of block 102. Try to sync to + block 2202. node1 will sync all the way to block 2202. + - node2 has -assumevalid set to the hash of block 102. Try to sync to + block 200. node2 will reject block 102 even though it's assumed valid, + because it isn't buried by at least two weeks' work. +""" +import time + +from test_framework.blocktools import (create_block, create_coinbase) +from test_framework.key import CECKey +from test_framework.messages import ( + CBlockHeader, + COutPoint, + CTransaction, + CTxIn, + CTxOut, + msg_block, + msg_headers +) +from test_framework.mininode import P2PInterface +from test_framework.script import (CScript, OP_TRUE) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class BaseNode(P2PInterface): + def send_header_for_blocks(self, new_blocks): + headers_message = msg_headers() + headers_message.headers = [CBlockHeader(b) for b in new_blocks] + self.send_message(headers_message) + +class AssumeValidTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + + def setup_network(self): + self.add_nodes(3) + # Start node0. We don't start the other nodes yet since + # we need to pre-mine a block with an invalid transaction + # signature so we can pass in the block hash as assumevalid.
+ self.start_node(0) + + def send_blocks_until_disconnected(self, p2p_conn): + """Keep sending blocks to the node until we're disconnected.""" + for i in range(len(self.blocks)): + if not p2p_conn.is_connected: + break + try: + p2p_conn.send_message(msg_block(self.blocks[i])) + except IOError: + assert not p2p_conn.is_connected + break + + def assert_blockchain_height(self, node, height): + """Wait until the blockchain is no longer advancing and verify it's reached the expected height.""" + last_height = node.getblock(node.getbestblockhash())['height'] + timeout = 10 + while True: + time.sleep(0.25) + current_height = node.getblock(node.getbestblockhash())['height'] + if current_height != last_height: + last_height = current_height + if timeout < 0: + assert False, "blockchain too short after timeout: %d" % current_height + timeout -= 0.25 + continue + elif current_height > height: + assert False, "blockchain too long: %d" % current_height + elif current_height == height: + break + + def run_test(self): + p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) + + # Build the blockchain + self.tip = int(self.nodes[0].getbestblockhash(), 16) + self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1 + + self.blocks = [] + + # Get a pubkey for the coinbase TXO + coinbase_key = CECKey() + coinbase_key.set_secretbytes(b"horsebattery") + coinbase_pubkey = coinbase_key.get_pubkey() + + # Create the first block with a coinbase output to our key + height = 1 + block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time) + self.blocks.append(block) + self.block_time += 1 + block.solve() + # Save the coinbase for later + self.block1 = block + self.tip = block.sha256 + height += 1 + + # Bury the block 100 deep so the coinbase output is spendable + for i in range(100): + block = create_block(self.tip, create_coinbase(height), self.block_time) + block.solve() + self.blocks.append(block) + self.tip = block.sha256 + self.block_time += 1 + height += 1 + + # Create a transaction spending the coinbase output with an invalid (null) signature + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b"")) + tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE]))) + tx.calc_sha256() + + block102 = create_block(self.tip, create_coinbase(height), self.block_time) + self.block_time += 1 + block102.vtx.extend([tx]) + block102.hashMerkleRoot = block102.calc_merkle_root() + block102.rehash() + block102.solve() + self.blocks.append(block102) + self.tip = block102.sha256 + self.block_time += 1 + height += 1 + + # Bury the assumed valid block 2100 deep + for i in range(2100): + block = create_block(self.tip, create_coinbase(height), self.block_time) + block.nVersion = 4 + block.solve() + self.blocks.append(block) + self.tip = block.sha256 + self.block_time += 1 + height += 1 + + self.nodes[0].disconnect_p2ps() + + # Start node1 and node2 with assumevalid so they accept a block with a bad signature.
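+ # Note: -assumevalid takes the block hash as a hex string. hex() on the + # integer sha256 yields a "0x"-prefixed value, which the node's hash + # parser accepts.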
+ self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)]) + self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)]) + + p2p0 = self.nodes[0].add_p2p_connection(BaseNode()) + p2p1 = self.nodes[1].add_p2p_connection(BaseNode()) + p2p2 = self.nodes[2].add_p2p_connection(BaseNode()) + + # send header lists to all three nodes + p2p0.send_header_for_blocks(self.blocks[0:2000]) + p2p0.send_header_for_blocks(self.blocks[2000:]) + p2p1.send_header_for_blocks(self.blocks[0:2000]) + p2p1.send_header_for_blocks(self.blocks[2000:]) + p2p2.send_header_for_blocks(self.blocks[0:200]) + + # Send blocks to node0. Block 102 will be rejected. + self.send_blocks_until_disconnected(p2p0) + self.assert_blockchain_height(self.nodes[0], 101) + + # Send all blocks to node1. All blocks will be accepted. + for i in range(2202): + p2p1.send_message(msg_block(self.blocks[i])) + # Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync. + p2p1.sync_with_ping(120) + assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202) + + # Send blocks to node2. Block 102 will be rejected. + self.send_blocks_until_disconnected(p2p2) + self.assert_blockchain_height(self.nodes[2], 101) + +if __name__ == '__main__': + AssumeValidTest().main() diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py new file mode 100755 index 000000000..8466f851c --- /dev/null +++ b/test/functional/feature_bip68_sequence.py @@ -0,0 +1,402 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test BIP68 implementation.""" + +import time + +from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex +from test_framework.script import CScript +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, bytes_to_hex_str, get_bip9_status, satoshi_round, sync_blocks + +SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31) +SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height) +SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift +SEQUENCE_LOCKTIME_MASK = 0x0000ffff + +# RPC error for non-BIP68 final transactions +NOT_FINAL_ERROR = "non-BIP68-final (code 64)" + +class BIP68Test(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.extra_args = [[], ["-acceptnonstdtxn=0"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"] + + # Generate some coins + self.nodes[0].generate(110) + + self.log.info("Running test disable flag") + self.test_disable_flag() + + self.log.info("Running test sequence-lock-confirmed-inputs") + self.test_sequence_lock_confirmed_inputs() + + self.log.info("Running test sequence-lock-unconfirmed-inputs") + self.test_sequence_lock_unconfirmed_inputs() + + self.log.info("Running test BIP68 not consensus before versionbits activation") + self.test_bip68_not_consensus() + + self.log.info("Activating BIP68 (and 112/113)") + self.activateCSV() + + self.log.info("Verifying nVersion=2 transactions are standard.") + self.log.info("Note that nVersion=2 transactions are always standard 
(independent of BIP68 activation status).") + self.test_version2_relay() + + self.log.info("Passed") + + # Test that BIP68 is not in effect if tx version is 1, or if + # the first sequence bit is set. + def test_disable_flag(self): + # Create some unconfirmed inputs + new_addr = self.nodes[0].getnewaddress() + self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BTC + + utxos = self.nodes[0].listunspent(0, 0) + assert(len(utxos) > 0) + + utxo = utxos[0] + + tx1 = CTransaction() + value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN) + + # Check that the disable flag disables relative locktime. + # If sequence locks were used, this would require 1 block for the + # input to mature. + sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1 + tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)] + tx1.vout = [CTxOut(value, CScript([b'a']))] + + tx1_signed = self.nodes[0].signrawtransactionwithwallet(ToHex(tx1))["hex"] + tx1_id = self.nodes[0].sendrawtransaction(tx1_signed) + tx1_id = int(tx1_id, 16) + + # This transaction will enable sequence-locks, so this transaction should + # fail + tx2 = CTransaction() + tx2.nVersion = 2 + sequence_value = sequence_value & 0x7fffffff + tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)] + tx2.vout = [CTxOut(int(value - self.relayfee * COIN), CScript([b'a' * 35]))] + tx2.rehash() + + assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2)) + + # Setting the version back down to 1 should disable the sequence lock, + # so this should be accepted. + tx2.nVersion = 1 + + self.nodes[0].sendrawtransaction(ToHex(tx2)) + + # Calculate the median time past of a prior block ("confirmations" before + # the current tip). + def get_median_time_past(self, confirmations): + block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations) + return self.nodes[0].getblockheader(block_hash)["mediantime"] + + # Test that sequence locks are respected for transactions spending confirmed inputs. + def test_sequence_lock_confirmed_inputs(self): + import random + + # Create lots of confirmed utxos, and use them to generate lots of random + # transactions. + max_outputs = 50 + addresses = [] + while len(addresses) < max_outputs: + addresses.append(self.nodes[0].getnewaddress()) + while len(self.nodes[0].listunspent()) < 200: + random.shuffle(addresses) + num_outputs = random.randint(1, max_outputs) + outputs = {} + for i in range(num_outputs): + outputs[addresses[i]] = random.randint(1, 20)*0.01 + self.nodes[0].sendmany("", outputs) + self.nodes[0].generate(1) + + utxos = self.nodes[0].listunspent() + + # Try creating a lot of random transactions. + # Each time, choose a random number of inputs, and randomly set + # some of those inputs to be sequence locked (and randomly choose + # between height/time locking). Small random chance of making the locks + # all pass.
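+ # As a worked example of the encoding used below: a height-based lock of + # 10 blocks is simply nSequence=10, while a time-based lock of 10*512 + # seconds is nSequence=(SEQUENCE_LOCKTIME_TYPE_FLAG | 10), since each time + # unit is 2**SEQUENCE_LOCKTIME_GRANULARITY = 512 seconds.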
+ for i in range(400): + # Randomly choose up to 10 inputs + num_inputs = random.randint(1, 10) + random.shuffle(utxos) + + # Track whether any sequence locks used should fail + should_pass = True + + # Track whether this transaction was built with sequence locks + using_sequence_locks = False + + tx = CTransaction() + tx.nVersion = 2 + value = 0 + for j in range(num_inputs): + sequence_value = 0xfffffffe # this disables sequence locks + + # 50% chance we enable sequence locks + if random.randint(0,1): + using_sequence_locks = True + + # 10% of the time, make the input sequence value pass + input_will_pass = (random.randint(1,10) == 1) + sequence_value = utxos[j]["confirmations"] + if not input_will_pass: + sequence_value += 1 + should_pass = False + + # Figure out what the median-time-past was for the confirmed input + # Note that if an input has N confirmations, we're going back N blocks + # from the tip so that we're looking up MTP of the block + # PRIOR to the one the input appears in, as per the BIP68 spec. + orig_time = self.get_median_time_past(utxos[j]["confirmations"]) + cur_time = self.get_median_time_past(0) # MTP of the tip + + # can only timelock this input if it's not too old -- otherwise use height + can_time_lock = True + if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK: + can_time_lock = False + + # if time-lockable, then 50% chance we make this a time lock + if random.randint(0,1) and can_time_lock: + # Find first time-lock value that fails, or latest one that succeeds + time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY + if input_will_pass and time_delta > cur_time - orig_time: + sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) + elif (not input_will_pass and time_delta <= cur_time - orig_time): + sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1 + sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG + tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value)) + value += utxos[j]["amount"]*COIN + # Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output + tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50 + tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a']))) + rawtx = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))["hex"] + + if (using_sequence_locks and not should_pass): + # This transaction should be rejected + assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx) + else: + # This raw transaction should be accepted + self.nodes[0].sendrawtransaction(rawtx) + utxos = self.nodes[0].listunspent() + + # Test that sequence locks on unconfirmed inputs must have nSequence + # height or time of 0 to be accepted. + # Then test that BIP68-invalid transactions are removed from the mempool + # after a reorg. + def test_sequence_lock_unconfirmed_inputs(self): + # Store height so we can easily reset the chain at the end of the test + cur_height = self.nodes[0].getblockcount() + + # Create a mempool tx. + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2) + tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid)) + tx1.rehash() + + # Anyone-can-spend mempool tx. + # Sequence lock of 0 should pass. 
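# Aside: the rule probed next, sketched as a height-lock check (the helper is
# illustrative only, not framework code). BIP68 counts from the height at
# which the prevout confirmed, and an input still in the mempool is treated
# as if it will confirm in the next block, so only a relative lock of 0 is
# immediately satisfiable -- hence tx2 below spends the mempool tx with
# nSequence=0.
def height_lock_satisfied(prevout_height, spend_height, nsequence):
    return spend_height >= prevout_height + (nsequence & SEQUENCE_LOCKTIME_MASK)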
+ tx2 = CTransaction() + tx2.nVersion = 2 + tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)] + tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))] + tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"] + tx2 = FromHex(tx2, tx2_raw) + tx2.rehash() + + self.nodes[0].sendrawtransaction(tx2_raw) + + # Create a spend of the 0th output of orig_tx with a sequence lock + # of 1, and test what happens when submitting. + # orig_tx.vout[0] must be an anyone-can-spend output + def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock): + sequence_value = 1 + if not use_height_lock: + sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG + + tx = CTransaction() + tx.nVersion = 2 + tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)] + tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee * COIN), CScript([b'a' * 35]))] + tx.rehash() + + if (orig_tx.hash in node.getrawmempool()): + # sendrawtransaction should fail if the tx is in the mempool + assert_raises_rpc_error(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx)) + else: + # sendrawtransaction should succeed if the tx is not in the mempool + node.sendrawtransaction(ToHex(tx)) + + return tx + + test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True) + test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) + + # Now mine some blocks, but make sure tx2 doesn't get mined. + # Use prioritisetransaction to lower the effective feerate to 0 + self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(-self.relayfee*COIN)) + cur_time = int(time.time()) + for i in range(10): + self.nodes[0].setmocktime(cur_time + 600) + self.nodes[0].generate(1) + cur_time += 600 + + assert(tx2.hash in self.nodes[0].getrawmempool()) + + test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True) + test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) + + # Mine tx2, and then try again + self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(self.relayfee*COIN)) + + # Advance the time on the node so that we can test timelocks + self.nodes[0].setmocktime(cur_time+600) + self.nodes[0].generate(1) + assert(tx2.hash not in self.nodes[0].getrawmempool()) + + # Now that tx2 is not in the mempool, a sequence locked spend should + # succeed + tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False) + assert(tx3.hash in self.nodes[0].getrawmempool()) + + self.nodes[0].generate(1) + assert(tx3.hash not in self.nodes[0].getrawmempool()) + + # One more test, this time using height locks + tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True) + assert(tx4.hash in self.nodes[0].getrawmempool()) + + # Now try combining confirmed and unconfirmed inputs + tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True) + assert(tx5.hash not in self.nodes[0].getrawmempool()) + + utxos = self.nodes[0].listunspent() + tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1)) + tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN) + raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"] + + assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5) + + # Test mempool-BIP68 consistency after reorg + # + # State of the transactions in the last blocks: + # ... -> [ tx2 ] -> [ tx3 ] + # tip-1 tip + # And currently tx4 is in the mempool. 
+ # + # If we invalidate the tip, tx3 should get added to the mempool, causing + # tx4 to be removed (fails sequence-lock). + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + assert(tx4.hash not in self.nodes[0].getrawmempool()) + assert(tx3.hash in self.nodes[0].getrawmempool()) + + # Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in + # diagram above). + # This would cause tx2 to be added back to the mempool, which in turn causes + # tx3 to be removed. + tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16) + height = self.nodes[0].getblockcount() + for i in range(2): + block = create_block(tip, create_coinbase(height), cur_time) + block.nVersion = 3 + block.rehash() + block.solve() + tip = block.sha256 + height += 1 + self.nodes[0].submitblock(ToHex(block)) + cur_time += 1 + + mempool = self.nodes[0].getrawmempool() + assert(tx3.hash not in mempool) + assert(tx2.hash in mempool) + + # Reset the chain and get rid of the mocktimed-blocks + self.nodes[0].setmocktime(0) + self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1)) + self.nodes[0].generate(10) + + # Make sure that BIP68 isn't being used to validate blocks, prior to + # versionbits activation. If more blocks are mined prior to this test + # being run, then it's possible the test has activated the soft fork, and + # this test should be moved to run earlier, or deleted. + def test_bip68_not_consensus(self): + assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active') + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2) + + tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid)) + tx1.rehash() + + # Make an anyone-can-spend transaction + tx2 = CTransaction() + tx2.nVersion = 1 + tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)] + tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))] + + # sign tx2 + tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"] + tx2 = FromHex(tx2, tx2_raw) + tx2.rehash() + + self.nodes[0].sendrawtransaction(ToHex(tx2)) + + # Now make an invalid spend of tx2 according to BIP68 + sequence_value = 100 # 100 block relative locktime + + tx3 = CTransaction() + tx3.nVersion = 2 + tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)] + tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee * COIN), CScript([b'a' * 35]))] + tx3.rehash() + + assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3)) + + # make a block that violates bip68; ensure that the tip updates + tip = int(self.nodes[0].getbestblockhash(), 16) + block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1)) + block.nVersion = 3 + block.vtx.extend([tx1, tx2, tx3]) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + add_witness_commitment(block) + block.solve() + + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) + assert_equal(self.nodes[0].getbestblockhash(), block.hash) + + def activateCSV(self): + # activation should happen at block height 432 (3 periods) + # getblockchaininfo will show CSV as active at block 431 (144 * 3 -1) since it's returning whether CSV is active for the next block. 
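# Aside: the arithmetic behind the heights used below, stated explicitly (the
# constant name is ours, for illustration). Regtest versionbits periods are
# 144 blocks, and CSV needs three full periods to walk from DEFINED through
# STARTED and LOCKED_IN to ACTIVE:
REGTEST_VB_PERIOD = 144
assert 3 * REGTEST_VB_PERIOD == 432      # height of the first block mined under the new rules
assert 3 * REGTEST_VB_PERIOD - 1 == 431  # tip height at which the *next* block is already "active"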
+ min_activation_height = 432
+ height = self.nodes[0].getblockcount()
+ assert_greater_than(min_activation_height - height, 2)
+ self.nodes[0].generate(min_activation_height - height - 2)
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
+ self.nodes[0].generate(1)
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
+ sync_blocks(self.nodes)
+
+ # Use self.nodes[1] to test that version 2 transactions are standard.
+ def test_version2_relay(self):
+ inputs = [ ]
+ outputs = { self.nodes[1].getnewaddress() : 1.0 }
+ rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+ rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
+ tx = FromHex(CTransaction(), rawtxfund)
+ tx.nVersion = 2
+ tx_signed = self.nodes[1].signrawtransactionwithwallet(ToHex(tx))["hex"]
+ self.nodes[1].sendrawtransaction(tx_signed)
+
+if __name__ == '__main__':
+ BIP68Test().main()
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
new file mode 100755
index 000000000..628cefb76
--- /dev/null
+++ b/test/functional/feature_block.py
@@ -0,0 +1,1322 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test block processing."""
+import copy
+import struct
+import time
+
+from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script, get_legacy_sigopcount_block
+from test_framework.key import CECKey
+from test_framework.messages import (
+ CBlock,
+ COIN,
+ COutPoint,
+ CTransaction,
+ CTxIn,
+ CTxOut,
+ MAX_BLOCK_BASE_SIZE,
+ uint256_from_compact,
+ uint256_from_str,
+)
+from test_framework.mininode import P2PDataStore
+from test_framework.script import (
+ CScript,
+ MAX_SCRIPT_ELEMENT_SIZE,
+ OP_2DUP,
+ OP_CHECKMULTISIG,
+ OP_CHECKMULTISIGVERIFY,
+ OP_CHECKSIG,
+ OP_CHECKSIGVERIFY,
+ OP_ELSE,
+ OP_ENDIF,
+ OP_EQUAL,
+ OP_DROP,
+ OP_FALSE,
+ OP_HASH160,
+ OP_IF,
+ OP_INVALIDOPCODE,
+ OP_RETURN,
+ OP_TRUE,
+ SIGHASH_ALL,
+ SignatureHash,
+ hash160,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+MAX_BLOCK_SIGOPS = 20000
+
+# Use this class for tests that require behavior other than normal "mininode" behavior.
+# For now, it is used to serialize a bloated varint (b64).
+class CBrokenBlock(CBlock):
+ def initialize(self, base_block):
+ self.vtx = copy.deepcopy(base_block.vtx)
+ self.hashMerkleRoot = self.calc_merkle_root()
+
+ def serialize(self, with_witness=False):
+ r = b""
+ r += super(CBlock, self).serialize()
+ r += struct.pack("<BQ", 255, len(self.vtx))
+ for tx in self.vtx:
+ r += tx.serialize()
+ return r
+
+ def normal_serialize(self):
+ return super().serialize()
+
+class FullBlockTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+ self.extra_args = [[]]
+
+ def run_test(self):
+ node = self.nodes[0] # convenience reference to the node
+
+ self.bootstrap_p2p() # Add one p2p connection to the node
+
+ self.block_heights = {}
+ self.coinbase_key = CECKey()
+ self.coinbase_key.set_secretbytes(b"horsebattery")
+ self.coinbase_pubkey = self.coinbase_key.get_pubkey()
+ self.tip = None
+ self.blocks = {}
+ self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
+ self.block_heights[self.genesis_hash] = 0
+ self.spendable_outputs = []
+
+ # Create a new block
+ b0 = self.next_block(0)
+ self.save_spendable_output()
+ self.sync_blocks([b0])
+
+ # Allow the block to mature
+ blocks = []
+ for i in range(99):
+ blocks.append(self.next_block(5000 + i))
+ self.save_spendable_output()
+ self.sync_blocks(blocks)
+
+ # collect spendable outputs now to avoid cluttering the code later on
+ out = []
+ for i in range(33):
+ out.append(self.get_spendable_output())
+
+ # Start by building a couple of blocks on top (which output is spent is
+ # in parentheses):
+ # genesis -> b1 (0) -> b2 (1)
+ b1 = self.next_block(1, spend=out[0])
+ self.save_spendable_output()
+
+ b2 = self.next_block(2, spend=out[1])
+ self.save_spendable_output()
+
+ self.sync_blocks([b1, b2])
+
+ # Fork like this:
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1)
+ #
+ # Nothing should happen at this point. We saw b2 first so it takes priority.
+ self.log.info("Don't reorg to a chain of the same length")
+ self.move_tip(1)
+ b3 = self.next_block(3, spend=out[1])
+ txout_b3 = b3.vtx[1]
+ self.sync_blocks([b3], False)
+
+ # Now we add another block to make the alternative chain longer.
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Reorg to a longer chain")
+ b4 = self.next_block(4, spend=out[2])
+ self.sync_blocks([b4])
+
+ # ... and back to the first chain.
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b3 (1) -> b4 (2) + self.move_tip(2) + b5 = self.next_block(5, spend=out[2]) + self.save_spendable_output() + self.sync_blocks([b5], False) + + self.log.info("Reorg back to the original chain") + b6 = self.next_block(6, spend=out[3]) + self.sync_blocks([b6], True) + + # Try to create a fork that double-spends + # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b7 (2) -> b8 (4) + # \-> b3 (1) -> b4 (2) + self.log.info("Reject a chain with a double spend, even if it is longer") + self.move_tip(5) + b7 = self.next_block(7, spend=out[2]) + self.sync_blocks([b7], False) + + b8 = self.next_block(8, spend=out[4]) + self.sync_blocks([b8], False, reconnect=True) + + # Try to create a block that has too much fee + # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b9 (4) + # \-> b3 (1) -> b4 (2) + self.log.info("Reject a block where the miner creates too much coinbase reward") + self.move_tip(6) + b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1) + self.sync_blocks([b9], success=False, reject_reason='bad-cb-amount', reconnect=True) + + # Create a fork that ends in a block with too much fee (the one that causes the reorg) + # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b10 (3) -> b11 (4) + # \-> b3 (1) -> b4 (2) + self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer") + self.move_tip(5) + b10 = self.next_block(10, spend=out[3]) + self.sync_blocks([b10], False) + + b11 = self.next_block(11, spend=out[4], additional_coinbase_value=1) + self.sync_blocks([b11], success=False, reject_reason='bad-cb-amount', reconnect=True) + + # Try again, but with a valid fork first + # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b12 (3) -> b13 (4) -> b14 (5) + # \-> b3 (1) -> b4 (2) + self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer (on a forked chain)") + self.move_tip(5) + b12 = self.next_block(12, spend=out[3]) + self.save_spendable_output() + b13 = self.next_block(13, spend=out[4]) + self.save_spendable_output() + b14 = self.next_block(14, spend=out[5], additional_coinbase_value=1) + self.sync_blocks([b12, b13, b14], success=False, reject_reason='bad-cb-amount', reconnect=True) + + # New tip should be b13. 
+ assert_equal(node.getbestblockhash(), b13.hash)
+
+ # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Accept a block with lots of checksigs")
+ lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1))
+ self.move_tip(13)
+ b15 = self.next_block(15, spend=out[5], script=lots_of_checksigs)
+ self.save_spendable_output()
+ self.sync_blocks([b15], True)
+
+ self.log.info("Reject a block with too many checksigs")
+ too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
+ b16 = self.next_block(16, spend=out[6], script=too_many_checksigs)
+ self.sync_blocks([b16], success=False, reject_reason='bad-blk-sigops', reconnect=True)
+
+ # Attempt to spend a transaction created on a different fork
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Reject a block with a spend from a re-org'ed out tx")
+ self.move_tip(15)
+ b17 = self.next_block(17, spend=txout_b3)
+ self.sync_blocks([b17], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
+
+ # Attempt to spend a transaction created on a different fork (on a fork this time)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5)
+ # \-> b18 (b3.vtx[1]) -> b19 (6)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Reject a block with a spend from a re-org'ed out tx (on a forked chain)")
+ self.move_tip(13)
+ b18 = self.next_block(18, spend=txout_b3)
+ self.sync_blocks([b18], False)
+
+ b19 = self.next_block(19, spend=out[6])
+ self.sync_blocks([b19], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
+
+ # Attempt to spend a coinbase at depth too low
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Reject a block spending an immature coinbase.")
+ self.move_tip(15)
+ b20 = self.next_block(20, spend=out[7])
+ self.sync_blocks([b20], success=False, reject_reason='bad-txns-premature-spend-of-coinbase')
+
+ # Attempt to spend a coinbase at depth too low (on a fork this time)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5)
+ # \-> b21 (6) -> b22 (5)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Reject a block spending an immature coinbase (on a forked chain)")
+ self.move_tip(13)
+ b21 = self.next_block(21, spend=out[6])
+ self.sync_blocks([b21], False)
+
+ b22 = self.next_block(22, spend=out[5])
+ self.sync_blocks([b22], success=False, reject_reason='bad-txns-premature-spend-of-coinbase')
+
+ # Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure it's accepted/rejected
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
+ # \-> b24 (6) -> b25 (7)
+ # \-> b3 (1) -> b4 (2)
+ self.log.info("Accept a block of size MAX_BLOCK_BASE_SIZE")
+ self.move_tip(15)
+ b23 = self.next_block(23, spend=out[6])
+ tx = CTransaction()
+ script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69
+ script_output = CScript([b'\x00' * script_length])
+ tx.vout.append(CTxOut(0, script_output))
+ tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
+ b23 = self.update_block(23, [tx])
+ # Make sure the math above worked out to produce a max-sized block
+ assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE)
+ self.sync_blocks([b23],
True) + self.save_spendable_output() + + self.log.info("Reject a block of size MAX_BLOCK_BASE_SIZE + 1") + self.move_tip(15) + b24 = self.next_block(24, spend=out[6]) + script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69 + script_output = CScript([b'\x00' * (script_length + 1)]) + tx.vout = [CTxOut(0, script_output)] + b24 = self.update_block(24, [tx]) + assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE + 1) + self.sync_blocks([b24], success=False, reject_reason='bad-blk-length', reconnect=True) + + b25 = self.next_block(25, spend=out[7]) + self.sync_blocks([b25], False) + + # Create blocks with a coinbase input script size out of range + # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) + # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) + # \-> ... (6) -> ... (7) + # \-> b3 (1) -> b4 (2) + self.log.info("Reject a block with coinbase input script size out of range") + self.move_tip(15) + b26 = self.next_block(26, spend=out[6]) + b26.vtx[0].vin[0].scriptSig = b'\x00' + b26.vtx[0].rehash() + # update_block causes the merkle root to get updated, even with no new + # transactions, and updates the required state. + b26 = self.update_block(26, []) + self.sync_blocks([b26], success=False, reject_reason='bad-cb-length', reconnect=True) + + # Extend the b26 chain to make sure bitcoind isn't accepting b26 + b27 = self.next_block(27, spend=out[7]) + self.sync_blocks([b27], False) + + # Now try a too-large-coinbase script + self.move_tip(15) + b28 = self.next_block(28, spend=out[6]) + b28.vtx[0].vin[0].scriptSig = b'\x00' * 101 + b28.vtx[0].rehash() + b28 = self.update_block(28, []) + self.sync_blocks([b28], success=False, reject_reason='bad-cb-length', reconnect=True) + + # Extend the b28 chain to make sure bitcoind isn't accepting b28 + b29 = self.next_block(29, spend=out[7]) + self.sync_blocks([b29], False) + + # b30 has a max-sized coinbase scriptSig. + self.move_tip(23) + b30 = self.next_block(30) + b30.vtx[0].vin[0].scriptSig = b'\x00' * 100 + b30.vtx[0].rehash() + b30 = self.update_block(30, []) + self.sync_blocks([b30], True) + self.save_spendable_output() + + # b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY + # + # genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) + # \-> b36 (11) + # \-> b34 (10) + # \-> b32 (9) + # + + # MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end. 
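# Aside: checking the sigop arithmetic the next two blocks rely on. The legacy
# counter prices each bare OP_CHECKMULTISIG at 20 sigops, and the P2PK-style
# coinbase output contributes one OP_CHECKSIG of its own:
assert (MAX_BLOCK_SIGOPS - 1) // 20 == 999
assert 999 * 20 + 19 + 1 == MAX_BLOCK_SIGOPS                      # b31: exactly at the cap
assert (MAX_BLOCK_SIGOPS // 20) * 20 + 1 == MAX_BLOCK_SIGOPS + 1  # b32: one sigop over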
+ self.log.info("Accept a block with the max number of OP_CHECKMULTISIG sigops") + lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19) + b31 = self.next_block(31, spend=out[8], script=lots_of_multisigs) + assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS) + self.sync_blocks([b31], True) + self.save_spendable_output() + + # this goes over the limit because the coinbase has one sigop + self.log.info("Reject a block with too many OP_CHECKMULTISIG sigops") + too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20)) + b32 = self.next_block(32, spend=out[9], script=too_many_multisigs) + assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1) + self.sync_blocks([b32], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + # CHECKMULTISIGVERIFY + self.log.info("Accept a block with the max number of OP_CHECKMULTISIGVERIFY sigops") + self.move_tip(31) + lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19) + b33 = self.next_block(33, spend=out[9], script=lots_of_multisigs) + self.sync_blocks([b33], True) + self.save_spendable_output() + + self.log.info("Reject a block with too many OP_CHECKMULTISIGVERIFY sigops") + too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20)) + b34 = self.next_block(34, spend=out[10], script=too_many_multisigs) + self.sync_blocks([b34], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + # CHECKSIGVERIFY + self.log.info("Accept a block with the max number of OP_CHECKSIGVERIFY sigops") + self.move_tip(33) + lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1)) + b35 = self.next_block(35, spend=out[10], script=lots_of_checksigs) + self.sync_blocks([b35], True) + self.save_spendable_output() + + self.log.info("Reject a block with too many OP_CHECKSIGVERIFY sigops") + too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS)) + b36 = self.next_block(36, spend=out[11], script=too_many_checksigs) + self.sync_blocks([b36], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + # Check spending of a transaction in a block which failed to connect + # + # b6 (3) + # b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) + # \-> b37 (11) + # \-> b38 (11/37) + # + + # save 37's spendable output, but then double-spend out11 to invalidate the block + self.log.info("Reject a block spending transaction from a block which failed to connect") + self.move_tip(35) + b37 = self.next_block(37, spend=out[11]) + txout_b37 = b37.vtx[1] + tx = self.create_and_sign_transaction(out[11], 0) + b37 = self.update_block(37, [tx]) + self.sync_blocks([b37], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True) + + # attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid + self.move_tip(35) + b38 = self.next_block(38, spend=txout_b37) + self.sync_blocks([b38], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True) + + # Check P2SH SigOp counting + # + # + # 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12) + # \-> b40 (12) + # + # b39 - create some P2SH outputs that will require 6 sigops to spend: + # + # redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG + # p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL + # + self.log.info("Check P2SH SIGOPS are correctly 
counted") + self.move_tip(35) + b39 = self.next_block(39) + b39_outputs = 0 + b39_sigops_per_output = 6 + + # Build the redeem script, hash it, use hash to create the p2sh script + redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG]) + redeem_script_hash = hash160(redeem_script) + p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL]) + + # Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE + # This must be signed because it is spending a coinbase + spend = out[11] + tx = self.create_tx(spend, 0, 1, p2sh_script) + tx.vout.append(CTxOut(spend.vout[0].nValue - 1, CScript([OP_TRUE]))) + self.sign_tx(tx, spend) + tx.rehash() + b39 = self.update_block(39, [tx]) + b39_outputs += 1 + + # Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE + tx_new = None + tx_last = tx + total_size = len(b39.serialize()) + while(total_size < MAX_BLOCK_BASE_SIZE): + tx_new = self.create_tx(tx_last, 1, 1, p2sh_script) + tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE]))) + tx_new.rehash() + total_size += len(tx_new.serialize()) + if total_size >= MAX_BLOCK_BASE_SIZE: + break + b39.vtx.append(tx_new) # add tx to block + tx_last = tx_new + b39_outputs += 1 + + b39 = self.update_block(39, []) + self.sync_blocks([b39], True) + self.save_spendable_output() + + # Test sigops in P2SH redeem scripts + # + # b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops. + # The first tx has one sigop and then at the end we add 2 more to put us just over the max. + # + # b41 does the same, less one, so it has the maximum sigops permitted. + # + self.log.info("Reject a block with too many P2SH sigops") + self.move_tip(39) + b40 = self.next_block(40, spend=out[12]) + sigops = get_legacy_sigopcount_block(b40) + numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output + assert_equal(numTxes <= b39_outputs, True) + + lastOutpoint = COutPoint(b40.vtx[1].sha256, 0) + new_txs = [] + for i in range(1, numTxes + 1): + tx = CTransaction() + tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) + tx.vin.append(CTxIn(lastOutpoint, b'')) + # second input is corresponding P2SH output from b39 + tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) + # Note: must pass the redeem_script (not p2sh_script) to the signature hash function + (sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL) + sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL])) + scriptSig = CScript([sig, redeem_script]) + + tx.vin[1].scriptSig = scriptSig + tx.rehash() + new_txs.append(tx) + lastOutpoint = COutPoint(tx.sha256, 0) + + b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1 + tx = CTransaction() + tx.vin.append(CTxIn(lastOutpoint, b'')) + tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill))) + tx.rehash() + new_txs.append(tx) + self.update_block(40, new_txs) + self.sync_blocks([b40], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + # same as b40, but one less sigop + self.log.info("Accept a block with the max number of P2SH sigops") + self.move_tip(39) + b41 = self.next_block(41, spend=None) + self.update_block(41, b40.vtx[1:-1]) + b41_sigops_to_fill = b40_sigops_to_fill - 1 + tx = CTransaction() + tx.vin.append(CTxIn(lastOutpoint, b'')) + tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill))) + tx.rehash() + self.update_block(41, [tx]) + self.sync_blocks([b41], True) + + # 
Fork off of b39 to create a constant base again + # + # b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) + # \-> b41 (12) + # + self.move_tip(39) + b42 = self.next_block(42, spend=out[12]) + self.save_spendable_output() + + b43 = self.next_block(43, spend=out[13]) + self.save_spendable_output() + self.sync_blocks([b42, b43], True) + + # Test a number of really invalid scenarios + # + # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14) + # \-> ??? (15) + + # The next few blocks are going to be created "by hand" since they'll do funky things, such as having + # the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works. + self.log.info("Build block 44 manually") + height = self.block_heights[self.tip.sha256] + 1 + coinbase = create_coinbase(height, self.coinbase_pubkey) + b44 = CBlock() + b44.nTime = self.tip.nTime + 1 + b44.hashPrevBlock = self.tip.sha256 + b44.nBits = 0x207fffff + b44.vtx.append(coinbase) + b44.hashMerkleRoot = b44.calc_merkle_root() + b44.solve() + self.tip = b44 + self.block_heights[b44.sha256] = height + self.blocks[44] = b44 + self.sync_blocks([b44], True) + + self.log.info("Reject a block with a non-coinbase as the first tx") + non_coinbase = self.create_tx(out[15], 0, 1) + b45 = CBlock() + b45.nTime = self.tip.nTime + 1 + b45.hashPrevBlock = self.tip.sha256 + b45.nBits = 0x207fffff + b45.vtx.append(non_coinbase) + b45.hashMerkleRoot = b45.calc_merkle_root() + b45.calc_sha256() + b45.solve() + self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256] + 1 + self.tip = b45 + self.blocks[45] = b45 + self.sync_blocks([b45], success=False, reject_reason='bad-cb-missing', reconnect=True) + + self.log.info("Reject a block with no transactions") + self.move_tip(44) + b46 = CBlock() + b46.nTime = b44.nTime + 1 + b46.hashPrevBlock = b44.sha256 + b46.nBits = 0x207fffff + b46.vtx = [] + b46.hashMerkleRoot = 0 + b46.solve() + self.block_heights[b46.sha256] = self.block_heights[b44.sha256] + 1 + self.tip = b46 + assert 46 not in self.blocks + self.blocks[46] = b46 + self.sync_blocks([b46], success=False, reject_reason='bad-blk-length', reconnect=True) + + self.log.info("Reject a block with invalid work") + self.move_tip(44) + b47 = self.next_block(47, solve=False) + target = uint256_from_compact(b47.nBits) + while b47.sha256 < target: + b47.nNonce += 1 + b47.rehash() + self.sync_blocks([b47], False, request_block=False) + + self.log.info("Reject a block with a timestamp >2 hours in the future") + self.move_tip(44) + b48 = self.next_block(48, solve=False) + b48.nTime = int(time.time()) + 60 * 60 * 3 + b48.solve() + self.sync_blocks([b48], False, request_block=False) + + self.log.info("Reject a block with invalid merkle hash") + self.move_tip(44) + b49 = self.next_block(49) + b49.hashMerkleRoot += 1 + b49.solve() + self.sync_blocks([b49], success=False, reject_reason='bad-txnmrklroot', reconnect=True) + + self.log.info("Reject a block with incorrect POW limit") + self.move_tip(44) + b50 = self.next_block(50) + b50.nBits = b50.nBits - 1 + b50.solve() + self.sync_blocks([b50], False, request_block=False, reconnect=True) + + self.log.info("Reject a block with two coinbase transactions") + self.move_tip(44) + b51 = self.next_block(51) + cb2 = create_coinbase(51, self.coinbase_pubkey) + b51 = self.update_block(51, [cb2]) + self.sync_blocks([b51], success=False, reject_reason='bad-cb-multiple', reconnect=True) + + self.log.info("Reject a block with duplicate transactions") + # 
Note: txns have to be in the right position in the merkle tree to trigger this error + self.move_tip(44) + b52 = self.next_block(52, spend=out[15]) + tx = self.create_tx(b52.vtx[1], 0, 1) + b52 = self.update_block(52, [tx, tx]) + self.sync_blocks([b52], success=False, reject_reason='bad-txns-duplicate', reconnect=True) + + # Test block timestamps + # -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) + # \-> b54 (15) + # + self.move_tip(43) + b53 = self.next_block(53, spend=out[14]) + self.sync_blocks([b53], False) + self.save_spendable_output() + + self.log.info("Reject a block with timestamp before MedianTimePast") + b54 = self.next_block(54, spend=out[15]) + b54.nTime = b35.nTime - 1 + b54.solve() + self.sync_blocks([b54], False, request_block=False) + + # valid timestamp + self.move_tip(53) + b55 = self.next_block(55, spend=out[15]) + b55.nTime = b35.nTime + self.update_block(55, []) + self.sync_blocks([b55], True) + self.save_spendable_output() + + # Test Merkle tree malleability + # + # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16) + # \-> b57 (16) + # \-> b56p2 (16) + # \-> b56 (16) + # + # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without + # affecting the merkle root of a block, while still invalidating it. + # See: src/consensus/merkle.h + # + # b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx. + # Result: OK + # + # b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle + # root but duplicate transactions. + # Result: Fails + # + # b57p2 has six transactions in its merkle tree: + # - coinbase, tx, tx1, tx2, tx3, tx4 + # Merkle root calculation will duplicate as necessary. + # Result: OK. + # + # b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches + # duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates + # that the error was caught early, avoiding a DOS vulnerability.) 
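# Aside: a self-contained sketch of why the duplication trick below preserves
# the Merkle root (CVE-2012-2459). Bitcoin's Merkle computation duplicates the
# last hash of any odd-length level, so [a, b, c] and [a, b, c, c] produce the
# same root (function names are ours, for illustration only):
import hashlib

def dsha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root_demo(hashes):
    hashes = list(hashes)
    while len(hashes) > 1:
        if len(hashes) % 2:
            hashes.append(hashes[-1])  # duplicate the trailing hash
        hashes = [dsha256(l + r) for l, r in zip(hashes[::2], hashes[1::2])]
    return hashes[0]

a, b, c = (bytes([i]) * 32 for i in range(3))
assert merkle_root_demo([a, b, c]) == merkle_root_demo([a, b, c, c])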
+ + # b57 - a good block with 2 txs, don't submit until end + self.move_tip(55) + b57 = self.next_block(57) + tx = self.create_and_sign_transaction(out[16], 1) + tx1 = self.create_tx(tx, 0, 1) + b57 = self.update_block(57, [tx, tx1]) + + # b56 - copy b57, add a duplicate tx + self.log.info("Reject a block with a duplicate transaction in the Merkle Tree (but with a valid Merkle Root)") + self.move_tip(55) + b56 = copy.deepcopy(b57) + self.blocks[56] = b56 + assert_equal(len(b56.vtx), 3) + b56 = self.update_block(56, [tx1]) + assert_equal(b56.hash, b57.hash) + self.sync_blocks([b56], success=False, reject_reason='bad-txns-duplicate', reconnect=True) + + # b57p2 - a good block with 6 tx'es, don't submit until end + self.move_tip(55) + b57p2 = self.next_block("57p2") + tx = self.create_and_sign_transaction(out[16], 1) + tx1 = self.create_tx(tx, 0, 1) + tx2 = self.create_tx(tx1, 0, 1) + tx3 = self.create_tx(tx2, 0, 1) + tx4 = self.create_tx(tx3, 0, 1) + b57p2 = self.update_block("57p2", [tx, tx1, tx2, tx3, tx4]) + + # b56p2 - copy b57p2, duplicate two non-consecutive tx's + self.log.info("Reject a block with two duplicate transactions in the Merkle Tree (but with a valid Merkle Root)") + self.move_tip(55) + b56p2 = copy.deepcopy(b57p2) + self.blocks["b56p2"] = b56p2 + assert_equal(b56p2.hash, b57p2.hash) + assert_equal(len(b56p2.vtx), 6) + b56p2 = self.update_block("b56p2", [tx3, tx4]) + self.sync_blocks([b56p2], success=False, reject_reason='bad-txns-duplicate', reconnect=True) + + self.move_tip("57p2") + self.sync_blocks([b57p2], True) + + self.move_tip(57) + self.sync_blocks([b57], False) # The tip is not updated because 57p2 seen first + self.save_spendable_output() + + # Test a few invalid tx types + # + # -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) + # \-> ??? (17) + # + + # tx with prevout.n out of range + self.log.info("Reject a block with a transaction with prevout.n out of range") + self.move_tip(57) + b58 = self.next_block(58, spend=out[17]) + tx = CTransaction() + assert(len(out[17].vout) < 42) + tx.vin.append(CTxIn(COutPoint(out[17].sha256, 42), CScript([OP_TRUE]), 0xffffffff)) + tx.vout.append(CTxOut(0, b"")) + tx.calc_sha256() + b58 = self.update_block(58, [tx]) + self.sync_blocks([b58], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True) + + # tx with output value > input value + self.log.info("Reject a block with a transaction with outputs > inputs") + self.move_tip(57) + b59 = self.next_block(59) + tx = self.create_and_sign_transaction(out[17], 51 * COIN) + b59 = self.update_block(59, [tx]) + self.sync_blocks([b59], success=False, reject_reason='bad-txns-in-belowout', reconnect=True) + + # reset to good chain + self.move_tip(57) + b60 = self.next_block(60, spend=out[17]) + self.sync_blocks([b60], True) + self.save_spendable_output() + + # Test BIP30 + # + # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) + # \-> b61 (18) + # + # Blocks are not allowed to contain a transaction whose id matches that of an earlier, + # not-fully-spent transaction in the same chain. To test, make identical coinbases; + # the second one should be rejected. 
+ # + self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)") + self.move_tip(60) + b61 = self.next_block(61, spend=out[18]) + b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig # Equalize the coinbases + b61.vtx[0].rehash() + b61 = self.update_block(61, []) + assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize()) + self.sync_blocks([b61], success=False, reject_reason='bad-txns-BIP30', reconnect=True) + + # Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests) + # + # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) + # \-> b62 (18) + # + self.log.info("Reject a block with a transaction with a nonfinal locktime") + self.move_tip(60) + b62 = self.next_block(62) + tx = CTransaction() + tx.nLockTime = 0xffffffff # this locktime is non-final + tx.vin.append(CTxIn(COutPoint(out[18].sha256, 0))) # don't set nSequence + tx.vout.append(CTxOut(0, CScript([OP_TRUE]))) + assert(tx.vin[0].nSequence < 0xffffffff) + tx.calc_sha256() + b62 = self.update_block(62, [tx]) + self.sync_blocks([b62], success=False, reject_reason='bad-txns-nonfinal') + + # Test a non-final coinbase is also rejected + # + # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) + # \-> b63 (-) + # + self.log.info("Reject a block with a coinbase transaction with a nonfinal locktime") + self.move_tip(60) + b63 = self.next_block(63) + b63.vtx[0].nLockTime = 0xffffffff + b63.vtx[0].vin[0].nSequence = 0xDEADBEEF + b63.vtx[0].rehash() + b63 = self.update_block(63, []) + self.sync_blocks([b63], success=False, reject_reason='bad-txns-nonfinal') + + # This checks that a block with a bloated VARINT between the block_header and the array of tx such that + # the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint, + # does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not + # care whether the bloated block is accepted or rejected; it only cares that the second block is accepted. + # + # What matters is that the receiving node should not reject the bloated block, and then reject the canonical + # block on the basis that it's the same as an already-rejected block (which would be a consensus failure.) 
+ #
+ # -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18)
+ # \
+ # b64a (18)
+ # b64a is a bloated block (non-canonical varint)
+ # b64 is a good block (same as b64a, but with a canonical varint)
+ #
+ self.log.info("Accept a valid block even if a bloated version of the block has previously been sent")
+ self.move_tip(60)
+ regular_block = self.next_block("64a", spend=out[18])
+
+ # make it a "broken block" with non-canonical serialization
+ b64a = CBrokenBlock(regular_block)
+ b64a.initialize(regular_block)
+ self.blocks["64a"] = b64a
+ self.tip = b64a
+ tx = CTransaction()
+
+ # use canonical serialization to calculate size
+ script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69
+ script_output = CScript([b'\x00' * script_length])
+ tx.vout.append(CTxOut(0, script_output))
+ tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
+ b64a = self.update_block("64a", [tx])
+ assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8)
+ self.sync_blocks([b64a], success=False, reject_reason='non-canonical ReadCompactSize():')
+
+ # bitcoind doesn't disconnect us for sending a bloated block, but if we subsequently
+ # resend the header message, it won't send us the getdata message again. Just
+ # disconnect and reconnect and then call sync_blocks.
+ # TODO: improve this test to be less dependent on P2P DOS behaviour.
+ node.disconnect_p2ps()
+ self.reconnect_p2p()
+
+ self.move_tip(60)
+ b64 = CBlock(b64a)
+ b64.vtx = copy.deepcopy(b64a.vtx)
+ assert_equal(b64.hash, b64a.hash)
+ assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE)
+ self.blocks[64] = b64
+ b64 = self.update_block(64, [])
+ self.sync_blocks([b64], True)
+ self.save_spendable_output()
+
+ # Spend an output created in the block itself
+ #
+ # -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
+ #
+ self.log.info("Accept a block with a transaction spending an output created in the same block")
+ self.move_tip(64)
+ b65 = self.next_block(65)
+ tx1 = self.create_and_sign_transaction(out[19], out[19].vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 0)
+ b65 = self.update_block(65, [tx1, tx2])
+ self.sync_blocks([b65], True)
+ self.save_spendable_output()
+
+ # Attempt to spend an output created later in the same block
+ #
+ # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
+ # \-> b66 (20)
+ self.log.info("Reject a block with a transaction spending an output created later in the same block")
+ self.move_tip(65)
+ b66 = self.next_block(66)
+ tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 1)
+ b66 = self.update_block(66, [tx2, tx1])
+ self.sync_blocks([b66], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
+
+ # Attempt to double-spend a transaction created in a block
+ #
+ # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
+ # \-> b67 (20)
+ #
+ self.log.info("Reject a block with a transaction double-spending a transaction created in the same block")
+ self.move_tip(65)
+ b67 = self.next_block(67)
+ tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 1)
+ tx3 = self.create_and_sign_transaction(tx1, 2)
+ b67 = self.update_block(67, [tx1, tx2, tx3])
+ self.sync_blocks([b67], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
+
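# Aside: the consensus check the upcoming b68/b69 pair exercises, reduced to
# one line of arithmetic: a coinbase may claim at most subsidy + fees (the
# helper is illustrative, not framework code). b68 adds 10 satoshis to the
# coinbase but its transaction only leaves 9 in fees; b69 leaves exactly 10.
def coinbase_claim_ok(extra_claimed, fees):
    return extra_claimed <= fees

assert not coinbase_claim_ok(10, 9)  # b68 -> rejected with 'bad-cb-amount'
assert coinbase_claim_ok(10, 10)     # b69 -> accepted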
+ # More tests of block subsidy
+ #
+ # -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
+ # \-> b68 (20)
+ #
+ # b68 - coinbase with an extra 10 satoshis; creates a tx that sends 9 satoshis from out[20] to fees.
+ # This fails because the coinbase claims 1 satoshi more than the available fees.
+ #
+ # b69 - coinbase with an extra 10 satoshis, and a tx that provides a 10 satoshi fee.
+ # This succeeds.
+ #
+ self.log.info("Reject a block trying to claim too much subsidy in the coinbase transaction")
+ self.move_tip(65)
+ b68 = self.next_block(68, additional_coinbase_value=10)
+ tx = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue - 9)
+ b68 = self.update_block(68, [tx])
+ self.sync_blocks([b68], success=False, reject_reason='bad-cb-amount', reconnect=True)
+
+ self.log.info("Accept a block claiming the correct subsidy in the coinbase transaction")
+ self.move_tip(65)
+ b69 = self.next_block(69, additional_coinbase_value=10)
+ tx = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue - 10)
+ self.update_block(69, [tx])
+ self.sync_blocks([b69], True)
+ self.save_spendable_output()
+
+ # Test spending the outpoint of a non-existent transaction
+ #
+ # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
+ # \-> b70 (21)
+ #
+ self.log.info("Reject a block containing a transaction spending from a non-existent input")
+ self.move_tip(69)
+ b70 = self.next_block(70, spend=out[21])
+ bogus_tx = CTransaction()
+ bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
+ tx.vout.append(CTxOut(1, b""))
+ b70 = self.update_block(70, [tx])
+ self.sync_blocks([b70], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True)
+
+ # Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
+ #
+ # -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
+ # \-> b71 (21)
+ #
+ # b72 is a good block.
+ # b71 is a copy of b72, but re-adds one of its transactions. However, it has the same hash as b72.
+ self.log.info("Reject a block containing a duplicate transaction but with the same Merkle root (Merkle tree malleability)")
+ self.move_tip(69)
+ b72 = self.next_block(72)
+ tx1 = self.create_and_sign_transaction(out[21], 2)
+ tx2 = self.create_and_sign_transaction(tx1, 1)
+ b72 = self.update_block(72, [tx1, tx2]) # now tip is 72
+ b71 = copy.deepcopy(b72)
+ b71.vtx.append(tx2) # add duplicate tx2
+ self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69
+ self.blocks[71] = b71
+
+ assert_equal(len(b71.vtx), 4)
+ assert_equal(len(b72.vtx), 3)
+ assert_equal(b72.sha256, b71.sha256)
+
+ self.move_tip(71)
+ self.sync_blocks([b71], success=False, reject_reason='bad-txns-duplicate', reconnect=True)
+
+ self.move_tip(72)
+ self.sync_blocks([b72], True)
+ self.save_spendable_output()
+
+ # Test some invalid scripts and MAX_BLOCK_SIGOPS
+ #
+ # -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
+ # \-> b** (22)
+ #
+
+ # b73 - tx with excessive sigops that are placed after an excessively large script element.
+ # The purpose of the test is to make sure those sigops are counted.
+ # + # script is a bytearray of size 20,526 + # + # bytearray[0-19,998] : OP_CHECKSIG + # bytearray[19,999] : OP_PUSHDATA4 + # bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format) + # bytearray[20,004-20,525]: unread data (script_element) + # bytearray[20,526] : OP_CHECKSIG (this puts us over the limit) + self.log.info("Reject a block containing too many sigops after a large script element") + self.move_tip(72) + b73 = self.next_block(73) + size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1 + a = bytearray([OP_CHECKSIG] * size) + a[MAX_BLOCK_SIGOPS - 1] = int("4e", 16) # OP_PUSHDATA4 + + element_size = MAX_SCRIPT_ELEMENT_SIZE + 1 + a[MAX_BLOCK_SIGOPS] = element_size % 256 + a[MAX_BLOCK_SIGOPS + 1] = element_size // 256 + a[MAX_BLOCK_SIGOPS + 2] = 0 + a[MAX_BLOCK_SIGOPS + 3] = 0 + + tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) + b73 = self.update_block(73, [tx]) + assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS + 1) + self.sync_blocks([b73], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + # b74/75 - if we push an invalid script element, all previous sigops are counted, + # but sigops after the element are not counted. + # + # The invalid script element is that the push_data indicates that + # there will be a large amount of data (0xffffff bytes), but we only + # provide a much smaller number. These bytes are CHECKSIGS so they would + # cause b75 to fail for excessive sigops, if those bytes were counted. + # + # b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element + # b75 succeeds because we put MAX_BLOCK_SIGOPS before the element + self.log.info("Check sigops are counted correctly after an invalid script element") + self.move_tip(72) + b74 = self.next_block(74) + size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561 + a = bytearray([OP_CHECKSIG] * size) + a[MAX_BLOCK_SIGOPS] = 0x4e + a[MAX_BLOCK_SIGOPS + 1] = 0xfe + a[MAX_BLOCK_SIGOPS + 2] = 0xff + a[MAX_BLOCK_SIGOPS + 3] = 0xff + a[MAX_BLOCK_SIGOPS + 4] = 0xff + tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) + b74 = self.update_block(74, [tx]) + self.sync_blocks([b74], success=False, reject_reason='bad-blk-sigops', reconnect=True) + + self.move_tip(72) + b75 = self.next_block(75) + size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 + a = bytearray([OP_CHECKSIG] * size) + a[MAX_BLOCK_SIGOPS - 1] = 0x4e + a[MAX_BLOCK_SIGOPS] = 0xff + a[MAX_BLOCK_SIGOPS + 1] = 0xff + a[MAX_BLOCK_SIGOPS + 2] = 0xff + a[MAX_BLOCK_SIGOPS + 3] = 0xff + tx = self.create_and_sign_transaction(out[22], 1, CScript(a)) + b75 = self.update_block(75, [tx]) + self.sync_blocks([b75], True) + self.save_spendable_output() + + # Check that if we push an element filled with CHECKSIGs, they are not counted + self.move_tip(75) + b76 = self.next_block(76) + size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + a = bytearray([OP_CHECKSIG] * size) + a[MAX_BLOCK_SIGOPS - 1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs + tx = self.create_and_sign_transaction(out[23], 1, CScript(a)) + b76 = self.update_block(76, [tx]) + self.sync_blocks([b76], True) + self.save_spendable_output() + + # Test transaction resurrection + # + # -> b77 (24) -> b78 (25) -> b79 (26) + # \-> b80 (25) -> b81 (26) -> b82 (27) + # + # b78 creates a tx, which is spent in b79. After b82, both should be in mempool + # + # The tx'es must be unsigned and pass the node's mempool policy. 
It is unsigned for the + # rather obscure reason that the Python signature code does not distinguish between + # Low-S and High-S values (whereas the bitcoin code has custom code which does so); + # as a result of which, the odds are 50% that the python code will use the right + # value and the transaction will be accepted into the mempool. Until we modify the + # test framework to support low-S signing, we are out of luck. + # + # To get around this issue, we construct transactions which are not signed and which + # spend to OP_TRUE. If the standard-ness rules change, this test would need to be + # updated. (Perhaps to spend to a P2SH OP_TRUE script) + self.log.info("Test transaction resurrection during a re-org") + self.move_tip(76) + b77 = self.next_block(77) + tx77 = self.create_and_sign_transaction(out[24], 10 * COIN) + b77 = self.update_block(77, [tx77]) + self.sync_blocks([b77], True) + self.save_spendable_output() + + b78 = self.next_block(78) + tx78 = self.create_tx(tx77, 0, 9 * COIN) + b78 = self.update_block(78, [tx78]) + self.sync_blocks([b78], True) + + b79 = self.next_block(79) + tx79 = self.create_tx(tx78, 0, 8 * COIN) + b79 = self.update_block(79, [tx79]) + self.sync_blocks([b79], True) + + # mempool should be empty + assert_equal(len(self.nodes[0].getrawmempool()), 0) + + self.move_tip(77) + b80 = self.next_block(80, spend=out[25]) + self.sync_blocks([b80], False, request_block=False) + self.save_spendable_output() + + b81 = self.next_block(81, spend=out[26]) + self.sync_blocks([b81], False, request_block=False) # other chain is same length + self.save_spendable_output() + + b82 = self.next_block(82, spend=out[27]) + self.sync_blocks([b82], True) # now this chain is longer, triggers re-org + self.save_spendable_output() + + # now check that tx78 and tx79 have been put back into the peer's mempool + mempool = self.nodes[0].getrawmempool() + assert_equal(len(mempool), 2) + assert(tx78.hash in mempool) + assert(tx79.hash in mempool) + + # Test invalid opcodes in dead execution paths. 
+ # + # -> b81 (26) -> b82 (27) -> b83 (28) + # + self.log.info("Accept a block with invalid opcodes in dead execution paths") + b83 = self.next_block(83) + op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF] + script = CScript(op_codes) + tx1 = self.create_and_sign_transaction(out[28], out[28].vout[0].nValue, script) + + tx2 = self.create_and_sign_transaction(tx1, 0, CScript([OP_TRUE])) + tx2.vin[0].scriptSig = CScript([OP_FALSE]) + tx2.rehash() + + b83 = self.update_block(83, [tx1, tx2]) + self.sync_blocks([b83], True) + self.save_spendable_output() + + # Reorg on/off blocks that have OP_RETURN in them (and try to spend them) + # + # -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31) + # \-> b85 (29) -> b86 (30) \-> b89a (32) + # + self.log.info("Test re-orging blocks with OP_RETURN in them") + b84 = self.next_block(84) + tx1 = self.create_tx(out[29], 0, 0, CScript([OP_RETURN])) + tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) + tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) + tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) + tx1.vout.append(CTxOut(0, CScript([OP_TRUE]))) + tx1.calc_sha256() + self.sign_tx(tx1, out[29]) + tx1.rehash() + tx2 = self.create_tx(tx1, 1, 0, CScript([OP_RETURN])) + tx2.vout.append(CTxOut(0, CScript([OP_RETURN]))) + tx3 = self.create_tx(tx1, 2, 0, CScript([OP_RETURN])) + tx3.vout.append(CTxOut(0, CScript([OP_TRUE]))) + tx4 = self.create_tx(tx1, 3, 0, CScript([OP_TRUE])) + tx4.vout.append(CTxOut(0, CScript([OP_RETURN]))) + tx5 = self.create_tx(tx1, 4, 0, CScript([OP_RETURN])) + + b84 = self.update_block(84, [tx1, tx2, tx3, tx4, tx5]) + self.sync_blocks([b84], True) + self.save_spendable_output() + + self.move_tip(83) + b85 = self.next_block(85, spend=out[29]) + self.sync_blocks([b85], False) # other chain is same length + + b86 = self.next_block(86, spend=out[30]) + self.sync_blocks([b86], True) + + self.move_tip(84) + b87 = self.next_block(87, spend=out[30]) + self.sync_blocks([b87], False) # other chain is same length + self.save_spendable_output() + + b88 = self.next_block(88, spend=out[31]) + self.sync_blocks([b88], True) + self.save_spendable_output() + + # trying to spend the OP_RETURN output is rejected + b89a = self.next_block("89a", spend=out[32]) + tx = self.create_tx(tx1, 0, 0, CScript([OP_TRUE])) + b89a = self.update_block("89a", [tx]) + self.sync_blocks([b89a], success=False, reject_reason='bad-txns-inputs-missingorspent', reconnect=True) + + self.log.info("Test a re-org of one week's worth of blocks (1088 blocks)") + + self.move_tip(88) + LARGE_REORG_SIZE = 1088 + blocks = [] + spend = out[32] + for i in range(89, LARGE_REORG_SIZE + 89): + b = self.next_block(i, spend) + tx = CTransaction() + script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69 + script_output = CScript([b'\x00' * script_length]) + tx.vout.append(CTxOut(0, script_output)) + tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0))) + b = self.update_block(i, [tx]) + assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE) + blocks.append(b) + self.save_spendable_output() + spend = self.get_spendable_output() + + self.sync_blocks(blocks, True, timeout=180) + chain1_tip = i + + # now create alt chain of same length + self.move_tip(88) + blocks2 = [] + for i in range(89, LARGE_REORG_SIZE + 89): + blocks2.append(self.next_block("alt" + str(i))) + self.sync_blocks(blocks2, False, request_block=False) + + # extend alt chain to trigger re-org + block = self.next_block("alt" + str(chain1_tip + 1)) + self.sync_blocks([block], True, timeout=180) + + # ... 
and re-org back to the first chain + self.move_tip(chain1_tip) + block = self.next_block(chain1_tip + 1) + self.sync_blocks([block], False, request_block=False) + block = self.next_block(chain1_tip + 2) + self.sync_blocks([block], True, timeout=180) + + # Helper methods + ################ + + def add_transactions_to_block(self, block, tx_list): + [tx.rehash() for tx in tx_list] + block.vtx.extend(tx_list) + + # this is a little handier to use than the version in blocktools.py + def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])): + return create_tx_with_script(spend_tx, n, amount=value, script_pub_key=script) + + # sign a transaction, using the key we know about + # this signs input 0 in tx, which is assumed to be spending output n in spend_tx + def sign_tx(self, tx, spend_tx): + scriptPubKey = bytearray(spend_tx.vout[0].scriptPubKey) + if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend + tx.vin[0].scriptSig = CScript() + return + (sighash, err) = SignatureHash(spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL) + tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) + + def create_and_sign_transaction(self, spend_tx, value, script=CScript([OP_TRUE])): + tx = self.create_tx(spend_tx, 0, value, script) + self.sign_tx(tx, spend_tx) + tx.rehash() + return tx + + def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True): + if self.tip is None: + base_block_hash = self.genesis_hash + block_time = int(time.time()) + 1 + else: + base_block_hash = self.tip.sha256 + block_time = self.tip.nTime + 1 + # First create the coinbase + height = self.block_heights[base_block_hash] + 1 + coinbase = create_coinbase(height, self.coinbase_pubkey) + coinbase.vout[0].nValue += additional_coinbase_value + coinbase.rehash() + if spend is None: + block = create_block(base_block_hash, coinbase, block_time) + else: + coinbase.vout[0].nValue += spend.vout[0].nValue - 1 # all but one satoshi to fees + coinbase.rehash() + block = create_block(base_block_hash, coinbase, block_time) + tx = self.create_tx(spend, 0, 1, script) # spend 1 satoshi + self.sign_tx(tx, spend) + self.add_transactions_to_block(block, [tx]) + block.hashMerkleRoot = block.calc_merkle_root() + if solve: + block.solve() + self.tip = block + self.block_heights[block.sha256] = height + assert number not in self.blocks + self.blocks[number] = block + return block + + # save the current tip so it can be spent by a later block + def save_spendable_output(self): + self.log.debug("saving spendable output %s" % self.tip.vtx[0]) + self.spendable_outputs.append(self.tip) + + # get an output that we previously marked as spendable + def get_spendable_output(self): + self.log.debug("getting spendable output %s" % self.spendable_outputs[0].vtx[0]) + return self.spendable_outputs.pop(0).vtx[0] + + # move the tip back to a previous block + def move_tip(self, number): + self.tip = self.blocks[number] + + # adds transactions to the block and updates state + def update_block(self, block_number, new_transactions): + block = self.blocks[block_number] + self.add_transactions_to_block(block, new_transactions) + old_sha256 = block.sha256 + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + # Update the internal state just like in next_block + self.tip = block + if block.sha256 != old_sha256: + self.block_heights[block.sha256] = self.block_heights[old_sha256] + del self.block_heights[old_sha256] + self.blocks[block_number] = 
block + return block + + def bootstrap_p2p(self): + """Add a P2P connection to the node. + + Helper to connect and wait for version handshake.""" + self.nodes[0].add_p2p_connection(P2PDataStore()) + # We need to wait for the initial getheaders from the peer before we + # start populating our blockstore. If we don't, then we may run ahead + # to the next subtest before we receive the getheaders. We'd then send + # an INV for the next block and receive two getheaders - one for the + # IBD and one for the INV. We'd respond to both and could get + # unexpectedly disconnected if the DoS score for that error is 50. + self.nodes[0].p2p.wait_for_getheaders(timeout=5) + + def reconnect_p2p(self): + """Tear down and bootstrap the P2P connection to the node. + + The node gets disconnected several times in this test. This helper + method reconnects the p2p and restarts the network thread.""" + self.nodes[0].disconnect_p2ps() + self.bootstrap_p2p() + + def sync_blocks(self, blocks, success=True, reject_reason=None, request_block=True, reconnect=False, timeout=60): + """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block. + + Call with success = False if the tip shouldn't advance to the most recent block.""" + self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, request_block=request_block, timeout=timeout, expect_disconnect=reconnect) + + if reconnect: + self.reconnect_p2p() + +if __name__ == '__main__': + FullBlockTest().main() diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py new file mode 100755 index 000000000..c170f510c --- /dev/null +++ b/test/functional/feature_blocksdir.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the blocksdir option. 
+""" + +import os +import shutil + +from test_framework.test_framework import BitcoinTestFramework, initialize_datadir + + +class BlocksdirTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def run_test(self): + self.stop_node(0) + shutil.rmtree(self.nodes[0].datadir) + initialize_datadir(self.options.tmpdir, 0) + self.log.info("Starting with nonexistent blocksdir ...") + blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir') + self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path)) + os.mkdir(blocksdir_path) + self.log.info("Starting with existing blocksdir ...") + self.start_node(0, ["-blocksdir=" + blocksdir_path]) + self.log.info("mining blocks..") + self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + assert os.path.isfile(os.path.join(blocksdir_path, "regtest", "blocks", "blk00000.dat")) + assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest", "blocks", "index")) + + +if __name__ == '__main__': + BlocksdirTest().main() diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py new file mode 100755 index 000000000..f84b08a19 --- /dev/null +++ b/test/functional/feature_cltv.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test BIP65 (CHECKLOCKTIMEVERIFY). + +Test that the CHECKLOCKTIMEVERIFY soft-fork activates at (regtest) block height +1351. +""" + +from test_framework.blocktools import create_coinbase, create_block, create_transaction +from test_framework.messages import CTransaction, msg_block, ToHex +from test_framework.mininode import P2PInterface +from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP, CScriptNum +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + bytes_to_hex_str, + hex_str_to_bytes, +) + +from io import BytesIO + +CLTV_HEIGHT = 1351 + +# Reject codes that we might receive in this test +REJECT_INVALID = 16 +REJECT_OBSOLETE = 17 +REJECT_NONSTANDARD = 64 + +def cltv_invalidate(tx): + '''Modify the signature in vin 0 of the tx to fail CLTV + + Prepends -1 CLTV DROP in the scriptSig itself. + + TODO: test more ways that transactions using CLTV could be invalid (eg + locktime requirements fail, sequence time requirements fail, etc). 
+ ''' + tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] + + list(CScript(tx.vin[0].scriptSig))) + +def cltv_validate(node, tx, height): + '''Modify the signature in vin 0 of the tx to pass CLTV + Prepends CLTV DROP in the scriptSig, and sets + the locktime to height''' + tx.vin[0].nSequence = 0 + tx.nLockTime = height + + # Need to re-sign, since nSequence and nLockTime changed + signed_result = node.signrawtransactionwithwallet(ToHex(tx)) + new_tx = CTransaction() + new_tx.deserialize(BytesIO(hex_str_to_bytes(signed_result['hex']))) + + new_tx.vin[0].scriptSig = CScript([CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP] + + list(CScript(new_tx.vin[0].scriptSig))) + return new_tx + + +class BIP65Test(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [['-whitelist=127.0.0.1', '-par=1']] # Use only one script thread to get the exact reject reason for testing + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.nodes[0].add_p2p_connection(P2PInterface()) + + self.log.info("Mining %d blocks", CLTV_HEIGHT - 2) + self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(CLTV_HEIGHT - 2)] + self.nodeaddress = self.nodes[0].getnewaddress() + + self.log.info("Test that an invalid-according-to-CLTV transaction can still appear in a block") + + spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0], + self.nodeaddress, amount=1.0) + cltv_invalidate(spendtx) + spendtx.rehash() + + tip = self.nodes[0].getbestblockhash() + block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1 + block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1), block_time) + block.nVersion = 3 + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(self.nodes[0].getbestblockhash(), block.hash) + + self.log.info("Test that blocks must now be at least version 4") + tip = block.sha256 + block_time += 1 + block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time) + block.nVersion = 3 + block.solve() + + with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]): + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) + self.nodes[0].p2p.sync_with_ping() + + self.log.info("Test that invalid-according-to-cltv transactions cannot appear in a block") + block.nVersion = 4 + + spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1], + self.nodeaddress, amount=1.0) + cltv_invalidate(spendtx) + spendtx.rehash() + + # First we show that this tx is valid except for CLTV by getting it + # rejected from the mempool for exactly that reason. + assert_equal( + [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Negative locktime)'}], + self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True) + ) + + # Now we verify that a block with this transaction is also invalid. 
+ block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + + with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputs on {} failed with non-mandatory-script-verify-flag (Negative locktime)'.format(block.vtx[-1].hash)]): + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) + self.nodes[0].p2p.sync_with_ping() + + self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted") + spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1) + spendtx.rehash() + + block.vtx.pop(1) + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256) + + +if __name__ == '__main__': + BIP65Test().main() diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py new file mode 100755 index 000000000..1124119e2 --- /dev/null +++ b/test/functional/feature_config_args.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test various command line arguments and configuration file parameters.""" + +import os + +from test_framework.test_framework import BitcoinTestFramework + + +class ConfArgsTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def test_config_file_parser(self): + # Assume node is stopped + + inc_conf_file_path = os.path.join(self.nodes[0].datadir, 'include.conf') + with open(os.path.join(self.nodes[0].datadir, 'bitcoin.conf'), 'a', encoding='utf-8') as conf: + conf.write('includeconf={}\n'.format(inc_conf_file_path)) + + with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: + conf.write('-dash=1\n') + self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -') + + with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: + conf.write('nono\n') + self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead') + + with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: + conf.write('') # clear + + def run_test(self): + self.stop_node(0) + + self.test_config_file_parser() + + # Remove the -datadir argument so it doesn't override the config file + self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")] + + default_data_dir = self.nodes[0].datadir + new_data_dir = os.path.join(default_data_dir, 'newdatadir') + new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2') + + # Check that using -datadir argument on non-existent directory fails + self.nodes[0].datadir = new_data_dir + self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.') + + # Check that using non-existent datadir in conf file fails + conf_file = os.path.join(default_data_dir, "bitcoin.conf") + + # datadir needs to be set before [regtest] section + conf_file_contents = open(conf_file, 
encoding='utf8').read() + with open(conf_file, 'w', encoding='utf8') as f: + f.write("datadir=" + new_data_dir + "\n") + f.write(conf_file_contents) + + # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin) + #self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.') + + # Create the directory and ensure the config file now works + os.mkdir(new_data_dir) + # Temporarily disabled, because this test would access the user's home dir (~/.bitcoin) + #self.start_node(0, ['-conf='+conf_file, '-wallet=w1']) + #self.stop_node(0) + #assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1')) + + # Ensure command line argument overrides datadir in conf + os.mkdir(new_data_dir_2) + self.nodes[0].datadir = new_data_dir_2 + self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2']) + assert os.path.exists(os.path.join(new_data_dir_2, 'regtest', 'wallets', 'w2')) + +if __name__ == '__main__': + ConfArgsTest().main() diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py new file mode 100755 index 000000000..df79c4312 --- /dev/null +++ b/test/functional/feature_csv_activation.py @@ -0,0 +1,485 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test activation of the first version bits soft fork. + +This soft fork will activate the following BIPS: +BIP 68 - nSequence relative lock times +BIP 112 - CHECKSEQUENCEVERIFY +BIP 113 - MedianTimePast semantics for nLockTime + +regtest lock-in with 108/144 block signalling +activation after a further 144 blocks + +mine 82 blocks whose coinbases will be used to generate inputs for our tests +mine 61 blocks to transition from DEFINED to STARTED +mine 144 blocks only 100 of which are signaling readiness in order to fail to change state this period +mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN +mine 140 blocks and seed block chain with the 82 inputs will use for our tests at height 572 +mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered +mine 1 block and test that enforcement has triggered (which triggers ACTIVE) +Test BIP 113 is enforced +Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height +Mine 1 block so next height is 581 and test BIP 68 now passes time but not height +Mine 1 block so next height is 582 and test BIP 68 now passes time and height +Test that BIP 112 is enforced + +Various transactions will be used to test that the BIPs rules are not enforced before the soft fork activates +And that after the soft fork activates transactions pass and fail as they should according to the rules. +For each BIP, transactions of versions 1 and 2 will be tested. 
+---------------- +BIP 113: +bip113tx - modify the nLocktime variable + +BIP 68: +bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below + +BIP 112: +bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP +bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP +bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP +bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP +bip112tx_special - test negative argument to OP_CSV +""" +from decimal import Decimal +from itertools import product +from io import BytesIO +import time + +from test_framework.blocktools import create_coinbase, create_block, create_transaction +from test_framework.messages import ToHex, CTransaction +from test_framework.mininode import P2PDataStore +from test_framework.script import ( + CScript, + OP_CHECKSEQUENCEVERIFY, + OP_DROP, +) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + get_bip9_status, + hex_str_to_bytes, +) + +BASE_RELATIVE_LOCKTIME = 10 +SEQ_DISABLE_FLAG = 1 << 31 +SEQ_RANDOM_HIGH_BIT = 1 << 25 +SEQ_TYPE_FLAG = 1 << 22 +SEQ_RANDOM_LOW_BIT = 1 << 18 + +def relative_locktime(sdf, srhb, stf, srlb): + """Returns a locktime with certain bits set.""" + + locktime = BASE_RELATIVE_LOCKTIME + if sdf: + locktime |= SEQ_DISABLE_FLAG + if srhb: + locktime |= SEQ_RANDOM_HIGH_BIT + if stf: + locktime |= SEQ_TYPE_FLAG + if srlb: + locktime |= SEQ_RANDOM_LOW_BIT + return locktime + +def all_rlt_txs(txs): + return [tx['tx'] for tx in txs] + +def sign_transaction(node, unsignedtx): + rawtx = ToHex(unsignedtx) + signresult = node.signrawtransactionwithwallet(rawtx) + tx = CTransaction() + f = BytesIO(hex_str_to_bytes(signresult['hex'])) + tx.deserialize(f) + return tx + +def create_bip112special(node, input, txversion, address): + tx = create_transaction(node, input, address, amount=Decimal("49.98")) + tx.nVersion = txversion + signtx = sign_transaction(node, tx) + signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + return signtx + +def send_generic_input_tx(node, coinbases, address): + return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount=Decimal("49.99"))))) + +def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0): + """Returns a list of bip68 transactions with different bits set.""" + txs = [] + assert(len(bip68inputs) >= 16) + for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): + locktime = relative_locktime(sdf, srhb, stf, srlb) + tx = create_transaction(node, bip68inputs[i], address, amount=Decimal("49.98")) + tx.nVersion = txversion + tx.vin[0].nSequence = locktime + locktime_delta + tx = sign_transaction(node, tx) + tx.rehash() + txs.append({'tx': tx, 'sdf': sdf, 'stf': stf}) + + return txs + +def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0): + """Returns a list of bip68 transactions with different bits set.""" + txs = [] + assert(len(bip112inputs) >= 16) + for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)): + locktime = relative_locktime(sdf, srhb, stf, srlb) + tx = create_transaction(node, 
bip112inputs[i], address, amount=Decimal("49.98")) + if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed + tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta + else: # vary nSequence instead, OP_CSV is fixed + tx.vin[0].nSequence = locktime + locktime_delta + tx.nVersion = txversion + signtx = sign_transaction(node, tx) + if (varyOP_CSV): + signtx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + else: + signtx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + tx.rehash() + txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf}) + return txs + +class BIP68_112_113Test(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4', '-addresstype=legacy']] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def generate_blocks(self, number, version, test_blocks=None): + if test_blocks is None: + test_blocks = [] + for i in range(number): + block = self.create_test_block([], version) + test_blocks.append(block) + self.last_block_time += 600 + self.tip = block.sha256 + self.tipheight += 1 + return test_blocks + + def create_test_block(self, txs, version=536870912): + block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600) + block.nVersion = version + block.vtx.extend(txs) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + return block + + def sync_blocks(self, blocks, success=True): + """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block. + + Call with success = False if the tip shouldn't advance to the most recent block.""" + self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success) + + def run_test(self): + self.nodes[0].add_p2p_connection(P2PDataStore()) + + self.log.info("Generate blocks in the past for coinbase outputs.") + long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future + self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time + self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1) # 82 blocks generated for inputs + self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time + self.tipheight = 82 # height of the next block to build + self.last_block_time = long_past_time + self.tip = int(self.nodes[0].getbestblockhash(), 16) + self.nodeaddress = self.nodes[0].getnewaddress() + + self.log.info("Test that the csv softfork is DEFINED") + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined') + test_blocks = self.generate_blocks(61, 4) + self.sync_blocks(test_blocks) + + self.log.info("Advance from DEFINED to STARTED, height = 143") + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started') + + self.log.info("Fail to achieve LOCKED_IN") + # 100 out of 144 signal bit 0. 
Use a variety of bits to simulate multiple parallel softforks + + test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not) + self.sync_blocks(test_blocks) + + self.log.info("Failed to advance past STARTED, height = 287") + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started') + + self.log.info("Generate blocks to achieve LOCK-IN") + # 108 out of 144 signal bit 0 to achieve lock-in + # using a variety of bits to simulate multiple parallel softforks + test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not) + self.sync_blocks(test_blocks) + + self.log.info("Advanced from STARTED to LOCKED_IN, height = 431") + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') + + # Generate 140 more version 4 blocks + test_blocks = self.generate_blocks(140, 4) + self.sync_blocks(test_blocks) + + # Inputs at height = 572 + # + # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) + # Note we reuse inputs for v1 and v2 txs so must test these separately + # 16 normal inputs + bip68inputs = [] + for i in range(16): + bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)) + + # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112basicinputs = [] + for j in range(2): + inputs = [] + for i in range(16): + inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)) + bip112basicinputs.append(inputs) + + # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112diverseinputs = [] + for j in range(2): + inputs = [] + for i in range(16): + inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)) + bip112diverseinputs.append(inputs) + + # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress) + + # 1 normal input + bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress) + + self.nodes[0].setmocktime(self.last_block_time + 600) + inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572 + self.nodes[0].setmocktime(0) + self.tip = int(inputblockhash, 16) + self.tipheight += 1 + self.last_block_time += 600 + assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1) + + # 2 more version 4 blocks + test_blocks = self.generate_blocks(2, 4) + self.sync_blocks(test_blocks) + + self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)") + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in') + + # Test both version 1 and version 2 transactions for all tests + # BIP113 test transaction will be modified before each use to put 
in appropriate block time + bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98")) + bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE + bip113tx_v1.nVersion = 1 + bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, amount=Decimal("49.98")) + bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE + bip113tx_v2.nVersion = 2 + + # For BIP68 test all 16 relative sequence locktimes + bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress) + bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress) + + # For BIP112 test: + # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs + bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress) + bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress) + # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs + bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1) + bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1) + # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs + bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress) + bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress) + # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs + bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1) + bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1) + # -1 OP_CSV OP_DROP input + bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress) + bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress) + + self.log.info("TESTING") + + self.log.info("Pre-Soft Fork Tests. 
All txs should pass.") + self.log.info("Test version 1 txs") + + success_txs = [] + # add BIP113 tx and -1 CSV tx + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) + success_txs.append(bip113signed1) + success_txs.append(bip112tx_special_v1) + # add BIP 68 txs + success_txs.extend(all_rlt_txs(bip68txs_v1)) + # add BIP 112 with seq=10 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) + # try BIP 112 with seq=9 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) + self.sync_blocks([self.create_test_block(success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + self.log.info("Test version 2 txs") + + success_txs = [] + # add BIP113 tx and -1 CSV tx + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) + success_txs.append(bip113signed2) + success_txs.append(bip112tx_special_v2) + # add BIP 68 txs + success_txs.extend(all_rlt_txs(bip68txs_v2)) + # add BIP 112 with seq=10 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) + # try BIP 112 with seq=9 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) + self.sync_blocks([self.create_test_block(success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block + test_blocks = self.generate_blocks(1, 4) + self.sync_blocks(test_blocks) + assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active') + + self.log.info("Post-Soft Fork Tests.") + + self.log.info("BIP 113 tests") + # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) + for bip113tx in [bip113signed1, bip113signed2]: + self.sync_blocks([self.create_test_block([bip113tx])], success=False) + # BIP 113 tests should now pass if the locktime is < MTP + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block + bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1) + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block + bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2) + for bip113tx in [bip113signed1, bip113signed2]: + self.sync_blocks([self.create_test_block([bip113tx])]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # Next block height = 580 after 4 blocks of random version + test_blocks = self.generate_blocks(4, 1234) + self.sync_blocks(test_blocks) + + self.log.info("BIP 68 tests") + self.log.info("Test version 1 txs - all should still pass") + + success_txs = [] + success_txs.extend(all_rlt_txs(bip68txs_v1)) + self.sync_blocks([self.create_test_block(success_txs)]) + 
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + self.log.info("Test version 2 txs") + + # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass + bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']] + self.sync_blocks([self.create_test_block(bip68success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512 + bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']] + for tx in bip68timetxs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']] + for tx in bip68heighttxs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + # Advance one block to 581 + test_blocks = self.generate_blocks(1, 1234) + self.sync_blocks(test_blocks) + + # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512 + bip68success_txs.extend(bip68timetxs) + self.sync_blocks([self.create_test_block(bip68success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + for tx in bip68heighttxs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + # Advance one block to 582 + test_blocks = self.generate_blocks(1, 1234) + self.sync_blocks(test_blocks) + + # All BIP 68 txs should pass + bip68success_txs.extend(bip68heighttxs) + self.sync_blocks([self.create_test_block(bip68success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + self.log.info("BIP 112 tests") + self.log.info("Test version 1 txs") + + # -1 OP_CSV tx should fail + self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False) + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass + + success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']] + success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']] + self.sync_blocks([self.create_test_block(success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail + fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1) + fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1) + fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']] + fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']] + for tx in fail_txs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + self.log.info("Test version 2 txs") + + # -1 OP_CSV tx should fail + self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False) + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met) + success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']] + success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']] + + self.sync_blocks([self.create_test_block(success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ## + + # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check + fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2) + fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']] + for tx in fail_txs: + 
self.sync_blocks([self.create_test_block([tx])], success=False) + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail + fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']] + for tx in fail_txs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + # If sequencelock types mismatch, tx should fail + fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']] + fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']] + for tx in fail_txs: + self.sync_blocks([self.create_test_block([tx])], success=False) + + # Remaining txs should pass, just test masking works properly + success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']] + success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']] + self.sync_blocks([self.create_test_block(success_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # Additional test, of checking that comparison of two time types works properly + time_txs = [] + for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]: + tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG + signtx = sign_transaction(self.nodes[0], tx) + time_txs.append(signtx) + + self.sync_blocks([self.create_test_block(time_txs)]) + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # TODO: Test empty stack fails + +if __name__ == '__main__': + BIP68_112_113Test().main() diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py new file mode 100755 index 000000000..ae1eacf2d --- /dev/null +++ b/test/functional/feature_dbcrash.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test recovery from a crash during chainstate writing. + +- 4 nodes + * node0, node1, and node2 will have different dbcrash ratios, and different + dbcache sizes + * node3 will be a regular node, with no crashing. + * The nodes will not connect to each other. + +- use default test framework starting chain. initialize starting_tip_height to + tip height. + +- Main loop: + * generate lots of transactions on node3, enough to fill up a block. + * uniformly randomly pick a tip height from starting_tip_height to + tip_height; with probability 1/(height_difference+4), invalidate this block. + * mine enough blocks to overtake tip_height at start of loop. 
+ * for each node in [node0,node1,node2]: + - for each mined block: + * submit block to node + * if node crashed on/after submitting: + - restart until recovery succeeds + - check that utxo matches node3 using gettxoutsetinfo""" + +import errno +import http.client +import random +import sys +import time + +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, ToHex +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, create_confirmed_utxos, hex_str_to_bytes + +HTTP_DISCONNECT_ERRORS = [http.client.CannotSendRequest] +try: + HTTP_DISCONNECT_ERRORS.append(http.client.RemoteDisconnected) +except AttributeError: + pass + +class ChainstateWriteCrashTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = False + # Need a bit of extra time for the nodes to start up for this test + self.rpc_timewait = 90 + + # Set -maxmempool=0 to turn off mempool memory sharing with dbcache + # Set -rpcservertimeout=900 to reduce socket disconnects in this + # long-running test + self.base_args = ["-limitdescendantsize=0", "-maxmempool=0", "-rpcservertimeout=900", "-dbbatchsize=200000"] + + # Set different crash ratios and cache sizes. Note that not all of + # -dbcache goes to pcoinsTip. + self.node0_args = ["-dbcrashratio=8", "-dbcache=4"] + self.base_args + self.node1_args = ["-dbcrashratio=16", "-dbcache=8"] + self.base_args + self.node2_args = ["-dbcrashratio=24", "-dbcache=16"] + self.base_args + + # Node3 is a normal node with default args, except will mine full blocks + self.node3_args = ["-blockmaxweight=4000000"] + self.extra_args = [self.node0_args, self.node1_args, self.node2_args, self.node3_args] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.add_nodes(self.num_nodes, extra_args=self.extra_args) + self.start_nodes() + # Leave them unconnected, we'll use submitblock directly in this test + + def restart_node(self, node_index, expected_tip): + """Start up a given node id, wait for the tip to reach the given block hash, and calculate the utxo hash. + + Exceptions on startup should indicate node crash (due to -dbcrashratio), in which case we try again. Give up + after 60 seconds. Returns the utxo hash of the given node.""" + + time_start = time.time() + while time.time() - time_start < 120: + try: + # Any of these RPC calls could throw due to node crash + self.start_node(node_index) + self.nodes[node_index].waitforblock(expected_tip) + utxo_hash = self.nodes[node_index].gettxoutsetinfo()['hash_serialized_2'] + return utxo_hash + except: + # An exception here should mean the node is about to crash. + # If bitcoind exits, then try again. wait_for_node_exit() + # should raise an exception if bitcoind doesn't exit. + self.wait_for_node_exit(node_index, timeout=10) + self.crashed_on_restart += 1 + time.sleep(1) + + # If we got here, bitcoind isn't coming back up on restart. Could be a + # bug in bitcoind, or we've gotten unlucky with our dbcrash ratio -- + # perhaps we generated a test case that blew up our cache? + # TODO: If this happens a lot, we should try to restart without -dbcrashratio + # and make sure that recovery happens. + raise AssertionError("Unable to successfully restart node %d in allotted time", node_index) + + def submit_block_catch_error(self, node_index, block): + """Try submitting a block to the given node. + + Catch any exceptions that indicate the node has crashed. 
+ Returns true if the block was submitted successfully; false otherwise.""" + + try: + self.nodes[node_index].submitblock(block) + return True + except http.client.BadStatusLine as e: + # Prior to 3.5 BadStatusLine('') was raised for a remote disconnect error. + if sys.version_info[0] == 3 and sys.version_info[1] < 5 and e.line == "''": + self.log.debug("node %d submitblock raised exception: %s", node_index, e) + return False + else: + raise + except tuple(HTTP_DISCONNECT_ERRORS) as e: + self.log.debug("node %d submitblock raised exception: %s", node_index, e) + return False + except OSError as e: + self.log.debug("node %d submitblock raised OSError exception: errno=%s", node_index, e.errno) + if e.errno in [errno.EPIPE, errno.ECONNREFUSED, errno.ECONNRESET]: + # The node has likely crashed + return False + else: + # Unexpected exception, raise + raise + + def sync_node3blocks(self, block_hashes): + """Use submitblock to sync node3's chain with the other nodes + + If submitblock fails, restart the node and get the new utxo hash. + If any nodes crash while updating, we'll compare utxo hashes to + ensure recovery was successful.""" + + node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2'] + + # Retrieve all the blocks from node3 + blocks = [] + for block_hash in block_hashes: + blocks.append([block_hash, self.nodes[3].getblock(block_hash, 0)]) + + # Deliver each block to each other node + for i in range(3): + nodei_utxo_hash = None + self.log.debug("Syncing blocks to node %d", i) + for (block_hash, block) in blocks: + # Get the block from node3, and submit to node_i + self.log.debug("submitting block %s", block_hash) + if not self.submit_block_catch_error(i, block): + # TODO: more carefully check that the crash is due to -dbcrashratio + # (change the exit code perhaps, and check that here?) + self.wait_for_node_exit(i, timeout=30) + self.log.debug("Restarting node %d after block hash %s", i, block_hash) + nodei_utxo_hash = self.restart_node(i, block_hash) + assert nodei_utxo_hash is not None + self.restart_counts[i] += 1 + else: + # Clear it out after successful submitblock calls -- the cached + # utxo hash will no longer be correct + nodei_utxo_hash = None + + # Check that the utxo hash matches node3's utxo set + # NOTE: we only check the utxo set if we had to restart the node + # after the last block submitted: + # - checking the utxo hash causes a cache flush, which we don't + # want to do every time; so + # - we only update the utxo cache after a node restart, since flushing + # the cache is a no-op at that point + if nodei_utxo_hash is not None: + self.log.debug("Checking txoutsetinfo matches for node %d", i) + assert_equal(nodei_utxo_hash, node3_utxo_hash) + + def verify_utxo_hash(self): + """Verify that the utxo hash of each node matches node3. 
+ + Restart any nodes that crash while querying.""" + node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2'] + self.log.info("Verifying utxo hash matches for all nodes") + + for i in range(3): + try: + nodei_utxo_hash = self.nodes[i].gettxoutsetinfo()['hash_serialized_2'] + except OSError: + # probably a crash on db flushing + nodei_utxo_hash = self.restart_node(i, self.nodes[3].getbestblockhash()) + assert_equal(nodei_utxo_hash, node3_utxo_hash) + + def generate_small_transactions(self, node, count, utxo_list): + FEE = 1000 # TODO: replace this with node relay fee based calculation + num_transactions = 0 + random.shuffle(utxo_list) + while len(utxo_list) >= 2 and num_transactions < count: + tx = CTransaction() + input_amount = 0 + for i in range(2): + utxo = utxo_list.pop() + tx.vin.append(CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout']))) + input_amount += int(utxo['amount'] * COIN) + output_amount = (input_amount - FEE) // 3 + + if output_amount <= 0: + # Sanity check -- if we chose inputs that are too small, skip + continue + + for i in range(3): + tx.vout.append(CTxOut(output_amount, hex_str_to_bytes(utxo['scriptPubKey']))) + + # Sign and send the transaction to get into the mempool + tx_signed_hex = node.signrawtransactionwithwallet(ToHex(tx))['hex'] + node.sendrawtransaction(tx_signed_hex) + num_transactions += 1 + + def run_test(self): + # Track test coverage statistics + self.restart_counts = [0, 0, 0] # Track the restarts for nodes 0-2 + self.crashed_on_restart = 0 # Track count of crashes during recovery + + # Start by creating a lot of utxos on node3 + initial_height = self.nodes[3].getblockcount() + utxo_list = create_confirmed_utxos(self.nodes[3].getnetworkinfo()['relayfee'], self.nodes[3], 5000) + self.log.info("Prepped %d utxo entries", len(utxo_list)) + + # Sync these blocks with the other nodes + block_hashes_to_sync = [] + for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1): + block_hashes_to_sync.append(self.nodes[3].getblockhash(height)) + + self.log.debug("Syncing %d blocks with other nodes", len(block_hashes_to_sync)) + # Syncing the blocks could cause nodes to crash, so the test begins here. + self.sync_node3blocks(block_hashes_to_sync) + + starting_tip_height = self.nodes[3].getblockcount() + + # Main test loop: + # each time through the loop, generate a bunch of transactions, + # and then either mine a single new block on the tip, or some-sized reorg. + for i in range(40): + self.log.info("Iteration %d, generating 2500 transactions %s", i, self.restart_counts) + # Generate a bunch of small-ish transactions + self.generate_small_transactions(self.nodes[3], 2500, utxo_list) + # Pick a random block between current tip, and starting tip + current_height = self.nodes[3].getblockcount() + random_height = random.randint(starting_tip_height, current_height) + self.log.debug("At height %d, considering height %d", current_height, random_height) + if random_height > starting_tip_height: + # Randomly reorg from this point with some probability (1/4 for + # tip, 1/5 for tip-1, ...) 
+ if random.random() < 1.0 / (current_height + 4 - random_height): + self.log.debug("Invalidating block at height %d", random_height) + self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height)) + + # Now generate new blocks until we pass the old tip height + self.log.debug("Mining longer tip") + block_hashes = [] + while current_height + 1 > self.nodes[3].getblockcount(): + block_hashes.extend(self.nodes[3].generate(min(10, current_height + 1 - self.nodes[3].getblockcount()))) + self.log.debug("Syncing %d new blocks...", len(block_hashes)) + self.sync_node3blocks(block_hashes) + utxo_list = self.nodes[3].listunspent() + self.log.debug("Node3 utxo count: %d", len(utxo_list)) + + # Check that the utxo hashes agree with node3 + # Useful side effect: each utxo cache gets flushed here, so that we + # won't get crashes on shutdown at the end of the test. + self.verify_utxo_hash() + + # Check the test coverage + self.log.info("Restarted nodes: %s; crashes on restart: %d", self.restart_counts, self.crashed_on_restart) + + # If no nodes were restarted, we didn't test anything. + assert self.restart_counts != [0, 0, 0] + + # Make sure we tested the case of crash-during-recovery. + assert self.crashed_on_restart > 0 + + # Warn if any of the nodes escaped restart. + for i in range(3): + if self.restart_counts[i] == 0: + self.log.warn("Node %d never crashed during utxo flush!", i) + +if __name__ == "__main__": + ChainstateWriteCrashTest().main() diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py new file mode 100755 index 000000000..16272877e --- /dev/null +++ b/test/functional/feature_dersig.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test BIP66 (DER SIG). + +Test that the DERSIG soft-fork activates at (regtest) height 1251. +""" + +from test_framework.blocktools import create_coinbase, create_block, create_transaction +from test_framework.messages import msg_block +from test_framework.mininode import mininode_lock, P2PInterface +from test_framework.script import CScript +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + bytes_to_hex_str, + wait_until, +) + +DERSIG_HEIGHT = 1251 + +# Reject codes that we might receive in this test +REJECT_INVALID = 16 +REJECT_OBSOLETE = 17 +REJECT_NONSTANDARD = 64 + +# A canonical signature consists of: +# <30> <02> <02> +def unDERify(tx): + """ + Make the signature in vin 0 of a tx non-DER-compliant, + by adding padding after the S-value. 
+ """ + scriptSig = CScript(tx.vin[0].scriptSig) + newscript = [] + for i in scriptSig: + if (len(newscript) == 0): + newscript.append(i[0:-1] + b'\0' + i[-1:]) + else: + newscript.append(i) + tx.vin[0].scriptSig = CScript(newscript) + + + +class BIP66Test(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [['-whitelist=127.0.0.1', '-par=1', '-enablebip61']] # Use only one script thread to get the exact reject reason for testing + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.nodes[0].add_p2p_connection(P2PInterface()) + + self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2) + self.coinbase_txids = [self.nodes[0].getblock(b)['tx'][0] for b in self.nodes[0].generate(DERSIG_HEIGHT - 2)] + self.nodeaddress = self.nodes[0].getnewaddress() + + self.log.info("Test that a transaction with non-DER signature can still appear in a block") + + spendtx = create_transaction(self.nodes[0], self.coinbase_txids[0], + self.nodeaddress, amount=1.0) + unDERify(spendtx) + spendtx.rehash() + + tip = self.nodes[0].getbestblockhash() + block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1 + block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1), block_time) + block.nVersion = 2 + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(self.nodes[0].getbestblockhash(), block.hash) + + self.log.info("Test that blocks must now be at least version 3") + tip = block.sha256 + block_time += 1 + block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time) + block.nVersion = 2 + block.rehash() + block.solve() + + with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000002)'.format(block.hash)]): + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) + self.nodes[0].p2p.sync_with_ping() + + self.log.info("Test that transactions with non-DER signatures cannot appear in a block") + block.nVersion = 3 + + spendtx = create_transaction(self.nodes[0], self.coinbase_txids[1], + self.nodeaddress, amount=1.0) + unDERify(spendtx) + spendtx.rehash() + + # First we show that this tx is valid except for DERSIG by getting it + # rejected from the mempool for exactly that reason. + assert_equal( + [{'txid': spendtx.hash, 'allowed': False, 'reject-reason': '64: non-mandatory-script-verify-flag (Non-canonical DER signature)'}], + self.nodes[0].testmempoolaccept(rawtxs=[bytes_to_hex_str(spendtx.serialize())], allowhighfees=True) + ) + + # Now we verify that a block with this transaction is also invalid. 
+ block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + + with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputs on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'.format(block.vtx[-1].hash)]): + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) + self.nodes[0].p2p.sync_with_ping() + + wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock) + with mininode_lock: + assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD] + assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256) + assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason + + self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted") + block.vtx[1] = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1.0) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + + self.nodes[0].p2p.send_and_ping(msg_block(block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256) + +if __name__ == '__main__': + BIP66Test().main() diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py new file mode 100755 index 000000000..aaab4279b --- /dev/null +++ b/test/functional/feature_fee_estimation.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test fee estimation code.""" +from decimal import Decimal +import random + +from test_framework.messages import CTransaction, CTxIn, CTxOut, COutPoint, ToHex, COIN +from test_framework.script import CScript, OP_1, OP_DROP, OP_2, OP_HASH160, OP_EQUAL, hash160, OP_TRUE +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than, + assert_greater_than_or_equal, + connect_nodes, + satoshi_round, + sync_blocks, + sync_mempools, +) + +# Construct 2 trivial P2SH's and the ScriptSigs that spend them +# So we can create many transactions without needing to spend +# time signing. +REDEEM_SCRIPT_1 = CScript([OP_1, OP_DROP]) +REDEEM_SCRIPT_2 = CScript([OP_2, OP_DROP]) +P2SH_1 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_1), OP_EQUAL]) +P2SH_2 = CScript([OP_HASH160, hash160(REDEEM_SCRIPT_2), OP_EQUAL]) + +# Associated ScriptSig's to spend satisfy P2SH_1 and P2SH_2 +SCRIPT_SIG = [CScript([OP_TRUE, REDEEM_SCRIPT_1]), CScript([OP_TRUE, REDEEM_SCRIPT_2])] + +def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment): + """Create and send a transaction with a random fee. + + The transaction pays to a trivial P2SH script, and assumes that its inputs + are of the same form. + The function takes a list of confirmed outputs and unconfirmed outputs + and attempts to use the confirmed list first for its inputs. + It adds the newly created outputs to the unconfirmed list. + Returns (raw transaction, fee).""" + + # It's best to exponentially distribute our random fees + # because the buckets are exponentially spaced. 
+ # Exponentially distributed from 1-128 * fee_increment + rand_fee = float(fee_increment) * (1.1892 ** random.randint(0, 28)) + # Total fee ranges from min_fee to min_fee + 127*fee_increment + fee = min_fee - fee_increment + satoshi_round(rand_fee) + tx = CTransaction() + total_in = Decimal("0.00000000") + while total_in <= (amount + fee) and len(conflist) > 0: + t = conflist.pop(0) + total_in += t["amount"] + tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b"")) + if total_in <= amount + fee: + while total_in <= (amount + fee) and len(unconflist) > 0: + t = unconflist.pop(0) + total_in += t["amount"] + tx.vin.append(CTxIn(COutPoint(int(t["txid"], 16), t["vout"]), b"")) + if total_in <= amount + fee: + raise RuntimeError("Insufficient funds: need %d, have %d" % (amount + fee, total_in)) + tx.vout.append(CTxOut(int((total_in - amount - fee) * COIN), P2SH_1)) + tx.vout.append(CTxOut(int(amount * COIN), P2SH_2)) + # These transactions don't need to be signed, but we still have to insert + # the ScriptSig that will satisfy the ScriptPubKey. + for inp in tx.vin: + inp.scriptSig = SCRIPT_SIG[inp.prevout.n] + txid = from_node.sendrawtransaction(ToHex(tx), True) + unconflist.append({"txid": txid, "vout": 0, "amount": total_in - amount - fee}) + unconflist.append({"txid": txid, "vout": 1, "amount": amount}) + + return (ToHex(tx), fee) + +def split_inputs(from_node, txins, txouts, initial_split=False): + """Generate a lot of inputs so we can generate a ton of transactions. + + This function takes an input from txins, and creates and sends a transaction + which splits the value into 2 outputs which are appended to txouts. + Previously this was designed to be small inputs so they wouldn't have + a high coin age when the notion of priority still existed.""" + + prevtxout = txins.pop() + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(int(prevtxout["txid"], 16), prevtxout["vout"]), b"")) + + half_change = satoshi_round(prevtxout["amount"] / 2) + rem_change = prevtxout["amount"] - half_change - Decimal("0.00001000") + tx.vout.append(CTxOut(int(half_change * COIN), P2SH_1)) + tx.vout.append(CTxOut(int(rem_change * COIN), P2SH_2)) + + # If this is the initial split we actually need to sign the transaction + # Otherwise we just need to insert the proper ScriptSig + if (initial_split): + completetx = from_node.signrawtransactionwithwallet(ToHex(tx))["hex"] + else: + tx.vin[0].scriptSig = SCRIPT_SIG[prevtxout["vout"]] + completetx = ToHex(tx) + txid = from_node.sendrawtransaction(completetx, True) + txouts.append({"txid": txid, "vout": 0, "amount": half_change}) + txouts.append({"txid": txid, "vout": 1, "amount": rem_change}) + +def check_estimates(node, fees_seen): + """Call estimatesmartfee and verify that the estimates meet certain invariants.""" + + delta = 1.0e-6 # account for rounding error + last_feerate = float(max(fees_seen)) + all_smart_estimates = [node.estimatesmartfee(i) for i in range(1, 26)] + for i, e in enumerate(all_smart_estimates): # estimate is for i+1 + feerate = float(e["feerate"]) + assert_greater_than(feerate, 0) + + if feerate + delta < min(fees_seen) or feerate - delta > max(fees_seen): + raise AssertionError("Estimated fee (%f) out of range (%f,%f)" + % (feerate, min(fees_seen), max(fees_seen))) + if feerate - delta > last_feerate: + raise AssertionError("Estimated fee (%f) larger than last fee (%f) for lower number of confirms" + % (feerate, last_feerate)) + last_feerate = feerate + + if i == 0: + assert_equal(e["blocks"], 2) + else: + 
+            assert_greater_than_or_equal(i + 1, e["blocks"])
+
+class EstimateFeeTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.num_nodes = 3
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_wallet()
+
+    def setup_network(self):
+        """
+        We'll set up the network to have 3 nodes that all mine with different parameters.
+        But first we need to use one node to create a lot of outputs
+        which we will use to generate our transactions.
+        """
+        self.add_nodes(3, extra_args=[["-maxorphantx=1000", "-whitelist=127.0.0.1"],
+                                      ["-blockmaxweight=68000", "-maxorphantx=1000"],
+                                      ["-blockmaxweight=32000", "-maxorphantx=1000"]])
+        # Use node0 to mine blocks for input splitting
+        # Node1 mines small blocks, but ones still bigger than the expected transaction rate.
+        # NOTE: the CreateNewBlock code starts counting block weight at 4,000 weight,
+        # so 68k weight is room enough for 120 or so transactions.
+        # Node2 is a stingy miner that produces
+        # too-small blocks (room for only 55 or so transactions)
+
+    def transact_and_mine(self, numblocks, mining_node):
+        min_fee = Decimal("0.00001")
+        # We will now mine numblocks blocks, generating on average 100 transactions between each block.
+        # We shuffle our confirmed txout set before each set of transactions;
+        # small_txpuzzle_randfee will use the transactions that have inputs already in the chain when possible,
+        # resorting to tx's that depend on the mempool when those run out.
+        for i in range(numblocks):
+            random.shuffle(self.confutxo)
+            for j in range(random.randrange(100 - 50, 100 + 50)):
+                from_index = random.randint(1, 2)
+                (txhex, fee) = small_txpuzzle_randfee(self.nodes[from_index], self.confutxo,
+                                                      self.memutxo, Decimal("0.005"), min_fee, min_fee)
+                tx_kbytes = (len(txhex) // 2) / 1000.0
+                self.fees_per_kb.append(float(fee) / tx_kbytes)
+            sync_mempools(self.nodes[0:3], wait=.1)
+            mined = mining_node.getblock(mining_node.generate(1)[0], True)["tx"]
+            sync_blocks(self.nodes[0:3], wait=.1)
+            # update which txouts are confirmed
+            newmem = []
+            for utx in self.memutxo:
+                if utx["txid"] in mined:
+                    self.confutxo.append(utx)
+                else:
+                    newmem.append(utx)
+            self.memutxo = newmem
+
+    def import_deterministic_coinbase_privkeys(self):
+        self.start_nodes()
+        super().import_deterministic_coinbase_privkeys()
+        self.stop_nodes()
+
+    def run_test(self):
+        self.log.info("This test is time consuming, please be patient")
+        self.log.info("Splitting inputs so we can generate tx's")
+
+        # Start node0
+        self.start_node(0)
+        self.txouts = []
+        self.txouts2 = []
+        # Split a coinbase into two transaction puzzle outputs
+        split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
+
+        # Mine
+        while len(self.nodes[0].getrawmempool()) > 0:
+            self.nodes[0].generate(1)
+
+        # Repeatedly split those 2 outputs, doubling twice for each rep
+        # Use txouts to monitor the available utxo, since these won't be tracked in wallet
+        reps = 0
+        while reps < 5:
+            # Double txouts to txouts2
+            while len(self.txouts) > 0:
+                split_inputs(self.nodes[0], self.txouts, self.txouts2)
+            while len(self.nodes[0].getrawmempool()) > 0:
+                self.nodes[0].generate(1)
+            # Double txouts2 to txouts
+            while len(self.txouts2) > 0:
+                split_inputs(self.nodes[0], self.txouts2, self.txouts)
+            while len(self.nodes[0].getrawmempool()) > 0:
+                self.nodes[0].generate(1)
+            reps += 1
+        self.log.info("Finished splitting")
+
+        # Now we can connect the other nodes; we didn't want to connect them earlier
+        # so that the estimates would not be affected by the splitting transactions
+        self.start_node(1)
+
self.start_node(2) + connect_nodes(self.nodes[1], 0) + connect_nodes(self.nodes[0], 2) + connect_nodes(self.nodes[2], 1) + + self.sync_all() + + self.fees_per_kb = [] + self.memutxo = [] + self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting + self.log.info("Will output estimates for 1/2/3/6/15/25 blocks") + + for i in range(2): + self.log.info("Creating transactions and mining them with a block size that can't keep up") + # Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine + self.transact_and_mine(10, self.nodes[2]) + check_estimates(self.nodes[1], self.fees_per_kb) + + self.log.info("Creating transactions and mining them at a block size that is just big enough") + # Generate transactions while mining 10 more blocks, this time with node1 + # which mines blocks with capacity just above the rate that transactions are being created + self.transact_and_mine(10, self.nodes[1]) + check_estimates(self.nodes[1], self.fees_per_kb) + + # Finish by mining a normal-sized block: + while len(self.nodes[1].getrawmempool()) > 0: + self.nodes[1].generate(1) + + sync_blocks(self.nodes[0:3], wait=.1) + self.log.info("Final estimates after emptying mempools") + check_estimates(self.nodes[1], self.fees_per_kb) + +if __name__ == '__main__': + EstimateFeeTest().main() diff --git a/test/functional/feature_help.py b/test/functional/feature_help.py new file mode 100755 index 000000000..ed1d25c0d --- /dev/null +++ b/test/functional/feature_help.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Verify that starting bitcoin with -h works as expected.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class HelpTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def setup_network(self): + self.add_nodes(self.num_nodes) + # Don't start the node + + def get_node_output(self, *, ret_code_expected): + ret_code = self.nodes[0].process.wait(timeout=5) + assert_equal(ret_code, ret_code_expected) + self.nodes[0].stdout.seek(0) + self.nodes[0].stderr.seek(0) + out = self.nodes[0].stdout.read() + err = self.nodes[0].stderr.read() + self.nodes[0].stdout.close() + self.nodes[0].stderr.close() + + # Clean up TestNode state + self.nodes[0].running = False + self.nodes[0].process = None + self.nodes[0].rpc_connected = False + self.nodes[0].rpc = None + + return out, err + + def run_test(self): + self.log.info("Start bitcoin with -h for help text") + self.nodes[0].start(extra_args=['-h']) + # Node should exit immediately and output help to stdout. + output, _ = self.get_node_output(ret_code_expected=0) + assert b'Options' in output + self.log.info("Help text received: {} (...)".format(output[0:60])) + + self.log.info("Start bitcoin with -version for version information") + self.nodes[0].start(extra_args=['-version']) + # Node should exit immediately and output version to stdout. 
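+        # (unlike the -h case above, the version output has no 'Options'
+        # section, so we only check for the word 'version' below)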
+        output, _ = self.get_node_output(ret_code_expected=0)
+        assert b'version' in output
+        self.log.info("Version text received: {} (...)".format(output[0:60]))
+
+        # Test that arguments not in the help result in an error
+        self.log.info("Start bitcoind with -fakearg to make sure it does not start")
+        self.nodes[0].start(extra_args=['-fakearg'])
+        # Node should exit immediately and output an error to stderr
+        _, output = self.get_node_output(ret_code_expected=1)
+        assert b'Error parsing command line arguments' in output
+        self.log.info("Error message received: {} (...)".format(output[0:60]))
+
+
+if __name__ == '__main__':
+    HelpTest().main()
diff --git a/test/functional/feature_includeconf.py b/test/functional/feature_includeconf.py
new file mode 100755
index 000000000..d06f6826f
--- /dev/null
+++ b/test/functional/feature_includeconf.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Tests the includeconf argument
+
+Verify that:
+
+1. adding includeconf to the configuration file causes the includeconf
+   file to be loaded in the correct order.
+2. includeconf cannot be used as a command line argument.
+3. includeconf cannot be used recursively (i.e. includeconf can only
+   be used from the base config file).
+4. multiple includeconf arguments can be specified in the main config
+   file.
+"""
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+
+class IncludeConfTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = False
+        self.num_nodes = 1
+
+    def setup_chain(self):
+        super().setup_chain()
+        # Create additional config files
+        # - tmpdir/node0/relative.conf
+        with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
+            f.write("uacomment=relative\n")
+        # - tmpdir/node0/relative2.conf
+        with open(os.path.join(self.options.tmpdir, "node0", "relative2.conf"), "w", encoding="utf8") as f:
+            f.write("uacomment=relative2\n")
+        with open(os.path.join(self.options.tmpdir, "node0", "bitcoin.conf"), "a", encoding='utf8') as f:
+            f.write("uacomment=main\nincludeconf=relative.conf\n")
+
+    def run_test(self):
+        self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'")
+
+        subversion = self.nodes[0].getnetworkinfo()["subversion"]
+        assert subversion.endswith("main; relative)/")
+
+        self.log.info("-includeconf cannot be used as command-line arg")
+        self.stop_node(0)
+        self.nodes[0].assert_start_raises_init_error(extra_args=["-includeconf=relative2.conf"], expected_msg="Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf=relative2.conf")
+
+        self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'")
+        with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "a", encoding="utf8") as f:
+            f.write("includeconf=relative2.conf\n")
+        self.start_node(0)
+
+        subversion = self.nodes[0].getnetworkinfo()["subversion"]
+        assert subversion.endswith("main; relative)/")
+        self.stop_node(0, expected_stderr="warning: -includeconf cannot be used from included files; ignoring -includeconf=relative2.conf")
+
+        self.log.info("-includeconf cannot contain invalid arg")
+
+        # Commented out as long as we ignore invalid arguments in configuration files
+        #with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
+        #    f.write("foo=bar\n")
+        #self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Invalid configuration value foo")
+
+        self.log.info("-includeconf cannot be an invalid path")
+        os.remove(os.path.join(self.options.tmpdir, "node0", "relative.conf"))
+        self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Failed to include configuration file relative.conf")
+
+        self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'")
+        with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
+            # Restore initial file contents
+            f.write("uacomment=relative\n")
+
+        with open(os.path.join(self.options.tmpdir, "node0", "bitcoin.conf"), "a", encoding='utf8') as f:
+            f.write("includeconf=relative2.conf\n")
+
+        self.start_node(0)
+
+        subversion = self.nodes[0].getnetworkinfo()["subversion"]
+        assert subversion.endswith("main; relative; relative2)/")
+
+if __name__ == '__main__':
+    IncludeConfTest().main()
diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py
new file mode 100755
index 000000000..8bb7e0269
--- /dev/null
+++ b/test/functional/feature_logging.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test debug logging.""" + +import os + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_node import ErrorMatch + + +class LoggingTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def relative_log_path(self, name): + return os.path.join(self.nodes[0].datadir, "regtest", name) + + def run_test(self): + # test default log file name + default_log_path = self.relative_log_path("debug.log") + assert os.path.isfile(default_log_path) + + # test alternative log file name in datadir + self.restart_node(0, ["-debuglogfile=foo.log"]) + assert os.path.isfile(self.relative_log_path("foo.log")) + + # test alternative log file name outside datadir + tempname = os.path.join(self.options.tmpdir, "foo.log") + self.restart_node(0, ["-debuglogfile=%s" % tempname]) + assert os.path.isfile(tempname) + + # check that invalid log (relative) will cause error + invdir = self.relative_log_path("foo") + invalidname = os.path.join("foo", "foo.log") + self.stop_node(0) + exp_stderr = "Error: Could not open debug log file \S+$" + self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % (invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX) + assert not os.path.isfile(os.path.join(invdir, "foo.log")) + + # check that invalid log (relative) works after path exists + self.stop_node(0) + os.mkdir(invdir) + self.start_node(0, ["-debuglogfile=%s" % (invalidname)]) + assert os.path.isfile(os.path.join(invdir, "foo.log")) + + # check that invalid log (absolute) will cause error + self.stop_node(0) + invdir = os.path.join(self.options.tmpdir, "foo") + invalidname = os.path.join(invdir, "foo.log") + self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % invalidname], exp_stderr, match=ErrorMatch.FULL_REGEX) + assert not os.path.isfile(os.path.join(invdir, "foo.log")) + + # check that invalid log (absolute) works after path exists + self.stop_node(0) + os.mkdir(invdir) + self.start_node(0, ["-debuglogfile=%s" % (invalidname)]) + assert os.path.isfile(os.path.join(invdir, "foo.log")) + + # check that -nodebuglogfile disables logging + self.stop_node(0) + os.unlink(default_log_path) + assert not os.path.isfile(default_log_path) + self.start_node(0, ["-nodebuglogfile"]) + assert not os.path.isfile(default_log_path) + + # just sanity check no crash here + self.stop_node(0) + self.start_node(0, ["-debuglogfile=%s" % os.devnull]) + + +if __name__ == '__main__': + LoggingTest().main() diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py new file mode 100755 index 000000000..87c318de9 --- /dev/null +++ b/test/functional/feature_maxuploadtarget.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test behavior of -maxuploadtarget. + +* Verify that getdata requests for old blocks (>1week) are dropped +if uploadtarget has been reached. +* Verify that getdata requests for recent blocks are respected even +if uploadtarget has been reached. +* Verify that the upload counters are reset after 24 hours. 
+""" +from collections import defaultdict +import time + +from test_framework.messages import CInv, msg_getdata +from test_framework.mininode import P2PInterface +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, mine_large_block + +class TestP2PConn(P2PInterface): + def __init__(self): + super().__init__() + self.block_receive_map = defaultdict(int) + + def on_inv(self, message): + pass + + def on_block(self, message): + message.block.calc_sha256() + self.block_receive_map[message.block.sha256] += 1 + +class MaxUploadTest(BitcoinTestFramework): + + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + self.extra_args = [["-maxuploadtarget=800"]] + + # Cache for utxos, as the listunspent may take a long time later in the test + self.utxo_cache = [] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Before we connect anything, we first set the time on the node + # to be in the past, otherwise things break because the CNode + # time counters can't be reset backward after initialization + old_time = int(time.time() - 2*60*60*24*7) + self.nodes[0].setmocktime(old_time) + + # Generate some old blocks + self.nodes[0].generate(130) + + # p2p_conns[0] will only request old blocks + # p2p_conns[1] will only request new blocks + # p2p_conns[2] will test resetting the counters + p2p_conns = [] + + for _ in range(3): + p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn())) + + # Now mine a big block + mine_large_block(self.nodes[0], self.utxo_cache) + + # Store the hash; we'll request this later + big_old_block = self.nodes[0].getbestblockhash() + old_block_size = self.nodes[0].getblock(big_old_block, True)['size'] + big_old_block = int(big_old_block, 16) + + # Advance to two days ago + self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24) + + # Mine one more block, so that the prior block looks old + mine_large_block(self.nodes[0], self.utxo_cache) + + # We'll be requesting this new block too + big_new_block = self.nodes[0].getbestblockhash() + big_new_block = int(big_new_block, 16) + + # p2p_conns[0] will test what happens if we just keep requesting the + # the same big old block too many times (expect: disconnect) + + getdata_request = msg_getdata() + getdata_request.inv.append(CInv(2, big_old_block)) + + max_bytes_per_day = 800*1024*1024 + daily_buffer = 144 * 4000000 + max_bytes_available = max_bytes_per_day - daily_buffer + success_count = max_bytes_available // old_block_size + + # 576MB will be reserved for relaying new blocks, so expect this to + # succeed for ~235 tries. + for i in range(success_count): + p2p_conns[0].send_message(getdata_request) + p2p_conns[0].sync_with_ping() + assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1) + + assert_equal(len(self.nodes[0].getpeerinfo()), 3) + # At most a couple more tries should succeed (depending on how long + # the test has been running so far). + for i in range(3): + p2p_conns[0].send_message(getdata_request) + p2p_conns[0].wait_for_disconnect() + assert_equal(len(self.nodes[0].getpeerinfo()), 2) + self.log.info("Peer 0 disconnected after downloading old block too many times") + + # Requesting the current block on p2p_conns[1] should succeed indefinitely, + # even when over the max upload target. 
+ # We'll try 800 times + getdata_request.inv = [CInv(2, big_new_block)] + for i in range(800): + p2p_conns[1].send_message(getdata_request) + p2p_conns[1].sync_with_ping() + assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1) + + self.log.info("Peer 1 able to repeatedly download new block") + + # But if p2p_conns[1] tries for an old block, it gets disconnected too. + getdata_request.inv = [CInv(2, big_old_block)] + p2p_conns[1].send_message(getdata_request) + p2p_conns[1].wait_for_disconnect() + assert_equal(len(self.nodes[0].getpeerinfo()), 1) + + self.log.info("Peer 1 disconnected after trying to download old block") + + self.log.info("Advancing system time on node to clear counters...") + + # If we advance the time by 24 hours, then the counters should reset, + # and p2p_conns[2] should be able to retrieve the old block. + self.nodes[0].setmocktime(int(time.time())) + p2p_conns[2].sync_with_ping() + p2p_conns[2].send_message(getdata_request) + p2p_conns[2].sync_with_ping() + assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1) + + self.log.info("Peer 2 able to download old block") + + self.nodes[0].disconnect_p2ps() + + #stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1 + self.log.info("Restarting nodes with -whitelist=127.0.0.1") + self.stop_node(0) + self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1"]) + + # Reconnect to self.nodes[0] + self.nodes[0].add_p2p_connection(TestP2PConn()) + + #retrieve 20 blocks which should be enough to break the 1MB limit + getdata_request.inv = [CInv(2, big_new_block)] + for i in range(20): + self.nodes[0].p2p.send_message(getdata_request) + self.nodes[0].p2p.sync_with_ping() + assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1) + + getdata_request.inv = [CInv(2, big_old_block)] + self.nodes[0].p2p.send_and_ping(getdata_request) + assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the whitelist + + self.log.info("Peer still connected after trying to download old block (whitelisted)") + +if __name__ == '__main__': + MaxUploadTest().main() diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py new file mode 100755 index 000000000..dbff6f15f --- /dev/null +++ b/test/functional/feature_minchainwork.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test logic for setting nMinimumChainWork on command line. + +Nodes don't consider themselves out of "initial block download" until +their active chain has more work than nMinimumChainWork. + +Nodes don't download blocks from a peer unless the peer's best known block +has more work than nMinimumChainWork. + +While in initial block download, nodes won't relay blocks to their peers, so +test that this parameter functions as intended by verifying that block relay +only succeeds past a given node once its nMinimumChainWork has been exceeded. 
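+
+On regtest every block adds 2 units of work, so -minimumchainwork=0x65
+(decimal 101) is reached about 50 blocks above genesis.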
+""" + +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import connect_nodes, assert_equal + +# 2 hashes required per regtest block (with no difficulty adjustment) +REGTEST_WORK_PER_BLOCK = 2 + +class MinimumChainWorkTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + + self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]] + self.node_min_work = [0, 101, 101] + + def setup_network(self): + # This test relies on the chain setup being: + # node0 <- node1 <- node2 + # Before leaving IBD, nodes prefer to download blocks from outbound + # peers, so ensure that we're mining on an outbound peer and testing + # block relay to inbound peers. + self.setup_nodes() + for i in range(self.num_nodes-1): + connect_nodes(self.nodes[i+1], i) + + def run_test(self): + # Start building a chain on node0. node2 shouldn't be able to sync until node1's + # minchainwork is exceeded + starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work + self.log.info("Testing relay across node %d (minChainWork = %d)", 1, self.node_min_work[1]) + + starting_blockcount = self.nodes[2].getblockcount() + + num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK) + self.log.info("Generating %d blocks on node0", num_blocks_to_generate) + hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate, + self.nodes[0].get_deterministic_priv_key().address) + + self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork']) + + # Sleep a few seconds and verify that node2 didn't get any new blocks + # or headers. We sleep, rather than sync_blocks(node0, node1) because + # it's reasonable either way for node1 to get the blocks, or not get + # them (since they're below node1's minchainwork). + time.sleep(3) + + self.log.info("Verifying node 2 has no more blocks than before") + self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes]) + # Node2 shouldn't have any new headers yet, because node1 should not + # have relayed anything. + assert_equal(len(self.nodes[2].getchaintips()), 1) + assert_equal(self.nodes[2].getchaintips()[0]['height'], 0) + + assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash() + assert_equal(self.nodes[2].getblockcount(), starting_blockcount) + + self.log.info("Generating one more block") + self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) + + self.log.info("Verifying nodes are all synced") + + # Because nodes in regtest are all manual connections (eg using + # addnode), node1 should not have disconnected node0. If not for that, + # we'd expect node1 to have disconnected node0 for serving an + # insufficient work chain, in which case we'd need to reconnect them to + # continue the test. + + self.sync_all() + self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes]) + +if __name__ == '__main__': + MinimumChainWorkTest().main() diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py new file mode 100755 index 000000000..a93443f2d --- /dev/null +++ b/test/functional/feature_notifications.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the -alertnotify, -blocknotify and -walletnotify options.""" +import os + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, wait_until, connect_nodes_bi + +class NotificationsTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify") + self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify") + self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify") + os.mkdir(self.alertnotify_dir) + os.mkdir(self.blocknotify_dir) + os.mkdir(self.walletnotify_dir) + + # -alertnotify and -blocknotify on node0, walletnotify on node1 + self.extra_args = [["-blockversion=2", + "-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')), + "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))], + ["-blockversion=211", + "-rescan", + "-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, '%s'))]] + super().setup_network() + + def run_test(self): + self.log.info("test -blocknotify") + block_count = 10 + blocks = self.nodes[1].generate(block_count) + + # wait at most 10 seconds for expected number of files before reading the content + wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10) + + # directory content should equal the generated blocks hashes + assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir))) + + self.log.info("test -walletnotify") + # wait at most 10 seconds for expected number of files before reading the content + wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10) + + # directory content should equal the generated transaction hashes + txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count))) + assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir))) + for tx_file in os.listdir(self.walletnotify_dir): + os.remove(os.path.join(self.walletnotify_dir, tx_file)) + + self.log.info("test -walletnotify after rescan") + # restart node to rescan to force wallet notifications + self.restart_node(1) + connect_nodes_bi(self.nodes, 0, 1) + + wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10) + + # directory content should equal the generated transaction hashes + txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count))) + assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir))) + + # Mine another 41 up-version blocks. -alertnotify should trigger on the 51st. 
+ self.log.info("test -alertnotify") + self.nodes[1].generate(41) + self.sync_all() + + # Give bitcoind 10 seconds to write the alert notification + wait_until(lambda: len(os.listdir(self.alertnotify_dir)), timeout=10) + + for notify_file in os.listdir(self.alertnotify_dir): + os.remove(os.path.join(self.alertnotify_dir, notify_file)) + + # Mine more up-version blocks, should not get more alerts: + self.nodes[1].generate(2) + self.sync_all() + + self.log.info("-alertnotify should not continue notifying for more unknown version blocks") + assert_equal(len(os.listdir(self.alertnotify_dir)), 0) + +if __name__ == '__main__': + NotificationsTest().main() diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py new file mode 100755 index 000000000..a79cc3d34 --- /dev/null +++ b/test/functional/feature_nulldummy.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test NULLDUMMY softfork. + +Connect to a single node. +Generate 2 blocks (save the coinbases for later). +Generate 427 more blocks. +[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block. +[Policy] Check that non-NULLDUMMY transactions are rejected before activation. +[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block. +[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block. +""" + +from test_framework.blocktools import create_coinbase, create_block, create_transaction, add_witness_commitment +from test_framework.messages import CTransaction +from test_framework.script import CScript +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str + +import time + +NULLDUMMY_ERROR = "non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero) (code 64)" + +def trueDummy(tx): + scriptSig = CScript(tx.vin[0].scriptSig) + newscript = [] + for i in scriptSig: + if (len(newscript) == 0): + assert(len(i) == 0) + newscript.append(b'\x51') + else: + newscript.append(i) + tx.vin[0].scriptSig = CScript(newscript) + tx.rehash() + +class NULLDUMMYTest(BitcoinTestFramework): + + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + # This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through + # normal segwit activation here (and don't use the default always-on behaviour). 
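+        # (with BIP9's regtest parameters of a 144-block period and 108-block
+        # threshold, segwit locks in at height 288 and activates at height 432,
+        # matching the block heights in the module docstring)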
+ self.extra_args = [['-whitelist=127.0.0.1', '-vbparams=segwit:0:999999999999', '-addresstype=legacy', "-deprecatedrpc=addwitnessaddress"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.address = self.nodes[0].getnewaddress() + self.ms_address = self.nodes[0].addmultisigaddress(1, [self.address])['address'] + self.wit_address = self.nodes[0].addwitnessaddress(self.address) + self.wit_ms_address = self.nodes[0].addmultisigaddress(1, [self.address], '', 'p2sh-segwit')['address'] + + self.coinbase_blocks = self.nodes[0].generate(2) # Block 2 + coinbase_txid = [] + for i in self.coinbase_blocks: + coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0]) + self.nodes[0].generate(427) # Block 429 + self.lastblockhash = self.nodes[0].getbestblockhash() + self.tip = int("0x" + self.lastblockhash, 0) + self.lastblockheight = 429 + self.lastblocktime = int(time.time()) + 429 + + self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]") + test1txs = [create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, amount=49)] + txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True) + test1txs.append(create_transaction(self.nodes[0], txid1, self.ms_address, amount=48)) + txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True) + test1txs.append(create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, amount=49)) + txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True) + self.block_submit(self.nodes[0], test1txs, False, True) + + self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation") + test2tx = create_transaction(self.nodes[0], txid2, self.ms_address, amount=47) + trueDummy(test2tx) + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True) + + self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]") + self.block_submit(self.nodes[0], [test2tx], False, True) + + self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation") + test4tx = create_transaction(self.nodes[0], test2tx.hash, self.address, amount=46) + test6txs=[CTransaction(test4tx)] + trueDummy(test4tx) + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True) + self.block_submit(self.nodes[0], [test4tx]) + + self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation") + test5tx = create_transaction(self.nodes[0], txid3, self.wit_address, amount=48) + test6txs.append(CTransaction(test5tx)) + test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01' + assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True) + self.block_submit(self.nodes[0], [test5tx], True) + + self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]") + for i in test6txs: + self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True) + self.block_submit(self.nodes[0], test6txs, True, True) + + + def block_submit(self, node, txs, witness = False, accept = False): + block = 
create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
+        block.nVersion = 4
+        for tx in txs:
+            tx.rehash()
+            block.vtx.append(tx)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        witness and add_witness_commitment(block)
+        block.rehash()
+        block.solve()
+        node.submitblock(bytes_to_hex_str(block.serialize(True)))
+        if (accept):
+            assert_equal(node.getbestblockhash(), block.hash)
+            self.tip = block.sha256
+            self.lastblockhash = block.hash
+            self.lastblocktime += 1
+            self.lastblockheight += 1
+        else:
+            assert_equal(node.getbestblockhash(), self.lastblockhash)
+
+if __name__ == '__main__':
+    NULLDUMMYTest().main()
diff --git a/test/functional/feature_proxy.py b/test/functional/feature_proxy.py
new file mode 100755
index 000000000..31d2ee8e1
--- /dev/null
+++ b/test/functional/feature_proxy.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test bitcoind with different proxy configurations.
+
+Test plan:
+- Start bitcoinds with different proxy configurations
+- Use addnode to initiate connections
+- Verify that proxies are connected to, and the right connection command is given
+- Proxy configurations to test on the bitcoind side:
+  - `-proxy` (proxy everything)
+  - `-onion` (proxy just onions)
+  - `-proxyrandomize` Circuit randomization
+- Proxy configurations to test on the proxy side:
+  - support no authentication (other proxy)
+  - support no authentication + user/pass authentication (Tor)
+  - proxy on IPv6
+
+- Create various proxies (as threads)
+- Create bitcoinds that connect to them
+- Manipulate the bitcoinds using addnode (onetry) and observe the effects
+
+addnode connect to IPv4
+addnode connect to IPv6
+addnode connect to onion
+addnode connect to generic DNS name
+"""
+
+import socket
+import os
+
+from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+    PORT_MIN,
+    PORT_RANGE,
+    assert_equal,
+)
+from test_framework.netutil import test_ipv6_local
+
+RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE  # Start after p2p and rpc ports
+
+class ProxyTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.num_nodes = 4
+
+    def setup_nodes(self):
+        self.have_ipv6 = test_ipv6_local()
+        # Create two proxies on different ports
+        # ... one unauthenticated
+        self.conf1 = Socks5Configuration()
+        self.conf1.addr = ('127.0.0.1', RANGE_BEGIN + (os.getpid() % 1000))
+        self.conf1.unauth = True
+        self.conf1.auth = False
+        # ... one supporting authenticated and unauthenticated (Tor)
+        self.conf2 = Socks5Configuration()
+        self.conf2.addr = ('127.0.0.1', RANGE_BEGIN + 1000 + (os.getpid() % 1000))
+        self.conf2.unauth = True
+        self.conf2.auth = True
+        if self.have_ipv6:
+            # ... one on IPv6 with similar configuration
+            self.conf3 = Socks5Configuration()
+            self.conf3.af = socket.AF_INET6
+            self.conf3.addr = ('::1', RANGE_BEGIN + 2000 + (os.getpid() % 1000))
+            self.conf3.unauth = True
+            self.conf3.auth = True
+        else:
+            self.log.warning("Testing without local IPv6 support")
+
+        self.serv1 = Socks5Server(self.conf1)
+        self.serv1.start()
+        self.serv2 = Socks5Server(self.conf2)
+        self.serv2.start()
+        if self.have_ipv6:
+            self.serv3 = Socks5Server(self.conf3)
+            self.serv3.start()
+
+        # Note: proxies are not used to connect to local nodes; this is because
+        # the proxy to use is based on CService.GetNetwork(), which returns
+        # NET_UNROUTABLE for localhost.
+        args = [
+            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-proxyrandomize=1'],
+            ['-listen', '-proxy=%s:%i' % (self.conf1.addr),'-onion=%s:%i' % (self.conf2.addr),'-proxyrandomize=0'],
+            ['-listen', '-proxy=%s:%i' % (self.conf2.addr),'-proxyrandomize=1'],
+            []
+        ]
+        if self.have_ipv6:
+            args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
+        self.add_nodes(self.num_nodes, extra_args=args)
+        self.start_nodes()
+
+    def node_test(self, node, proxies, auth, test_onion=True):
+        rv = []
+        # Test: outgoing IPv4 connection through node
+        node.addnode("15.61.23.23:1234", "onetry")
+        cmd = proxies[0].queue.get()
+        assert(isinstance(cmd, Socks5Command))
+        # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
+        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
+        assert_equal(cmd.addr, b"15.61.23.23")
+        assert_equal(cmd.port, 1234)
+        if not auth:
+            assert_equal(cmd.username, None)
+            assert_equal(cmd.password, None)
+        rv.append(cmd)
+
+        if self.have_ipv6:
+            # Test: outgoing IPv6 connection through node
+            node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
+            cmd = proxies[1].queue.get()
+            assert(isinstance(cmd, Socks5Command))
+            # Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
+            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
+            assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
+            assert_equal(cmd.port, 5443)
+            if not auth:
+                assert_equal(cmd.username, None)
+                assert_equal(cmd.password, None)
+            rv.append(cmd)
+
+        if test_onion:
+            # Test: outgoing onion connection through node
+            node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
+            cmd = proxies[2].queue.get()
+            assert(isinstance(cmd, Socks5Command))
+            assert_equal(cmd.atyp, AddressType.DOMAINNAME)
+            assert_equal(cmd.addr, b"bitcoinostk4e4re.onion")
+            assert_equal(cmd.port, 8333)
+            if not auth:
+                assert_equal(cmd.username, None)
+                assert_equal(cmd.password, None)
+            rv.append(cmd)
+
+        # Test: outgoing DNS name connection through node
+        node.addnode("node.noumenon:8333", "onetry")
+        cmd = proxies[3].queue.get()
+        assert(isinstance(cmd, Socks5Command))
+        assert_equal(cmd.atyp, AddressType.DOMAINNAME)
+        assert_equal(cmd.addr, b"node.noumenon")
+        assert_equal(cmd.port, 8333)
+        if not auth:
+            assert_equal(cmd.username, None)
+            assert_equal(cmd.password, None)
+        rv.append(cmd)
+
+        return rv
+
+    def run_test(self):
+        # basic -proxy
+        self.node_test(self.nodes[0], [self.serv1, self.serv1, self.serv1, self.serv1], False)
+
+        # -proxy plus -onion
+        self.node_test(self.nodes[1], [self.serv1, self.serv1, self.serv2, self.serv1], False)
+
+        # -proxy plus -onion, -proxyrandomize
+        rv = self.node_test(self.nodes[2], [self.serv2, self.serv2, self.serv2, self.serv2], True)
+        # Check that credentials used for 
-proxyrandomize connections are unique + credentials = set((x.username,x.password) for x in rv) + assert_equal(len(credentials), len(rv)) + + if self.have_ipv6: + # proxy on IPv6 localhost + self.node_test(self.nodes[3], [self.serv3, self.serv3, self.serv3, self.serv3], False, False) + + def networks_dict(d): + r = {} + for x in d['networks']: + r[x['name']] = x + return r + + # test RPC getnetworkinfo + n0 = networks_dict(self.nodes[0].getnetworkinfo()) + for net in ['ipv4','ipv6','onion']: + assert_equal(n0[net]['proxy'], '%s:%i' % (self.conf1.addr)) + assert_equal(n0[net]['proxy_randomize_credentials'], True) + assert_equal(n0['onion']['reachable'], True) + + n1 = networks_dict(self.nodes[1].getnetworkinfo()) + for net in ['ipv4','ipv6']: + assert_equal(n1[net]['proxy'], '%s:%i' % (self.conf1.addr)) + assert_equal(n1[net]['proxy_randomize_credentials'], False) + assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr)) + assert_equal(n1['onion']['proxy_randomize_credentials'], False) + assert_equal(n1['onion']['reachable'], True) + + n2 = networks_dict(self.nodes[2].getnetworkinfo()) + for net in ['ipv4','ipv6','onion']: + assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr)) + assert_equal(n2[net]['proxy_randomize_credentials'], True) + assert_equal(n2['onion']['reachable'], True) + + if self.have_ipv6: + n3 = networks_dict(self.nodes[3].getnetworkinfo()) + for net in ['ipv4','ipv6']: + assert_equal(n3[net]['proxy'], '[%s]:%i' % (self.conf3.addr)) + assert_equal(n3[net]['proxy_randomize_credentials'], False) + assert_equal(n3['onion']['reachable'], False) + +if __name__ == '__main__': + ProxyTest().main() + diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py new file mode 100755 index 000000000..772151dc4 --- /dev/null +++ b/test/functional/feature_pruning.py @@ -0,0 +1,461 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the pruning code. + +WARNING: +This test uses 4GB of disk space. +This test takes 30 mins or more (up to 2 hours) +""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, connect_nodes, mine_large_block, sync_blocks, wait_until + +import os + +MIN_BLOCKS_TO_KEEP = 288 + +# Rescans start at the earliest block up to 2 hours before a key timestamp, so +# the manual prune RPC avoids pruning blocks in the same window to be +# compatible with pruning based on key creation time. +TIMESTAMP_WINDOW = 2 * 60 * 60 + + +def calc_usage(blockdir): + return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.) + +class PruneTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 6 + self.rpc_timewait = 900 + + # Create nodes 0 and 1 to mine. + # Create node 2 to test pruning. 
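+        # (the relaxed ancestor/descendant limits below let long chains of
+        # large unconfirmed transactions stay in the mempool across reorgs)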
+        self.full_node_default_args = ["-maxreceivebuffer=20000", "-checkblocks=5", "-limitdescendantcount=100", "-limitdescendantsize=5000", "-limitancestorcount=100", "-limitancestorsize=5000"]
+        # Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
+        # Create node 5 to test wallet in prune mode, but do not connect
+        self.extra_args = [
+            self.full_node_default_args,
+            self.full_node_default_args,
+            ["-maxreceivebuffer=20000", "-prune=550"],
+            ["-maxreceivebuffer=20000"],
+            ["-maxreceivebuffer=20000"],
+            ["-prune=550"],
+        ]
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_wallet()
+
+    def setup_network(self):
+        self.setup_nodes()
+
+        self.prunedir = os.path.join(self.nodes[2].datadir, 'regtest', 'blocks', '')
+
+        connect_nodes(self.nodes[0], 1)
+        connect_nodes(self.nodes[1], 2)
+        connect_nodes(self.nodes[2], 0)
+        connect_nodes(self.nodes[0], 3)
+        connect_nodes(self.nodes[0], 4)
+        sync_blocks(self.nodes[0:5])
+
+    def setup_nodes(self):
+        self.add_nodes(self.num_nodes, self.extra_args)
+        self.start_nodes()
+
+    def create_big_chain(self):
+        # Start by creating some coinbases we can spend later
+        self.nodes[1].generate(200)
+        sync_blocks(self.nodes[0:2])
+        self.nodes[0].generate(150)
+        # Then mine enough full blocks to create more than 550MiB of data
+        for i in range(645):
+            mine_large_block(self.nodes[0], self.utxo_cache_0)
+
+        sync_blocks(self.nodes[0:5])
+
+    def test_height_min(self):
+        if not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")):
+            raise AssertionError("blk00000.dat is missing, pruning too early")
+        self.log.info("Success")
+        self.log.info("Though we're already using more than 550MiB, current usage: %d" % calc_usage(self.prunedir))
+        self.log.info("Mining 25 more blocks should cause the first block file to be pruned")
+        # Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
+        for i in range(25):
+            mine_large_block(self.nodes[0], self.utxo_cache_0)
+
+        # Wait for blk00000.dat to be pruned
+        wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
+
+        self.log.info("Success")
+        usage = calc_usage(self.prunedir)
+        self.log.info("Usage should be below target: %d" % usage)
+        if (usage > 550):
+            raise AssertionError("Pruning target not being met")
+
+    def create_chain_with_staleblocks(self):
+        # Create stale blocks in manageable-sized chunks
+        self.log.info("Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
+
+        for j in range(12):
+            # Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
+            # Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
+            # Stopping node 0 also clears its mempool, so it doesn't have node1's transactions to accidentally mine
+            self.stop_node(0)
+            self.start_node(0, extra_args=self.full_node_default_args)
+            # Mine 24 blocks in node 1
+            for i in range(24):
+                if j == 0:
+                    mine_large_block(self.nodes[1], self.utxo_cache_1)
+                else:
+                    # Add node1's wallet transactions back to the mempool, to
+                    # keep the mined blocks from being too small.
+                    self.nodes[1].resendwallettransactions()
+                    self.nodes[1].generate(1)  # tx's already in mempool from previous disconnects
+
+            # Reorg back with 25 block chain from node 0
+            for i in range(25):
+                mine_large_block(self.nodes[0], self.utxo_cache_0)
+
+            # Create connections in this order so both nodes can see the reorg at the same time
+            connect_nodes(self.nodes[1], 0)
+            connect_nodes(self.nodes[2], 0)
+            sync_blocks(self.nodes[0:3])
+
+        self.log.info("Usage can be over target because of high stale rate: %d" % calc_usage(self.prunedir))
+
+    def reorg_test(self):
+        # Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
+        # This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain
+        # Reboot node 1 to clear its mempool (hopefully make the invalidate faster)
+        # Lower the block max size so we don't keep mining all our big mempool transactions (from disconnected blocks)
+        self.stop_node(1)
+        self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxweight=20000", "-checkblocks=5"])
+
+        height = self.nodes[1].getblockcount()
+        self.log.info("Current block height: %d" % height)
+
+        invalidheight = height-287
+        badhash = self.nodes[1].getblockhash(invalidheight)
+        self.log.info("Invalidating block %s at height %d" % (badhash,invalidheight))
+        self.nodes[1].invalidateblock(badhash)
+
+        # We've now switched to our previously mined 24-block fork on node 1, but that's not what we want.
+        # So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
+        mainchainhash = self.nodes[0].getblockhash(invalidheight - 1)
+        curhash = self.nodes[1].getblockhash(invalidheight - 1)
+        while curhash != mainchainhash:
+            self.nodes[1].invalidateblock(curhash)
+            curhash = self.nodes[1].getblockhash(invalidheight - 1)
+
+        assert(self.nodes[1].getblockcount() == invalidheight - 1)
+        self.log.info("New best height: %d" % self.nodes[1].getblockcount())
+
+        # Reboot node1 to clear those giant tx's from mempool
+        self.stop_node(1)
+        self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxweight=20000", "-checkblocks=5"])
+
+        self.log.info("Generating new longer chain of 300 more blocks")
+        self.nodes[1].generate(300)
+
+        self.log.info("Reconnect nodes")
+        connect_nodes(self.nodes[0], 1)
+        connect_nodes(self.nodes[2], 1)
+        sync_blocks(self.nodes[0:3], timeout=120)
+
+        self.log.info("Verify height on node 2: %d" % self.nodes[2].getblockcount())
+        self.log.info("Usage possibly still high because of stale blocks in block files: %d" % calc_usage(self.prunedir))
+
+        self.log.info("Mine 220 more blocks so we have requisite history (some blocks will be big and cause pruning of previous chain)")
+
+        # Get node0's wallet transactions back in its mempool, to keep the
+        # mined blocks from being too small.
+        self.nodes[0].resendwallettransactions()
+
+        for i in range(22):
+            # This can be slow, so do this in multiple RPC calls to avoid
+            # RPC timeouts.
+ self.nodes[0].generate(10) #node 0 has many large tx's in its mempool from the disconnects + sync_blocks(self.nodes[0:3], timeout=300) + + usage = calc_usage(self.prunedir) + self.log.info("Usage should be below target: %d" % usage) + if (usage > 550): + raise AssertionError("Pruning target not being met") + + return invalidheight,badhash + + def reorg_back(self): + # Verify that a block on the old main chain fork has been pruned away + assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash) + self.log.info("Will need to redownload block %d" % self.forkheight) + + # Verify that we have enough history to reorg back to the fork point + # Although this is more than 288 blocks, because this chain was written more recently + # and only its other 299 small and 220 large blocks are in the block files after it, + # it is expected to still be retained + self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight)) + + first_reorg_height = self.nodes[2].getblockcount() + curchainhash = self.nodes[2].getblockhash(self.mainchainheight) + self.nodes[2].invalidateblock(curchainhash) + goalbestheight = self.mainchainheight + goalbesthash = self.mainchainhash2 + + # As of 0.10 the current block download logic is not able to reorg to the original chain created in + # create_chain_with_stale_blocks because it doesn't know of any peer that's on that chain from which to + # redownload its missing blocks. + # Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain + # because it has all the block data. + # However it must mine enough blocks to have a more work chain than the reorg_test chain in order + # to trigger node 2's block download logic. + # At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg + if self.nodes[2].getblockcount() < self.mainchainheight: + blocks_to_mine = first_reorg_height + 1 - self.mainchainheight + self.log.info("Rewind node 0 to prev main chain to mine longer chain to trigger redownload. 
Blocks needed: %d" % blocks_to_mine) + self.nodes[0].invalidateblock(curchainhash) + assert(self.nodes[0].getblockcount() == self.mainchainheight) + assert(self.nodes[0].getbestblockhash() == self.mainchainhash2) + goalbesthash = self.nodes[0].generate(blocks_to_mine)[-1] + goalbestheight = first_reorg_height + 1 + + self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload") + # Wait for Node 2 to reorg to proper height + wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900) + assert(self.nodes[2].getbestblockhash() == goalbesthash) + # Verify we can now have the data for a block previously pruned + assert(self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight) + + def manual_test(self, node_number, use_timestamp): + # at this point, node has 995 blocks and has not yet run in prune mode + self.start_node(node_number) + node = self.nodes[node_number] + assert_equal(node.getblockcount(), 995) + assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500) + + # now re-start in manual pruning mode + self.stop_node(node_number) + self.start_node(node_number, extra_args=["-prune=1"]) + node = self.nodes[node_number] + assert_equal(node.getblockcount(), 995) + + def height(index): + if use_timestamp: + return node.getblockheader(node.getblockhash(index))["time"] + TIMESTAMP_WINDOW + else: + return index + + def prune(index, expected_ret=None): + ret = node.pruneblockchain(height(index)) + # Check the return value. When use_timestamp is True, just check + # that the return value is less than or equal to the expected + # value, because when more than one block is generated per second, + # a timestamp will not be granular enough to uniquely identify an + # individual block. 
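+            # (e.g. two blocks mined within the same second share a timestamp,
+            # so pruning "up to" that time can stop one block earlier)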
+            if expected_ret is None:
+                expected_ret = index
+            if use_timestamp:
+                assert_greater_than(ret, 0)
+                assert_greater_than(expected_ret + 1, ret)
+            else:
+                assert_equal(ret, expected_ret)
+
+        def has_block(index):
+            return os.path.isfile(os.path.join(self.nodes[node_number].datadir, "regtest", "blocks", "blk{:05}.dat".format(index)))
+
+        # should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
+        assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
+
+        # Save block transaction count before pruning, assert value
+        block1_details = node.getblock(node.getblockhash(1))
+        assert_equal(block1_details["nTx"], len(block1_details["tx"]))
+
+        # mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
+        node.generate(6)
+        assert_equal(node.getblockchaininfo()["blocks"], 1001)
+
+        # Pruned block should still know the number of transactions
+        assert_equal(node.getblockheader(node.getblockhash(1))["nTx"], block1_details["nTx"])
+
+        # negative heights should raise an exception
+        assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)
+
+        # height=100 too low to prune first block file so this is a no-op
+        prune(100)
+        if not has_block(0):
+            raise AssertionError("blk00000.dat is missing when should still be there")
+
+        # Does nothing
+        node.pruneblockchain(height(0))
+        if not has_block(0):
+            raise AssertionError("blk00000.dat is missing when should still be there")
+
+        # height=500 should prune first file
+        prune(500)
+        if has_block(0):
+            raise AssertionError("blk00000.dat is still there, should be pruned by now")
+        if not has_block(1):
+            raise AssertionError("blk00001.dat is missing when should still be there")
+
+        # height=650 should prune second file
+        prune(650)
+        if has_block(1):
+            raise AssertionError("blk00001.dat is still there, should be pruned by now")
+
+        # height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
+        prune(1000, 1001 - MIN_BLOCKS_TO_KEEP)
+        if not has_block(2):
+            raise AssertionError("blk00002.dat is missing when should still be there")
+
+        # advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
+        node.generate(288)
+        prune(1000)
+        if has_block(2):
+            raise AssertionError("blk00002.dat is still there, should be pruned by now")
+        if has_block(3):
+            raise AssertionError("blk00003.dat is still there, should be pruned by now")
+
+        # stop node, start back up with auto-prune at 550MB, make sure still runs
+        self.stop_node(node_number)
+        self.start_node(node_number, extra_args=["-prune=550"])
+
+        self.log.info("Success")
+
+    def wallet_test(self):
+        # check that the pruning node's wallet is still in good shape
+        self.log.info("Stop and start pruning node to trigger wallet rescan")
+        self.stop_node(2)
+        self.start_node(2, extra_args=["-prune=550"])
+        self.log.info("Success")
+
+        # check that wallet loads successfully when restarting a pruned node after IBD.
+        # This was reported to fail in #7494.
+        self.log.info("Syncing node 5 to test wallet")
+        connect_nodes(self.nodes[0], 5)
+        nds = [self.nodes[0], self.nodes[5]]
+        sync_blocks(nds, wait=5, timeout=300)
+        self.stop_node(5)  # stop and start to trigger rescan
+        self.start_node(5, extra_args=["-prune=550"])
+        self.log.info("Success")
+
+    def run_test(self):
+        self.log.info("Warning! 
This test requires 4GB of disk space and takes over 30 mins (up to 2 hours)") + self.log.info("Mining a big blockchain of 995 blocks") + + # Determine default relay fee + self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"] + + # Cache for utxos, as the listunspent may take a long time later in the test + self.utxo_cache_0 = [] + self.utxo_cache_1 = [] + + self.create_big_chain() + # Chain diagram key: + # * blocks on main chain + # +,&,$,@ blocks on other forks + # X invalidated block + # N1 Node 1 + # + # Start by mining a simple chain that all nodes have + # N0=N1=N2 **...*(995) + + # stop manual-pruning node with 995 blocks + self.stop_node(3) + self.stop_node(4) + + self.log.info("Check that we haven't started pruning yet because we're below PruneAfterHeight") + self.test_height_min() + # Extend this chain past the PruneAfterHeight + # N0=N1=N2 **...*(1020) + + self.log.info("Check that we'll exceed disk space target if we have a very high stale block rate") + self.create_chain_with_staleblocks() + # Disconnect N0 + # And mine a 24 block chain on N1 and a separate 25 block chain on N0 + # N1=N2 **...*+...+(1044) + # N0 **...**...**(1045) + # + # reconnect nodes causing reorg on N1 and N2 + # N1=N2 **...*(1020) *...**(1045) + # \ + # +...+(1044) + # + # repeat this process until you have 12 stale forks hanging off the + # main chain on N1 and N2 + # N0 *************************...***************************(1320) + # + # N1=N2 **...*(1020) *...**(1045) *.. ..**(1295) *...**(1320) + # \ \ \ + # +...+(1044) &.. $...$(1319) + + # Save some current chain state for later use + self.mainchainheight = self.nodes[2].getblockcount() #1320 + self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight) + + self.log.info("Check that we can survive a 288 block reorg still") + (self.forkheight,self.forkhash) = self.reorg_test() #(1033, ) + # Now create a 288 block reorg by mining a longer chain on N1 + # First disconnect N1 + # Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain + # N1 **...*(1020) **...**(1032)X.. + # \ + # ++...+(1031)X.. + # + # Now mine 300 more blocks on N1 + # N1 **...*(1020) **...**(1032) @@...@(1332) + # \ \ + # \ X... + # \ \ + # ++...+(1031)X.. .. + # + # Reconnect nodes and mine 220 more blocks on N1 + # N1 **...*(1020) **...**(1032) @@...@@@(1552) + # \ \ + # \ X... + # \ \ + # ++...+(1031)X.. .. + # + # N2 **...*(1020) **...**(1032) @@...@@@(1552) + # \ \ + # \ *...**(1320) + # \ \ + # ++...++(1044) .. + # + # N0 ********************(1032) @@...@@@(1552) + # \ + # *...**(1320) + + self.log.info("Test that we can rerequest a block we previously pruned if needed for a reorg") + self.reorg_back() + # Verify that N2 still has block 1033 on current chain (@), but not on main chain (*) + # Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to + # original main chain (*), but will require redownload of some blocks + # In order to have a peer we think we can download from, must also perform this invalidation + # on N0 and mine a new longest chain to trigger. + # Final result: + # N0 ********************(1032) **...****(1553) + # \ + # X@...@@@(1552) + # + # N2 **...*(1020) **...**(1032) **...****(1553) + # \ \ + # \ X@...@@@(1552) + # \ + # +.. 
+ # + # N1 doesn't change because 1033 on main chain (*) is invalid + + self.log.info("Test manual pruning with block indices") + self.manual_test(3, use_timestamp=False) + + self.log.info("Test manual pruning with timestamps") + self.manual_test(4, use_timestamp=True) + + self.log.info("Test wallet re-scan") + self.wallet_test() + + self.log.info("Done") + +if __name__ == '__main__': + PruneTest().main() diff --git a/test/functional/feature_rbf.py b/test/functional/feature_rbf.py new file mode 100755 index 000000000..ff7c2b23b --- /dev/null +++ b/test/functional/feature_rbf.py @@ -0,0 +1,582 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the RBF code.""" + +from decimal import Decimal + +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut +from test_framework.script import CScript, OP_DROP +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, satoshi_round + +MAX_REPLACEMENT_LIMIT = 100 + +def txToHex(tx): + return bytes_to_hex_str(tx.serialize()) + +def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])): + """Create a txout with a given amount and scriptPubKey + + Mines coins as needed. + + confirmed - txouts created will be confirmed in the blockchain; + unconfirmed otherwise. + """ + fee = 1*COIN + while node.getbalance() < satoshi_round((amount + fee)/COIN): + node.generate(100) + + new_addr = node.getnewaddress() + txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN)) + tx1 = node.getrawtransaction(txid, 1) + txid = int(txid, 16) + i = None + + for i, txout in enumerate(tx1['vout']): + if txout['scriptPubKey']['addresses'] == [new_addr]: + break + assert i is not None + + tx2 = CTransaction() + tx2.vin = [CTxIn(COutPoint(txid, i))] + tx2.vout = [CTxOut(amount, scriptPubKey)] + tx2.rehash() + + signed_tx = node.signrawtransactionwithwallet(txToHex(tx2)) + + txid = node.sendrawtransaction(signed_tx['hex'], True) + + # If requested, ensure txouts are confirmed. + if confirmed: + mempool_size = len(node.getrawmempool()) + while mempool_size > 0: + node.generate(1) + new_size = len(node.getrawmempool()) + # Error out if we have something stuck in the mempool, as this + # would likely be a bug. 
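+ # (For example, a transaction paying less than the minimum relay fee
+ # would never be mined, new_size would stop shrinking, and the assert
+ # below fails fast instead of the loop spinning forever.)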
+ assert(new_size < mempool_size) + mempool_size = new_size + + return COutPoint(int(txid, 16), 0) + + +class ReplaceByFeeTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.extra_args = [ + [ + "-maxorphantx=1000", + "-whitelist=127.0.0.1", + "-limitancestorcount=50", + "-limitancestorsize=101", + "-limitdescendantcount=200", + "-limitdescendantsize=101", + ], + [ + "-mempoolreplacement=0", + ], + ] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Leave IBD + self.nodes[0].generate(1) + + make_utxo(self.nodes[0], 1*COIN) + + # Ensure nodes are synced + self.sync_all() + + self.log.info("Running test simple doublespend...") + self.test_simple_doublespend() + + self.log.info("Running test doublespend chain...") + self.test_doublespend_chain() + + self.log.info("Running test doublespend tree...") + self.test_doublespend_tree() + + self.log.info("Running test replacement feeperkb...") + self.test_replacement_feeperkb() + + self.log.info("Running test spends of conflicting outputs...") + self.test_spends_of_conflicting_outputs() + + self.log.info("Running test new unconfirmed inputs...") + self.test_new_unconfirmed_inputs() + + self.log.info("Running test too many replacements...") + self.test_too_many_replacements() + + self.log.info("Running test opt-in...") + self.test_opt_in() + + self.log.info("Running test RPC...") + self.test_rpc() + + self.log.info("Running test prioritised transactions...") + self.test_prioritised_transactions() + + self.log.info("Passed") + + def test_simple_doublespend(self): + """Simple doublespend""" + tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + # make_utxo may have generated a bunch of blocks, so we need to sync + # before we can spend the coins generated, or else the resulting + # transactions might not be accepted by our peers. 
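+ # (A peer that has not yet seen the freshly generated blocks would treat
+ # a spend of those coins as an orphan rather than relaying it.)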
+ self.sync_all() + + tx1a = CTransaction() + tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1a_hex = txToHex(tx1a) + tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) + + self.sync_all() + + # Should fail because we haven't changed the fee + tx1b = CTransaction() + tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1b.vout = [CTxOut(1 * COIN, CScript([b'b' * 35]))] + tx1b_hex = txToHex(tx1b) + + # This will raise an exception due to insufficient fee + assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True) + # This will raise an exception due to transaction replacement being disabled + assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True) + + # Extra 0.1 BTC fee + tx1b = CTransaction() + tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))] + tx1b_hex = txToHex(tx1b) + # Replacement still disabled even with "enough fee" + assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True) + # Works when enabled + tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True) + + mempool = self.nodes[0].getrawmempool() + + assert (tx1a_txid not in mempool) + assert (tx1b_txid in mempool) + + assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid)) + + # Second node is running mempoolreplacement=0, will not replace originally-seen txn + mempool = self.nodes[1].getrawmempool() + assert tx1a_txid in mempool + assert tx1b_txid not in mempool + + def test_doublespend_chain(self): + """Doublespend of a long chain""" + + initial_nValue = 50*COIN + tx0_outpoint = make_utxo(self.nodes[0], initial_nValue) + + prevout = tx0_outpoint + remaining_value = initial_nValue + chain_txids = [] + while remaining_value > 10*COIN: + remaining_value -= 1*COIN + tx = CTransaction() + tx.vin = [CTxIn(prevout, nSequence=0)] + tx.vout = [CTxOut(remaining_value, CScript([1, OP_DROP] * 15 + [1]))] + tx_hex = txToHex(tx) + txid = self.nodes[0].sendrawtransaction(tx_hex, True) + chain_txids.append(txid) + prevout = COutPoint(int(txid, 16), 0) + + # Whether the double-spend is allowed is evaluated by including all + # child fees - 40 BTC - so this attempt is rejected. 
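+ # Illustrative arithmetic for the values used below, derived from the
+ # chain-building loop above: the chain starts from 50 BTC and each link
+ # pays 1 BTC in fees until 10 BTC remain, i.e. 40 transactions and
+ # 40 BTC of total child fees. The first double-spend keeps 20 BTC for
+ # itself, so it pays only a 30 BTC fee (< 40) and is rejected; the
+ # second keeps just 1 BTC, paying 49 BTC (> 40), and is accepted.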
+ dbl_tx = CTransaction() + dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)] + dbl_tx.vout = [CTxOut(initial_nValue - 30 * COIN, CScript([1] * 35))] + dbl_tx_hex = txToHex(dbl_tx) + + # This will raise an exception due to insufficient fee + assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True) + + # Accepted with sufficient fee + dbl_tx = CTransaction() + dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)] + dbl_tx.vout = [CTxOut(1 * COIN, CScript([1] * 35))] + dbl_tx_hex = txToHex(dbl_tx) + self.nodes[0].sendrawtransaction(dbl_tx_hex, True) + + mempool = self.nodes[0].getrawmempool() + for doublespent_txid in chain_txids: + assert(doublespent_txid not in mempool) + + def test_doublespend_tree(self): + """Doublespend of a big tree of transactions""" + + initial_nValue = 50*COIN + tx0_outpoint = make_utxo(self.nodes[0], initial_nValue) + + def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001*COIN, _total_txs=None): + if _total_txs is None: + _total_txs = [0] + if _total_txs[0] >= max_txs: + return + + txout_value = (initial_value - fee) // tree_width + if txout_value < fee: + return + + vout = [CTxOut(txout_value, CScript([i+1])) + for i in range(tree_width)] + tx = CTransaction() + tx.vin = [CTxIn(prevout, nSequence=0)] + tx.vout = vout + tx_hex = txToHex(tx) + + assert(len(tx.serialize()) < 100000) + txid = self.nodes[0].sendrawtransaction(tx_hex, True) + yield tx + _total_txs[0] += 1 + + txid = int(txid, 16) + + for i, txout in enumerate(tx.vout): + for x in branch(COutPoint(txid, i), txout_value, + max_txs, + tree_width=tree_width, fee=fee, + _total_txs=_total_txs): + yield x + + fee = int(0.0001*COIN) + n = MAX_REPLACEMENT_LIMIT + tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee)) + assert_equal(len(tree_txs), n) + + # Attempt double-spend, will fail because too little fee paid + dbl_tx = CTransaction() + dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)] + dbl_tx.vout = [CTxOut(initial_nValue - fee * n, CScript([1] * 35))] + dbl_tx_hex = txToHex(dbl_tx) + # This will raise an exception due to insufficient fee + assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True) + + # 1 BTC fee is enough + dbl_tx = CTransaction() + dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)] + dbl_tx.vout = [CTxOut(initial_nValue - fee * n - 1 * COIN, CScript([1] * 35))] + dbl_tx_hex = txToHex(dbl_tx) + self.nodes[0].sendrawtransaction(dbl_tx_hex, True) + + mempool = self.nodes[0].getrawmempool() + + for tx in tree_txs: + tx.rehash() + assert (tx.hash not in mempool) + + # Try again, but with more total transactions than the "max txs + # double-spent at once" anti-DoS limit. 
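+ # (BIP125 rule 5: a replacement may evict at most 100 mempool
+ # transactions - MAX_REPLACEMENT_LIMIT above - counting the conflicting
+ # transactions and all of their descendants. n = 101 and n = 200 below
+ # both exceed that cap, so the double-spends are rejected and the whole
+ # tree stays in the mempool.)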
+ for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2): + fee = int(0.0001*COIN) + tx0_outpoint = make_utxo(self.nodes[0], initial_nValue) + tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee)) + assert_equal(len(tree_txs), n) + + dbl_tx = CTransaction() + dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)] + dbl_tx.vout = [CTxOut(initial_nValue - 2 * fee * n, CScript([1] * 35))] + dbl_tx_hex = txToHex(dbl_tx) + # This will raise an exception + assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, True) + + for tx in tree_txs: + tx.rehash() + self.nodes[0].getrawtransaction(tx.hash) + + def test_replacement_feeperkb(self): + """Replacement requires fee-per-KB to be higher""" + tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + tx1a = CTransaction() + tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1a_hex = txToHex(tx1a) + self.nodes[0].sendrawtransaction(tx1a_hex, True) + + # Higher fee, but the fee per KB is much lower, so the replacement is + # rejected. + tx1b = CTransaction() + tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*999000]))] + tx1b_hex = txToHex(tx1b) + + # This will raise an exception due to insufficient fee + assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True) + + def test_spends_of_conflicting_outputs(self): + """Replacements that spend conflicting tx outputs are rejected""" + utxo1 = make_utxo(self.nodes[0], int(1.2*COIN)) + utxo2 = make_utxo(self.nodes[0], 3*COIN) + + tx1a = CTransaction() + tx1a.vin = [CTxIn(utxo1, nSequence=0)] + tx1a.vout = [CTxOut(int(1.1 * COIN), CScript([b'a' * 35]))] + tx1a_hex = txToHex(tx1a) + tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) + + tx1a_txid = int(tx1a_txid, 16) + + # Direct spend an output of the transaction we're replacing. + tx2 = CTransaction() + tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)] + tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)) + tx2.vout = tx1a.vout + tx2_hex = txToHex(tx2) + + # This will raise an exception + assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True) + + # Spend tx1a's output to test the indirect case. 
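+ # (tx1b below is a child of tx1a; the replacement tx2 then conflicts
+ # with tx1a while spending tx1b's output. A replacement may not spend
+ # any output created by a transaction it would evict, whether that
+ # transaction conflicts with it directly or as a descendant.)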
+ tx1b = CTransaction() + tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)] + tx1b.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1b_hex = txToHex(tx1b) + tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True) + tx1b_txid = int(tx1b_txid, 16) + + tx2 = CTransaction() + tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0), + CTxIn(COutPoint(tx1b_txid, 0))] + tx2.vout = tx1a.vout + tx2_hex = txToHex(tx2) + + # This will raise an exception + assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True) + + def test_new_unconfirmed_inputs(self): + """Replacements that add new unconfirmed inputs are rejected""" + confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN)) + unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False) + + tx1 = CTransaction() + tx1.vin = [CTxIn(confirmed_utxo)] + tx1.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1_hex = txToHex(tx1) + self.nodes[0].sendrawtransaction(tx1_hex, True) + + tx2 = CTransaction() + tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)] + tx2.vout = tx1.vout + tx2_hex = txToHex(tx2) + + # This will raise an exception + assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, True) + + def test_too_many_replacements(self): + """Replacements that evict too many transactions are rejected""" + # Try directly replacing more than MAX_REPLACEMENT_LIMIT + # transactions + + # Start by creating a single transaction with many outputs + initial_nValue = 10*COIN + utxo = make_utxo(self.nodes[0], initial_nValue) + fee = int(0.0001*COIN) + split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1)) + + outputs = [] + for i in range(MAX_REPLACEMENT_LIMIT+1): + outputs.append(CTxOut(split_value, CScript([1]))) + + splitting_tx = CTransaction() + splitting_tx.vin = [CTxIn(utxo, nSequence=0)] + splitting_tx.vout = outputs + splitting_tx_hex = txToHex(splitting_tx) + + txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, True) + txid = int(txid, 16) + + # Now spend each of those outputs individually + for i in range(MAX_REPLACEMENT_LIMIT+1): + tx_i = CTransaction() + tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)] + tx_i.vout = [CTxOut(split_value - fee, CScript([b'a' * 35]))] + tx_i_hex = txToHex(tx_i) + self.nodes[0].sendrawtransaction(tx_i_hex, True) + + # Now create doublespend of the whole lot; should fail. 
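+ # (Illustrative arithmetic: the splitting transaction's 101 outputs are
+ # each spent individually, so the double-spend below conflicts with 101
+ # mempool transactions, breaching the 100-transaction cap even though
+ # its fee of roughly 101 * 100 * fee = 1.01 BTC beats every original on
+ # both absolute fee and feerate.)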
+ # Need a big enough fee to cover all spending transactions and have + # a higher fee rate + double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1) + inputs = [] + for i in range(MAX_REPLACEMENT_LIMIT+1): + inputs.append(CTxIn(COutPoint(txid, i), nSequence=0)) + double_tx = CTransaction() + double_tx.vin = inputs + double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))] + double_tx_hex = txToHex(double_tx) + + # This will raise an exception + assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, True) + + # If we remove an input, it should pass + double_tx = CTransaction() + double_tx.vin = inputs[0:-1] + double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))] + double_tx_hex = txToHex(double_tx) + self.nodes[0].sendrawtransaction(double_tx_hex, True) + + def test_opt_in(self): + """Replacing should only work if orig tx opted in""" + tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + # Create a non-opting in transaction + tx1a = CTransaction() + tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1a_hex = txToHex(tx1a) + tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) + + # This transaction isn't shown as replaceable + assert_equal(self.nodes[0].getmempoolentry(tx1a_txid)['bip125-replaceable'], False) + + # Shouldn't be able to double-spend + tx1b = CTransaction() + tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))] + tx1b_hex = txToHex(tx1b) + + # This will raise an exception + assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, True) + + tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + # Create a different non-opting in transaction + tx2a = CTransaction() + tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)] + tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx2a_hex = txToHex(tx2a) + tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True) + + # Still shouldn't be able to double-spend + tx2b = CTransaction() + tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)] + tx2b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))] + tx2b_hex = txToHex(tx2b) + + # This will raise an exception + assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, True) + + # Now create a new transaction that spends from tx1a and tx2a + # opt-in on one of the inputs + # Transaction should be replaceable on either input + + tx1a_txid = int(tx1a_txid, 16) + tx2a_txid = int(tx2a_txid, 16) + + tx3a = CTransaction() + tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff), + CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)] + tx3a.vout = [CTxOut(int(0.9*COIN), CScript([b'c'])), CTxOut(int(0.9*COIN), CScript([b'd']))] + tx3a_hex = txToHex(tx3a) + + tx3a_txid = self.nodes[0].sendrawtransaction(tx3a_hex, True) + + # This transaction is shown as replaceable + assert_equal(self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'], True) + + tx3b = CTransaction() + tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)] + tx3b.vout = [CTxOut(int(0.5 * COIN), CScript([b'e' * 35]))] + tx3b_hex = txToHex(tx3b) + + tx3c = CTransaction() + tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)] + tx3c.vout = [CTxOut(int(0.5 * COIN), CScript([b'f' * 35]))] + tx3c_hex = txToHex(tx3c) + + self.nodes[0].sendrawtransaction(tx3b_hex, True) + # If tx3b was accepted, tx3c won't look like a 
replacement, + # but make sure it is accepted anyway + self.nodes[0].sendrawtransaction(tx3c_hex, True) + + def test_prioritised_transactions(self): + # Ensure that fee deltas used via prioritisetransaction are + # correctly used by replacement logic + + # 1. Check that feeperkb uses modified fees + tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + tx1a = CTransaction() + tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx1a_hex = txToHex(tx1a) + tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True) + + # Higher fee, but the actual fee per KB is much lower. + tx1b = CTransaction() + tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)] + tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))] + tx1b_hex = txToHex(tx1b) + + # Verify tx1b cannot replace tx1a. + assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True) + + # Use prioritisetransaction to set tx1a's fee to 0. + self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1*COIN)) + + # Now tx1b should be able to replace tx1a + tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True) + + assert(tx1b_txid in self.nodes[0].getrawmempool()) + + # 2. Check that absolute fee checks use modified fee. + tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN)) + + tx2a = CTransaction() + tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)] + tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))] + tx2a_hex = txToHex(tx2a) + self.nodes[0].sendrawtransaction(tx2a_hex, True) + + # Lower fee, but we'll prioritise it + tx2b = CTransaction() + tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)] + tx2b.vout = [CTxOut(int(1.01 * COIN), CScript([b'a' * 35]))] + tx2b.rehash() + tx2b_hex = txToHex(tx2b) + + # Verify tx2b cannot replace tx2a. 
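+ # (Fee arithmetic: tx2a pays 1.1 - 1.0 = 0.1 BTC, while tx2b pays only
+ # 1.1 - 1.01 = 0.09 BTC, so tx2b's unmodified absolute fee is lower and
+ # the replacement fails until the +0.1 BTC delta below lifts its
+ # modified fee to 0.19 BTC.)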
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True) + + # Now prioritise tx2b to have a higher modified fee + self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1*COIN)) + + # tx2b should now be accepted + tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True) + + assert(tx2b_txid in self.nodes[0].getrawmempool()) + + def test_rpc(self): + us0 = self.nodes[0].listunspent()[0] + ins = [us0] + outs = {self.nodes[0].getnewaddress() : Decimal(1.0000000)} + rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True) + rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False) + json0 = self.nodes[0].decoderawtransaction(rawtx0) + json1 = self.nodes[0].decoderawtransaction(rawtx1) + assert_equal(json0["vin"][0]["sequence"], 4294967293) + assert_equal(json1["vin"][0]["sequence"], 4294967295) + + rawtx2 = self.nodes[0].createrawtransaction([], outs) + frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True}) + frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False}) + + json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex']) + json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex']) + assert_equal(json0["vin"][0]["sequence"], 4294967293) + assert_equal(json1["vin"][0]["sequence"], 4294967294) + +if __name__ == '__main__': + ReplaceByFeeTest().main() diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py new file mode 100755 index 000000000..940b403f9 --- /dev/null +++ b/test/functional/feature_reindex.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test running bitcoind with -reindex and -reindex-chainstate options. + +- Start a single node and generate 3 blocks. +- Stop the node and restart it with -reindex. Verify that the node has reindexed up to block 3. +- Stop the node and restart it with -reindex-chainstate. Verify that the node has reindexed up to block 3. +""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import wait_until + +class ReindexTest(BitcoinTestFramework): + + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def reindex(self, justchainstate=False): + self.nodes[0].generatetoaddress(3, self.nodes[0].get_deterministic_priv_key().address) + blockcount = self.nodes[0].getblockcount() + self.stop_nodes() + extra_args = [["-reindex-chainstate" if justchainstate else "-reindex"]] + self.start_nodes(extra_args) + wait_until(lambda: self.nodes[0].getblockcount() == blockcount) + self.log.info("Success") + + def run_test(self): + self.reindex(False) + self.reindex(True) + self.reindex(False) + self.reindex(True) + +if __name__ == '__main__': + ReindexTest().main() diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py new file mode 100755 index 000000000..2cbfc26e8 --- /dev/null +++ b/test/functional/feature_segwit.py @@ -0,0 +1,652 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the SegWit changeover logic.""" + +from decimal import Decimal + +from test_framework.address import ( + key_to_p2pkh, + key_to_p2sh_p2wpkh, + key_to_p2wpkh, + program_to_witness, + script_to_p2sh, + script_to_p2sh_p2wsh, + script_to_p2wsh, +) +from test_framework.blocktools import witness_script, send_to_witness +from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, sha256, ToHex +from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE, OP_DROP +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes, hex_str_to_bytes, sync_blocks, try_rpc + +from io import BytesIO + +NODE_0 = 0 +NODE_2 = 2 +WIT_V0 = 0 +WIT_V1 = 1 + +def getutxo(txid): + utxo = {} + utxo["vout"] = 0 + utxo["txid"] = txid + return utxo + +def find_spendable_utxo(node, min_value): + for utxo in node.listunspent(query_options={'minimumAmount': min_value}): + if utxo['spendable']: + return utxo + + raise AssertionError("Unspent output equal or higher than %s not found" % min_value) + +class SegWitTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + # This test tests SegWit both pre and post-activation, so use the normal BIP9 activation. + self.extra_args = [ + [ + "-rpcserialversion=0", + "-vbparams=segwit:0:999999999999", + "-addresstype=legacy", + "-deprecatedrpc=addwitnessaddress", + ], + [ + "-blockversion=4", + "-rpcserialversion=1", + "-vbparams=segwit:0:999999999999", + "-addresstype=legacy", + "-deprecatedrpc=addwitnessaddress", + ], + [ + "-blockversion=536870915", + "-vbparams=segwit:0:999999999999", + "-addresstype=legacy", + "-deprecatedrpc=addwitnessaddress", + ], + ] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + super().setup_network() + connect_nodes(self.nodes[0], 2) + self.sync_all() + + def success_mine(self, node, txid, sign, redeem_script=""): + send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script) + block = node.generate(1) + assert_equal(len(node.getblock(block[0])["tx"]), 2) + sync_blocks(self.nodes) + + def skip_mine(self, node, txid, sign, redeem_script=""): + send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script) + block = node.generate(1) + assert_equal(len(node.getblock(block[0])["tx"]), 1) + sync_blocks(self.nodes) + + def fail_accept(self, node, error_msg, txid, sign, redeem_script=""): + assert_raises_rpc_error(-26, error_msg, send_to_witness, use_p2wsh=1, node=node, utxo=getutxo(txid), pubkey=self.pubkey[0], encode_p2sh=False, amount=Decimal("49.998"), sign=sign, insert_redeem_script=redeem_script) + + + def run_test(self): + self.nodes[0].generate(161) #block 161 + + self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork") + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) + tmpl = self.nodes[0].getblocktemplate({}) + assert(tmpl['sizelimit'] == 1000000) + assert('weightlimit' not in tmpl) + assert(tmpl['sigoplimit'] == 20000) + assert(tmpl['transactions'][0]['hash'] == txid) + assert(tmpl['transactions'][0]['sigops'] == 2) + tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) + assert(tmpl['sizelimit'] == 1000000) + assert('weightlimit' not in tmpl) + assert(tmpl['sigoplimit'] 
== 20000) + assert(tmpl['transactions'][0]['hash'] == txid) + assert(tmpl['transactions'][0]['sigops'] == 2) + self.nodes[0].generate(1) #block 162 + + balance_presetup = self.nodes[0].getbalance() + self.pubkey = [] + p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh + wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness + for i in range(3): + newaddress = self.nodes[i].getnewaddress() + self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"]) + multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG]) + p2sh_addr = self.nodes[i].addwitnessaddress(newaddress) + bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False) + p2sh_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'p2sh-segwit')['address'] + bip173_ms_addr = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]], '', 'bech32')['address'] + assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1])) + assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1])) + assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript)) + assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript)) + p2sh_ids.append([]) + wit_ids.append([]) + for v in range(2): + p2sh_ids[i].append([]) + wit_ids[i].append([]) + + for i in range(5): + for n in range(3): + for v in range(2): + wit_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], False, Decimal("49.999"))) + p2sh_ids[n][v].append(send_to_witness(v, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[n], True, Decimal("49.999"))) + + self.nodes[0].generate(1) #block 163 + sync_blocks(self.nodes) + + # Make sure all nodes recognize the transactions as theirs + assert_equal(self.nodes[0].getbalance(), balance_presetup - 60*50 + 20*Decimal("49.999") + 50) + assert_equal(self.nodes[1].getbalance(), 20*Decimal("49.999")) + assert_equal(self.nodes[2].getbalance(), 20*Decimal("49.999")) + + self.nodes[0].generate(260) #block 423 + sync_blocks(self.nodes) + + self.log.info("Verify witness txs are skipped for mining before the fork") + self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) #block 424 + self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) #block 425 + self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426 + self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427 + + self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid") + self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False) + self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False) + + self.nodes[2].generate(4) # blocks 428-431 + + self.log.info("Verify previous witness txs skipped for mining can now be mined") + assert_equal(len(self.nodes[2].getrawmempool()), 4) + block = self.nodes[2].generate(1) #block 432 (first block with new rules; 432 = 144 * 3) + sync_blocks(self.nodes) + assert_equal(len(self.nodes[2].getrawmempool()), 0) + segwit_tx_list = self.nodes[2].getblock(block[0])["tx"] + assert_equal(len(segwit_tx_list), 5) + + self.log.info("Verify default node can't accept txs with missing witness") + # unsigned, no scriptsig + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False) + 
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False) + # unsigned with redeem script + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0])) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0])) + + self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag") + assert(self.nodes[2].getblock(block[0], False) != self.nodes[0].getblock(block[0], False)) + assert(self.nodes[1].getblock(block[0], False) == self.nodes[2].getblock(block[0], False)) + for i in range(len(segwit_tx_list)): + tx = FromHex(CTransaction(), self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) + assert(self.nodes[2].getrawtransaction(segwit_tx_list[i]) != self.nodes[0].getrawtransaction(segwit_tx_list[i])) + assert(self.nodes[1].getrawtransaction(segwit_tx_list[i], 0) == self.nodes[2].getrawtransaction(segwit_tx_list[i])) + assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) != self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) + assert(self.nodes[1].getrawtransaction(segwit_tx_list[i]) == self.nodes[2].gettransaction(segwit_tx_list[i])["hex"]) + assert(self.nodes[0].getrawtransaction(segwit_tx_list[i]) == bytes_to_hex_str(tx.serialize_without_witness())) + + self.log.info("Verify witness txs without witness data are invalid after the fork") + self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', wit_ids[NODE_2][WIT_V0][2], sign=False) + self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', wit_ids[NODE_2][WIT_V1][2], sign=False) + self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch) (code 64)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) + self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness) (code 64)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) + + self.log.info("Verify default node can now use witness txs") + self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) #block 432 + self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) #block 433 + self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) #block 434 + self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) #block 435 + + self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork") + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) + tmpl = self.nodes[0].getblocktemplate({'rules':['segwit']}) + assert(tmpl['sizelimit'] >= 3999577) # actual maximum size is lower due to minimum mandatory non-witness data + assert(tmpl['weightlimit'] == 4000000) + assert(tmpl['sigoplimit'] == 80000) + assert(tmpl['transactions'][0]['txid'] == txid) + assert(tmpl['transactions'][0]['sigops'] == 8) + + self.nodes[0].generate(1) # Mine a block to clear the gbt cache + + self.log.info("Non-segwit miners are able to use GBT response after activation.") + # Create a 3-tx 
chain: tx1 (non-segwit input, paying to a segwit output) ->
+ # tx2 (segwit input, paying to a non-segwit output) ->
+ # tx3 (non-segwit input, paying to a non-segwit output).
+ # tx1 is allowed to appear in the block, but no others.
+ txid1 = send_to_witness(1, self.nodes[0], find_spendable_utxo(self.nodes[0], 50), self.pubkey[0], False, Decimal("49.996"))
+ hex_tx = self.nodes[0].gettransaction(txid1)['hex']
+ tx = FromHex(CTransaction(), hex_tx)
+ assert(tx.wit.is_null()) # This should not be a segwit input
+ assert(txid1 in self.nodes[0].getrawmempool())
+
+ # Now create tx2, which will spend from txid1.
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
+ tx.vout.append(CTxOut(int(49.99 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
+ tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex']
+ txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
+ tx = FromHex(CTransaction(), tx2_hex)
+ assert(not tx.wit.is_null())
+
+ # Now create tx3, which will spend from txid2.
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(int(txid2, 16), 0), b""))
+ tx.vout.append(CTxOut(int(49.95 * COIN), CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) # Huge fee
+ tx.calc_sha256()
+ txid3 = self.nodes[0].sendrawtransaction(ToHex(tx))
+ assert(tx.wit.is_null())
+ assert(txid3 in self.nodes[0].getrawmempool())
+
+ # Now try calling getblocktemplate() without segwit support.
+ template = self.nodes[0].getblocktemplate()
+
+ # Check that tx1 is the only one of the 3 transactions in the template.
+ template_txids = [ t['txid'] for t in template['transactions'] ]
+ assert(txid2 not in template_txids and txid3 not in template_txids)
+ assert(txid1 in template_txids)
+
+ # Check that running with segwit support results in all 3 being included.
+ template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
+ template_txids = [ t['txid'] for t in template['transactions'] ]
+ assert(txid1 in template_txids)
+ assert(txid2 in template_txids)
+ assert(txid3 in template_txids)
+
+ # Check that the wtxid is properly reported in the mempool entry
+ assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))
+
+ # Mine a block to clear the gbt cache again.
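+ # (getblocktemplate reuses its cached template until the chain tip
+ # changes, or until the mempool changes and a few seconds pass, so
+ # mining a block forces the next call to rebuild it from the current
+ # mempool view rather than the one used above.)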
+ self.nodes[0].generate(1) + + self.log.info("Verify behaviour of importaddress, addwitnessaddress and listunspent") + + # Some public keys to be used later + pubkeys = [ + "0363D44AABD0F1699138239DF2F042C3282C0671CC7A76826A55C8203D90E39242", # cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb + "02D3E626B3E616FC8662B489C123349FECBFC611E778E5BE739B257EAE4721E5BF", # cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97 + "04A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538A62F5BD8EC85C2477F39650BD391EA6250207065B2A81DA8B009FC891E898F0E", # 91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV + "02A47F2CBCEFFA7B9BCDA184E7D5668D3DA6F9079AD41E422FA5FD7B2D458F2538", # cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd + "036722F784214129FEB9E8129D626324F3F6716555B603FFE8300BBCB882151228", # cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66 + "0266A8396EE936BF6D99D17920DB21C6C7B1AB14C639D5CD72B300297E416FD2EC", # cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K + "0450A38BD7F0AC212FEBA77354A9B036A32E0F7C81FC4E0C5ADCA7C549C4505D2522458C2D9AE3CEFD684E039194B72C8A10F9CB9D4764AB26FCC2718D421D3B84", # 92h2XPssjBpsJN5CqSP7v9a7cf2kgDunBC6PDFwJHMACM1rrVBJ + ] + + # Import a compressed key and an uncompressed key, generate some multisig addresses + self.nodes[0].importprivkey("92e6XLo5jVAVwrQKPNTs93oQco8f8sDNBcpv73Dsrs397fQtFQn") + uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"] + self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR") + compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"] + assert ((self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] == False)) + assert ((self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] == True)) + + self.nodes[0].importpubkey(pubkeys[0]) + compressed_solvable_address = [key_to_p2pkh(pubkeys[0])] + self.nodes[0].importpubkey(pubkeys[1]) + compressed_solvable_address.append(key_to_p2pkh(pubkeys[1])) + self.nodes[0].importpubkey(pubkeys[2]) + uncompressed_solvable_address = [key_to_p2pkh(pubkeys[2])] + + spendable_anytime = [] # These outputs should be seen anytime after importprivkey and addmultisigaddress + spendable_after_importaddress = [] # These outputs should be seen after importaddress + solvable_after_importaddress = [] # These outputs should be seen after importaddress but not spendable + unsolvable_after_importaddress = [] # These outputs should be unsolvable after importaddress + solvable_anytime = [] # These outputs should be solvable after importpubkey + unseen_anytime = [] # These outputs should never be seen + + uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address']) + uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address']) + compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address']) + uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], uncompressed_solvable_address[0]])['address']) + compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address']) + compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], 
compressed_solvable_address[1]])['address']) + unknown_address = ["mtKKyoHabkk6e4ppT7NaM7THqPUt7AzPrT", "2NDP3jLWAFT8NDAiUa9qiE6oBt2awmMq7Dx"] + + # Test multisig_without_privkey + # We have 2 public keys without private keys, use addmultisigaddress to add to wallet. + # Money sent to P2SH of multisig of this should only be seen after importaddress with the BASE58 P2SH address. + + multisig_without_privkey_address = self.nodes[0].addmultisigaddress(2, [pubkeys[3], pubkeys[4]])['address'] + script = CScript([OP_2, hex_str_to_bytes(pubkeys[3]), hex_str_to_bytes(pubkeys[4]), OP_2, OP_CHECKMULTISIG]) + solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL])) + + for i in compressed_spendable_address: + v = self.nodes[0].getaddressinfo(i) + if (v['isscript']): + [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) + # p2sh multisig with compressed keys should always be spendable + spendable_anytime.extend([p2sh]) + # bare multisig can be watched and signed, but is not treated as ours + solvable_after_importaddress.extend([bare]) + # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after direct importaddress + spendable_after_importaddress.extend([p2wsh, p2sh_p2wsh]) + else: + [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) + # normal P2PKH and P2PK with compressed keys should always be spendable + spendable_anytime.extend([p2pkh, p2pk]) + # P2SH_P2PK, P2SH_P2PKH with compressed keys are spendable after direct importaddress + spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) + # P2WPKH and P2SH_P2WPKH with compressed keys should always be spendable + spendable_anytime.extend([p2wpkh, p2sh_p2wpkh]) + + for i in uncompressed_spendable_address: + v = self.nodes[0].getaddressinfo(i) + if (v['isscript']): + [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) + # p2sh multisig with uncompressed keys should always be spendable + spendable_anytime.extend([p2sh]) + # bare multisig can be watched and signed, but is not treated as ours + solvable_after_importaddress.extend([bare]) + # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen + unseen_anytime.extend([p2wsh, p2sh_p2wsh]) + else: + [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) + # normal P2PKH and P2PK with uncompressed keys should always be spendable + spendable_anytime.extend([p2pkh, p2pk]) + # P2SH_P2PK and P2SH_P2PKH are spendable after direct importaddress + spendable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) + # Witness output types with uncompressed keys are never seen + unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) + + for i in compressed_solvable_address: + v = self.nodes[0].getaddressinfo(i) + if (v['isscript']): + # Multisig without private is not seen after addmultisigaddress, but seen after importaddress + [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) + solvable_after_importaddress.extend([bare, p2sh, p2wsh, p2sh_p2wsh]) + else: + [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) + # normal P2PKH, P2PK, P2WPKH and P2SH_P2WPKH with compressed keys should always be seen + 
solvable_anytime.extend([p2pkh, p2pk, p2wpkh, p2sh_p2wpkh]) + # P2SH_P2PK, P2SH_P2PKH with compressed keys are seen after direct importaddress + solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) + + for i in uncompressed_solvable_address: + v = self.nodes[0].getaddressinfo(i) + if (v['isscript']): + [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v) + # Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress + solvable_after_importaddress.extend([bare, p2sh]) + # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen + unseen_anytime.extend([p2wsh, p2sh_p2wsh]) + else: + [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v) + # normal P2PKH and P2PK with uncompressed keys should always be seen + solvable_anytime.extend([p2pkh, p2pk]) + # P2SH_P2PK, P2SH_P2PKH with uncompressed keys are seen after direct importaddress + solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh]) + # Witness output types with uncompressed keys are never seen + unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]) + + op1 = CScript([OP_1]) + op0 = CScript([OP_0]) + # 2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe is the P2SH(P2PKH) version of mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V + unsolvable_address = ["mjoE3sSrb8ByYEvgnC3Aox86u1CHnfJA4V", "2N7MGY19ti4KDMSzRfPAssP6Pxyuxoi6jLe", script_to_p2sh(op1), script_to_p2sh(op0)] + unsolvable_address_key = hex_str_to_bytes("02341AEC7587A51CDE5279E0630A531AEA2615A9F80B17E8D9376327BAEAA59E3D") + unsolvablep2pkh = CScript([OP_DUP, OP_HASH160, hash160(unsolvable_address_key), OP_EQUALVERIFY, OP_CHECKSIG]) + unsolvablep2wshp2pkh = CScript([OP_0, sha256(unsolvablep2pkh)]) + p2shop0 = CScript([OP_HASH160, hash160(op0), OP_EQUAL]) + p2wshop1 = CScript([OP_0, sha256(op1)]) + unsolvable_after_importaddress.append(unsolvablep2pkh) + unsolvable_after_importaddress.append(unsolvablep2wshp2pkh) + unsolvable_after_importaddress.append(op1) # OP_1 will be imported as script + unsolvable_after_importaddress.append(p2wshop1) + unseen_anytime.append(op0) # OP_0 will be imported as P2SH address with no script provided + unsolvable_after_importaddress.append(p2shop0) + + spendable_txid = [] + solvable_txid = [] + spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime, 2)) + solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime, 1)) + self.mine_and_test_listunspent(spendable_after_importaddress + solvable_after_importaddress + unseen_anytime + unsolvable_after_importaddress, 0) + + importlist = [] + for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address: + v = self.nodes[0].getaddressinfo(i) + if (v['isscript']): + bare = hex_str_to_bytes(v['hex']) + importlist.append(bytes_to_hex_str(bare)) + importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(bare)]))) + else: + pubkey = hex_str_to_bytes(v['pubkey']) + p2pk = CScript([pubkey, OP_CHECKSIG]) + p2pkh = CScript([OP_DUP, OP_HASH160, hash160(pubkey), OP_EQUALVERIFY, OP_CHECKSIG]) + importlist.append(bytes_to_hex_str(p2pk)) + importlist.append(bytes_to_hex_str(p2pkh)) + importlist.append(bytes_to_hex_str(CScript([OP_0, hash160(pubkey)]))) + importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pk)]))) + 
importlist.append(bytes_to_hex_str(CScript([OP_0, sha256(p2pkh)]))) + + importlist.append(bytes_to_hex_str(unsolvablep2pkh)) + importlist.append(bytes_to_hex_str(unsolvablep2wshp2pkh)) + importlist.append(bytes_to_hex_str(op1)) + importlist.append(bytes_to_hex_str(p2wshop1)) + + for i in importlist: + # import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC + # exceptions and continue. + try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True) + + self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only + self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey + + spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) + solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) + self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) + self.mine_and_test_listunspent(unseen_anytime, 0) + + # addwitnessaddress should refuse to return a witness address if an uncompressed key is used + # note that no witness address should be returned by unsolvable addresses + for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address: + assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i) + + # addwitnessaddress should return a witness addresses even if keys are not in the wallet + self.nodes[0].addwitnessaddress(multisig_without_privkey_address) + + for i in compressed_spendable_address + compressed_solvable_address: + witaddress = self.nodes[0].addwitnessaddress(i) + # addwitnessaddress should return the same address if it is a known P2SH-witness address + assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress)) + + spendable_txid.append(self.mine_and_test_listunspent(spendable_anytime + spendable_after_importaddress, 2)) + solvable_txid.append(self.mine_and_test_listunspent(solvable_anytime + solvable_after_importaddress, 1)) + self.mine_and_test_listunspent(unsolvable_after_importaddress, 1) + self.mine_and_test_listunspent(unseen_anytime, 0) + + # Repeat some tests. 
This time we don't add witness scripts with importaddress
+ # Import a compressed key and an uncompressed key, generate some multisig addresses
+ self.nodes[0].importprivkey("927pw6RW8ZekycnXqBQ2JS5nPyo1yRfGNN8oq74HeddWSpafDJH")
+ uncompressed_spendable_address = ["mguN2vNSCEUh6rJaXoAVwY3YZwZvEmf5xi"]
+ self.nodes[0].importprivkey("cMcrXaaUC48ZKpcyydfFo8PxHAjpsYLhdsp6nmtB3E2ER9UUHWnw")
+ compressed_spendable_address = ["n1UNmpmbVUJ9ytXYXiurmGPQ3TRrXqPWKL"]
+
+ self.nodes[0].importpubkey(pubkeys[5])
+ compressed_solvable_address = [key_to_p2pkh(pubkeys[5])]
+ self.nodes[0].importpubkey(pubkeys[6])
+ uncompressed_solvable_address = [key_to_p2pkh(pubkeys[6])]
+
+ spendable_after_addwitnessaddress = [] # These outputs should be seen after addwitnessaddress
+ solvable_after_addwitnessaddress = [] # These outputs should be seen after addwitnessaddress but not spendable
+ solvable_anytime = [] # These outputs should be solvable after importpubkey
+ unseen_anytime = [] # These outputs should never be seen
+
+ uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], compressed_spendable_address[0]])['address'])
+ uncompressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [uncompressed_spendable_address[0], uncompressed_spendable_address[0]])['address'])
+ compressed_spendable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_spendable_address[0]])['address'])
+ uncompressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_solvable_address[0], uncompressed_solvable_address[0]])['address'])
+ compressed_solvable_address.append(self.nodes[0].addmultisigaddress(2, [compressed_spendable_address[0], compressed_solvable_address[0]])['address'])
+
+ premature_witaddress = []
+
+ for i in compressed_spendable_address:
+ v = self.nodes[0].getaddressinfo(i)
+ if (v['isscript']):
+ [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
+ # P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
+ spendable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
+ premature_witaddress.append(script_to_p2sh(p2wsh))
+ else:
+ [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
+ # P2WPKH and P2SH_P2WPKH with compressed keys are always spendable
+ spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
+
+ for i in uncompressed_spendable_address + uncompressed_solvable_address:
+ v = self.nodes[0].getaddressinfo(i)
+ if (v['isscript']):
+ [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
+ # P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
+ unseen_anytime.extend([p2wsh, p2sh_p2wsh])
+ else:
+ [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
+ # P2WPKH and P2SH_P2WPKH with uncompressed keys are never seen
+ unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
+
+ for i in compressed_solvable_address:
+ v = self.nodes[0].getaddressinfo(i)
+ if (v['isscript']):
+ # P2WSH multisig without private keys is seen after addwitnessaddress
+ [bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
+ solvable_after_addwitnessaddress.extend([p2wsh, p2sh_p2wsh])
+ premature_witaddress.append(script_to_p2sh(p2wsh))
+ else:
+ [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh] = self.p2pkh_address_to_script(v)
+ # P2WPKH and P2SH_P2WPKH with compressed keys are always solvable
+ solvable_anytime.extend([p2wpkh, p2sh_p2wpkh])
+
+ self.mine_and_test_listunspent(spendable_anytime, 2)
+ self.mine_and_test_listunspent(solvable_anytime, 1)
+ self.mine_and_test_listunspent(spendable_after_addwitnessaddress + solvable_after_addwitnessaddress + unseen_anytime, 0)
+
+ # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
+ # note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
+ # premature_witaddress entries are not accepted until the script is added with addwitnessaddress first
+ for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
+ # This will raise an exception
+ assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
+
+ # after importaddress it should pass addwitnessaddress
+ v = self.nodes[0].getaddressinfo(compressed_solvable_address[1])
+ self.nodes[0].importaddress(v['hex'], "", False, True)
+ for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
+ witaddress = self.nodes[0].addwitnessaddress(i)
+ assert_equal(witaddress, self.nodes[0].addwitnessaddress(witaddress))
+
+ spendable_txid.append(self.mine_and_test_listunspent(spendable_after_addwitnessaddress + spendable_anytime, 2))
+ solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress + solvable_anytime, 1))
+ self.mine_and_test_listunspent(unseen_anytime, 0)
+
+ # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
+ v1_addr = program_to_witness(1, [3, 5])
+ v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])], {v1_addr: 1})
+ v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
+ assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
+ assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
+
+ # Check that spendable outputs are really spendable
+ self.create_and_mine_tx_from_txids(spendable_txid)
+
+ # import all the private keys so solvable addresses become spendable
+ self.nodes[0].importprivkey("cPiM8Ub4heR9NBYmgVzJQiUH1if44GSBGiqaeJySuL2BKxubvgwb")
+ self.nodes[0].importprivkey("cPpAdHaD6VoYbW78kveN2bsvb45Q7G5PhaPApVUGwvF8VQ9brD97")
+ self.nodes[0].importprivkey("91zqCU5B9sdWxzMt1ca3VzbtVm2YM6Hi5Rxn4UDtxEaN9C9nzXV")
+ self.nodes[0].importprivkey("cPQFjcVRpAUBG8BA9hzr2yEzHwKoMgLkJZBBtK9vJnvGJgMjzTbd")
+ self.nodes[0].importprivkey("cQGtcm34xiLjB1v7bkRa4V3aAc9tS2UTuBZ1UnZGeSeNy627fN66")
+ self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
+ self.create_and_mine_tx_from_txids(solvable_txid)
+
+ # Test that importing native P2WPKH/P2WSH scripts works
+ for use_p2wsh in [False, True]:
+ if use_p2wsh:
+ scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
+ transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
+ else:
+ scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
+ transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"
+
+ self.nodes[1].importaddress(scriptPubKey, "", False)
+ rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
+ rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
+
+
+    def mine_and_test_listunspent(self, script_list, ismine):
+        utxo = find_spendable_utxo(self.nodes[0], 50)
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout'])))
+        for i in script_list:
+            tx.vout.append(CTxOut(10000000, i))
+        tx.rehash()
+        signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
+        txid = self.nodes[0].sendrawtransaction(signresults, True)
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+        watchcount = 0
+        spendcount = 0
+        for i in self.nodes[0].listunspent():
+            if (i['txid'] == txid):
+                watchcount += 1
+                if i['spendable']:
+                    spendcount += 1
+        if (ismine == 2):
+            assert_equal(spendcount, len(script_list))
+        elif (ismine == 1):
+            assert_equal(watchcount, len(script_list))
+            assert_equal(spendcount, 0)
+        else:
+            assert_equal(watchcount, 0)
+        return txid
+
+    def p2sh_address_to_script(self, v):
+        bare = CScript(hex_str_to_bytes(v['hex']))
+        p2sh = CScript(hex_str_to_bytes(v['scriptPubKey']))
+        p2wsh = CScript([OP_0, sha256(bare)])
+        p2sh_p2wsh = CScript([OP_HASH160, hash160(p2wsh), OP_EQUAL])
+        return [bare, p2sh, p2wsh, p2sh_p2wsh]
+
+    def p2pkh_address_to_script(self, v):
+        pubkey = hex_str_to_bytes(v['pubkey'])
+        p2wpkh = CScript([OP_0, hash160(pubkey)])
+        p2sh_p2wpkh = CScript([OP_HASH160, hash160(p2wpkh), OP_EQUAL])
+        p2pk = CScript([pubkey, OP_CHECKSIG])
+        p2pkh = CScript(hex_str_to_bytes(v['scriptPubKey']))
+        p2sh_p2pk = CScript([OP_HASH160, hash160(p2pk), OP_EQUAL])
+        p2sh_p2pkh = CScript([OP_HASH160, hash160(p2pkh), OP_EQUAL])
+        p2wsh_p2pk = CScript([OP_0, sha256(p2pk)])
+        p2wsh_p2pkh = CScript([OP_0, sha256(p2pkh)])
+        p2sh_p2wsh_p2pk = CScript([OP_HASH160, hash160(p2wsh_p2pk), OP_EQUAL])
+        p2sh_p2wsh_p2pkh = CScript([OP_HASH160, hash160(p2wsh_p2pkh), OP_EQUAL])
+        return [p2wpkh, p2sh_p2wpkh, p2pk, p2pkh, p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh]
+
+    def create_and_mine_tx_from_txids(self, txids, success=True):
+        tx = CTransaction()
+        for i in txids:
+            txtmp = CTransaction()
+            txraw = self.nodes[0].getrawtransaction(i)
+            f = BytesIO(hex_str_to_bytes(txraw))
+            txtmp.deserialize(f)
+            for j in range(len(txtmp.vout)):
+                tx.vin.append(CTxIn(COutPoint(int(i, 16), j)))
+        tx.vout.append(CTxOut(0, CScript()))
+        tx.rehash()
+        signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
+        self.nodes[0].sendrawtransaction(signresults, True)
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+
+if __name__ == '__main__':
+    SegWitTest().main()
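# Aside: a sketch of the txid byte-order convention the helpers above rely on
# (the txid below is a made-up placeholder). Displayed txids are big-endian hex,
# while COutPoint wants the integer whose 32-byte little-endian serialization is
# the on-wire hash:
txid = "aa" * 31 + "01"
as_int = int(txid, 16)
wire_bytes = as_int.to_bytes(32, 'little')  # byte-reversed relative to display
assert wire_bytes[::-1].hex() == txid       # reversing recovers the displayed form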
+"""Test the -uacomment option.""" + +import re + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_node import ErrorMatch +from test_framework.util import assert_equal + + +class UacommentTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def run_test(self): + self.log.info("test multiple -uacomment") + test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1] + assert_equal(test_uacomment, "(testnode0)") + + self.restart_node(0, ["-uacomment=foo"]) + foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1] + assert_equal(foo_uacomment, "(testnode0; foo)") + + self.log.info("test -uacomment max length") + self.stop_node(0) + expected = "Error: Total length of network version string \([0-9]+\) exceeds maximum length \(256\). Reduce the number or size of uacomments." + self.nodes[0].assert_start_raises_init_error(["-uacomment=" + 'a' * 256], expected, match=ErrorMatch.FULL_REGEX) + + self.log.info("test -uacomment unsafe characters") + for unsafe_char in ['/', ':', '(', ')']: + expected = "Error: User Agent comment \(" + re.escape(unsafe_char) + "\) contains unsafe characters." + self.nodes[0].assert_start_raises_init_error(["-uacomment=" + unsafe_char], expected, match=ErrorMatch.FULL_REGEX) + + +if __name__ == '__main__': + UacommentTest().main() diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py new file mode 100755 index 000000000..88df61cab --- /dev/null +++ b/test/functional/feature_versionbits_warning.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test version bits warning system. + +Generate chains with block versions that appear to be signalling unknown +soft-forks, and test that warning alerts are generated. +""" +import os +import re + +from test_framework.blocktools import create_block, create_coinbase +from test_framework.messages import msg_block +from test_framework.mininode import P2PInterface, mininode_lock +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import wait_until + +VB_PERIOD = 144 # versionbits period length for regtest +VB_THRESHOLD = 108 # versionbits activation threshold for regtest +VB_TOP_BITS = 0x20000000 +VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment +VB_UNKNOWN_VERSION = VB_TOP_BITS | (1 << VB_UNKNOWN_BIT) + +WARN_UNKNOWN_RULES_MINED = "Unknown block versions being mined! 
+
+WARN_UNKNOWN_RULES_MINED = "Unknown block versions being mined! It's possible unknown rules are in effect"
+WARN_UNKNOWN_RULES_ACTIVE = "unknown new rules activated (versionbit {})".format(VB_UNKNOWN_BIT)
+VB_PATTERN = re.compile("Warning: unknown new rules activated.*versionbit")
+
+class VersionBitsWarningTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def setup_network(self):
+        self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
+        # Open and close to create zero-length file
+        with open(self.alert_filename, 'w', encoding='utf8'):
+            pass
+        self.extra_args = [["-alertnotify=echo %s >> \"" + self.alert_filename + "\""]]
+        self.setup_nodes()
+
+    def send_blocks_with_version(self, peer, numblocks, version):
+        """Send numblocks blocks to peer with version set"""
+        tip = self.nodes[0].getbestblockhash()
+        height = self.nodes[0].getblockcount()
+        block_time = self.nodes[0].getblockheader(tip)["time"] + 1
+        tip = int(tip, 16)
+
+        for _ in range(numblocks):
+            block = create_block(tip, create_coinbase(height + 1), block_time)
+            block.nVersion = version
+            block.solve()
+            peer.send_message(msg_block(block))
+            block_time += 1
+            height += 1
+            tip = block.sha256
+        peer.sync_with_ping()
+
+    def versionbits_in_alert_file(self):
+        """Test that the versionbits warning has been written to the alert file."""
+        alert_text = open(self.alert_filename, 'r', encoding='utf8').read()
+        return VB_PATTERN.search(alert_text) is not None
+
+    def run_test(self):
+        node = self.nodes[0]
+        node.add_p2p_connection(P2PInterface())
+
+        node_deterministic_address = node.get_deterministic_priv_key().address
+        # Mine one period worth of blocks
+        node.generatetoaddress(VB_PERIOD, node_deterministic_address)
+
+        self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")
+        # Build one period of blocks with <VB_THRESHOLD blocks signaling some unknown bit
+        self.send_blocks_with_version(node.p2p, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
+        node.generatetoaddress(VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
+
+        # Check that we're not getting any versionbit-related errors in get*info()
+        assert(not VB_PATTERN.match(node.getmininginfo()["warnings"]))
+        assert(not VB_PATTERN.match(node.getnetworkinfo()["warnings"]))
+
+        self.log.info("Check that there is a warning if >50 blocks in the last 100 were an unknown version")
+        # Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
+        self.send_blocks_with_version(node.p2p, VB_THRESHOLD, VB_UNKNOWN_VERSION)
+        node.generatetoaddress(VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
+
+        # Check that get*info() shows the 51/100 unknown block version error.
+        assert(WARN_UNKNOWN_RULES_MINED in node.getmininginfo()["warnings"])
+        assert(WARN_UNKNOWN_RULES_MINED in node.getnetworkinfo()["warnings"])
+
+        self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")
+        # Mine a period worth of expected blocks so the generic block-version warning
+        # is cleared. This will move the versionbit state to ACTIVE.
+        node.generatetoaddress(VB_PERIOD, node_deterministic_address)
+
+        # Stop-start the node. This is required because bitcoind will only warn once about unknown versions or unknown rules activating.
+        self.restart_node(0)
+
+        # Generating one block guarantees that we'll get out of IBD
+        node.generatetoaddress(1, node_deterministic_address)
+        wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'], timeout=10, lock=mininode_lock)
+        # Generating one more block will be enough to generate an error.
+        node.generatetoaddress(1, node_deterministic_address)
+        # Check that get*info() shows the versionbits unknown rules warning
+        assert(WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"])
+        assert(WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"])
+        # Check that the alert file shows the versionbits unknown rules warning
+        wait_until(lambda: self.versionbits_in_alert_file(), timeout=60)
+
+if __name__ == '__main__':
+    VersionBitsWarningTest().main()
diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py
new file mode 100755
index 000000000..f311858be
--- /dev/null
+++ b/test/functional/interface_bitcoin_cli.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test bitcoin-cli"""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
+
+class TestBitcoinCli(BitcoinTestFramework):
+
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_wallet()
+
+    def run_test(self):
+        """Main test logic"""
+
+        cli_response = self.nodes[0].cli("-version").send_cli()
+        assert("Bitcoin Core RPC client version" in cli_response)
+
+        self.log.info("Compare responses from getwalletinfo RPC and `bitcoin-cli getwalletinfo`")
+        cli_response = self.nodes[0].cli.getwalletinfo()
+        rpc_response = self.nodes[0].getwalletinfo()
+        assert_equal(cli_response, rpc_response)
+
+        self.log.info("Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`")
+        cli_response = self.nodes[0].cli.getblockchaininfo()
+        rpc_response = self.nodes[0].getblockchaininfo()
+        assert_equal(cli_response, rpc_response)
+
+        user, password = get_auth_cookie(self.nodes[0].datadir)
+
+        self.log.info("Test -stdinrpcpass option")
+        assert_equal(0, self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input=password).getblockcount())
+        assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input="foo").echo)
+
+        self.log.info("Test -stdin and -stdinrpcpass")
+        assert_equal(["foo", "bar"], self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input=password + "\nfoo\nbar").echo())
+        assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input="foo").echo)
+
+        self.log.info("Test connecting to a non-existing server")
+        assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo)
+
+        self.log.info("Test connecting with non-existing RPC cookie file")
+        assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli('-rpccookiefile=does-not-exist', '-rpcpassword=').echo)
+
+        self.log.info("Make sure that -getinfo with arguments fails")
+        assert_raises_process_error(1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help)
+
+        self.log.info("Compare responses from `bitcoin-cli -getinfo` and the RPCs data is retrieved from.")
+        cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
+        wallet_info = self.nodes[0].getwalletinfo()
+        network_info = self.nodes[0].getnetworkinfo()
+        blockchain_info = self.nodes[0].getblockchaininfo()
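# Aside: the assertions below boil down to "-getinfo is a read-only collage of
# getwalletinfo/getnetworkinfo/getblockchaininfo". The same check could be
# expressed table-driven (field_map is illustrative, not exhaustive):
field_map = {
    'version': network_info,
    'protocolversion': network_info,
    'walletversion': wallet_info,
    'balance': wallet_info,
    'blocks': blockchain_info,
}
for field, source in field_map.items():
    assert_equal(cli_get_info[field], source[field])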
+        assert_equal(cli_get_info['version'], network_info['version'])
+        assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
+        assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
+        assert_equal(cli_get_info['balance'], wallet_info['balance'])
+        assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
+        assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
+        assert_equal(cli_get_info['connections'], network_info['connections'])
+        assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
+        assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
+        assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
+        assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
+        assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
+        assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
+        assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
+        # unlocked_until is not tested because the wallet is not encrypted
+
+if __name__ == '__main__':
+    TestBitcoinCli().main()
diff --git a/test/functional/interface_http.py b/test/functional/interface_http.py
new file mode 100755
index 000000000..e4b86f9e1
--- /dev/null
+++ b/test/functional/interface_http.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the RPC HTTP basics."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, str_to_b64str
+
+import http.client
+import urllib.parse
+
+class HTTPBasicsTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.num_nodes = 3
+
+    def setup_network(self):
+        self.setup_nodes()
+
+    def run_test(self):
+
+        #################################################
+        # lowlevel check for http persistent connection #
+        #################################################
+        url = urllib.parse.urlparse(self.nodes[0].url)
+        authpair = url.username + ':' + url.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock is not None)  # according to HTTP/1.1 the connection must still be open!
+
+        # send 2nd request without closing connection
+        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)  # must also respond with a correct JSON-RPC message
+        assert(conn.sock is not None)  # according to HTTP/1.1 the connection must still be open!
+        conn.close()
+
+        # the same should hold if we add keep-alive explicitly, since it is the default behaviour
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock is not None)  # according to HTTP/1.1 the connection must still be open!
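# Aside: the request/response cycle exercised here, factored into a sketch of a
# helper (name and shape are illustrative, not part of the test): a JSON-RPC
# call is just an authenticated HTTP POST with a JSON body, and reusing the same
# `conn` object across calls is exactly what keep-alive means.
def rpc_call(conn, method, headers):
    conn.request('POST', '/', '{"method": "%s"}' % method, headers)
    return conn.getresponse().read()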
+
+        # send 2nd request without closing connection
+        conn.request('POST', '/', '{"method": "getchaintips"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)  # must also respond with a correct JSON-RPC message
+        assert(conn.sock is not None)  # according to HTTP/1.1 the connection must still be open!
+        conn.close()
+
+        # now do the same with "Connection: close"
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "close"}
+
+        conn = http.client.HTTPConnection(url.hostname, url.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock is None)  # now the connection must be closed after the response
+
+        # node1 (2nd node) is running with disabled keep-alive option
+        urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
+        authpair = urlNode1.username + ':' + urlNode1.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+
+        # node2 (third node) is running with standard keep-alive parameters, which means keep-alive is on
+        urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
+        authpair = urlNode2.username + ':' + urlNode2.password
+        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
+
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+        out1 = conn.getresponse().read()
+        assert(b'"error":null' in out1)
+        assert(conn.sock is not None)  # connection must still be open because bitcoind uses keep-alive by default
+
+        # Check excessive request size
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('GET', '/' + ('x'*1000), '', headers)
+        out1 = conn.getresponse()
+        assert_equal(out1.status, http.client.NOT_FOUND)
+
+        conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
+        conn.connect()
+        conn.request('GET', '/' + ('x'*10000), '', headers)
+        out1 = conn.getresponse()
+        assert_equal(out1.status, http.client.BAD_REQUEST)
+
+
+if __name__ == '__main__':
+    HTTPBasicsTest().main()
diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py
new file mode 100755
index 000000000..afa9de580
--- /dev/null
+++ b/test/functional/interface_rest.py
@@ -0,0 +1,303 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the REST API.""" + +import binascii +from decimal import Decimal +from enum import Enum +from io import BytesIO +import json +from struct import pack, unpack + +import http.client +import urllib.parse + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than, + assert_greater_than_or_equal, + hex_str_to_bytes, +) + +class ReqType(Enum): + JSON = 1 + BIN = 2 + HEX = 3 + +class RetType(Enum): + OBJ = 1 + BYTES = 2 + JSON = 3 + +def filter_output_indices_by_value(vouts, value): + for vout in vouts: + if vout['value'] == value: + yield vout['n'] + +class RESTTest (BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.extra_args = [["-rest"], []] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def test_rest_request(self, uri, http_method='GET', req_type=ReqType.JSON, body='', status=200, ret_type=RetType.JSON): + rest_uri = '/rest' + uri + if req_type == ReqType.JSON: + rest_uri += '.json' + elif req_type == ReqType.BIN: + rest_uri += '.bin' + elif req_type == ReqType.HEX: + rest_uri += '.hex' + + conn = http.client.HTTPConnection(self.url.hostname, self.url.port) + self.log.debug('%s %s %s', http_method, rest_uri, body) + if http_method == 'GET': + conn.request('GET', rest_uri) + elif http_method == 'POST': + conn.request('POST', rest_uri, body) + resp = conn.getresponse() + + assert_equal(resp.status, status) + + if ret_type == RetType.OBJ: + return resp + elif ret_type == RetType.BYTES: + return resp.read() + elif ret_type == RetType.JSON: + return json.loads(resp.read().decode('utf-8'), parse_float=Decimal) + + def run_test(self): + self.url = urllib.parse.urlparse(self.nodes[0].url) + self.log.info("Mine blocks and send Bitcoin to node 1") + + # Random address so node1's balance doesn't increase + not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ" + + self.nodes[0].generate(1) + self.sync_all() + self.nodes[1].generatetoaddress(100, not_related_address) + self.sync_all() + + assert_equal(self.nodes[0].getbalance(), 50) + + txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) + self.sync_all() + self.nodes[1].generatetoaddress(1, not_related_address) + self.sync_all() + bb_hash = self.nodes[0].getbestblockhash() + + assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) + + self.log.info("Load the transaction using the /tx URI") + + json_obj = self.test_rest_request("/tx/{}".format(txid)) + spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get the vin to later check for utxo (should be spent by then) + # get n of 0.1 outpoint + n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) + spending = (txid, n) + + self.log.info("Query an unspent TXO using the /getutxos URI") + + json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) + + # Check chainTip response + assert_equal(json_obj['chaintipHash'], bb_hash) + + # Make sure there is one utxo + assert_equal(len(json_obj['utxos']), 1) + assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1')) + + self.log.info("Query a spent TXO using the /getutxos URI") + + json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) + + # Check chainTip response + assert_equal(json_obj['chaintipHash'], bb_hash) + + # Make sure there is no utxo in the response because this outpoint has been spent + assert_equal(len(json_obj['utxos']), 0) + + # Check bitmap + assert_equal(json_obj['bitmap'], "0") + + 
self.log.info("Query two TXOs using the /getutxos URI") + + json_obj = self.test_rest_request("/getutxos/{}-{}/{}-{}".format(*(spending + spent))) + + assert_equal(len(json_obj['utxos']), 1) + assert_equal(json_obj['bitmap'], "10") + + self.log.info("Query the TXOs using the /getutxos URI with a binary response") + + bin_request = b'\x01\x02' + for txid, n in [spending, spent]: + bin_request += hex_str_to_bytes(txid) + bin_request += pack("i", n) + + bin_response = self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES) + output = BytesIO(bin_response) + chain_height, = unpack("i", output.read(4)) + response_hash = binascii.hexlify(output.read(32)[::-1]).decode('ascii') + + assert_equal(bb_hash, response_hash) # check if getutxo's chaintip during calculation was fine + assert_equal(chain_height, 102) # chain height must be 102 + + self.log.info("Test the /getutxos URI with and without /checkmempool") + # Create a transaction, check that it's found with /checkmempool, but + # not found without. Then confirm the transaction and check that it's + # found with or without /checkmempool. + + # do a tx and don't sync + txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) + json_obj = self.test_rest_request("/tx/{}".format(txid)) + # get the spent output to later check for utxo (should be spent by then) + spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) + # get n of 0.1 outpoint + n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1')) + spending = (txid, n) + + json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) + assert_equal(len(json_obj['utxos']), 0) + + json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) + assert_equal(len(json_obj['utxos']), 1) + + json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent)) + assert_equal(len(json_obj['utxos']), 1) + + json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent)) + assert_equal(len(json_obj['utxos']), 0) + + self.nodes[0].generate(1) + self.sync_all() + + json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending)) + assert_equal(len(json_obj['utxos']), 1) + + json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending)) + assert_equal(len(json_obj['utxos']), 1) + + # Do some invalid requests + self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ) + self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ) + self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ) + + # Test limits + long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)]) + self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ) + + long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)]) + self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200) + + self.nodes[0].generate(1) # generate block to not affect upcoming tests + self.sync_all() + + self.log.info("Test the /block and /headers URIs") + bb_hash = self.nodes[0].getbestblockhash() + + # Check binary format + response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) + 
assert_greater_than(int(response.getheader('content-length')), 80) + response_bytes = response.read() + + # Compare with block header + response_header = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ) + assert_equal(int(response_header.getheader('content-length')), 80) + response_header_bytes = response_header.read() + assert_equal(response_bytes[:80], response_header_bytes) + + # Check block hex format + response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) + assert_greater_than(int(response_hex.getheader('content-length')), 160) + response_hex_bytes = response_hex.read().strip(b'\n') + assert_equal(binascii.hexlify(response_bytes), response_hex_bytes) + + # Compare with hex block header + response_header_hex = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) + assert_greater_than(int(response_header_hex.getheader('content-length')), 160) + response_header_hex_bytes = response_header_hex.read(160) + assert_equal(binascii.hexlify(response_bytes[:80]), response_header_hex_bytes) + + # Check json format + block_json_obj = self.test_rest_request("/block/{}".format(bb_hash)) + assert_equal(block_json_obj['hash'], bb_hash) + + # Compare with json block header + json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash)) + assert_equal(len(json_obj), 1) # ensure that there is one header in the json response + assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same + + # Compare with normal RPC block response + rpc_block_json = self.nodes[0].getblock(bb_hash) + for key in ['hash', 'confirmations', 'height', 'version', 'merkleroot', 'time', 'nonce', 'bits', 'difficulty', 'chainwork', 'previousblockhash']: + assert_equal(json_obj[0][key], rpc_block_json[key]) + + # See if we can get 5 headers in one response + self.nodes[1].generate(5) + self.sync_all() + json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash)) + assert_equal(len(json_obj), 5) # now we should have 5 header objects + + self.log.info("Test the /tx URI") + + tx_hash = block_json_obj['tx'][0]['txid'] + json_obj = self.test_rest_request("/tx/{}".format(tx_hash)) + assert_equal(json_obj['txid'], tx_hash) + + # Check hex format response + hex_response = self.test_rest_request("/tx/{}".format(tx_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ) + assert_greater_than_or_equal(int(hex_response.getheader('content-length')), + json_obj['size']*2) + + self.log.info("Test tx inclusion in the /mempool and /block URIs") + + # Make 3 tx and mine them on node 1 + txs = [] + txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) + txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) + txs.append(self.nodes[0].sendtoaddress(not_related_address, 11)) + self.sync_all() + + # Check that there are exactly 3 transactions in the TX memory pool before generating the block + json_obj = self.test_rest_request("/mempool/info") + assert_equal(json_obj['size'], 3) + # the size of the memory pool should be greater than 3x ~100 bytes + assert_greater_than(json_obj['bytes'], 300) + + # Check that there are our submitted transactions in the TX memory pool + json_obj = self.test_rest_request("/mempool/contents") + for i, tx in enumerate(txs): + assert tx in json_obj + assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2]) + assert_equal(json_obj[tx]['depends'], txs[i - 1:i]) + + # Now mine the transactions + newblockhash = 
self.nodes[1].generate(1) + self.sync_all() + + # Check if the 3 tx show up in the new block + json_obj = self.test_rest_request("/block/{}".format(newblockhash[0])) + non_coinbase_txs = {tx['txid'] for tx in json_obj['tx'] + if 'coinbase' not in tx['vin'][0]} + assert_equal(non_coinbase_txs, set(txs)) + + # Check the same but without tx details + json_obj = self.test_rest_request("/block/notxdetails/{}".format(newblockhash[0])) + for tx in txs: + assert tx in json_obj['tx'] + + self.log.info("Test the /chaininfo URI") + + bb_hash = self.nodes[0].getbestblockhash() + + json_obj = self.test_rest_request("/chaininfo") + assert_equal(json_obj['bestblockhash'], bb_hash) + +if __name__ == '__main__': + RESTTest().main() diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py new file mode 100755 index 000000000..1c518eab7 --- /dev/null +++ b/test/functional/interface_zmq.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the ZMQ notification interface.""" +import struct + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.messages import CTransaction +from test_framework.util import ( + assert_equal, + bytes_to_hex_str, + hash256, +) +from io import BytesIO + + +class ZMQSubscriber: + def __init__(self, socket, topic): + self.sequence = 0 + self.socket = socket + self.topic = topic + + import zmq + self.socket.setsockopt(zmq.SUBSCRIBE, self.topic) + + def receive(self): + topic, body, seq = self.socket.recv_multipart() + # Topic should match the subscriber topic. + assert_equal(topic, self.topic) + # Sequence should be incremental. + assert_equal(struct.unpack('= than minrelaytxfee + txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee}) + txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex']) + assert_raises_rpc_error(-26, "mempool min fee not met", self.nodes[0].sendrawtransaction, txFS['hex']) + if __name__ == '__main__': MempoolLimitTest().main() diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py index 6d3849bf5..9336547a6 100755 --- a/test/functional/mempool_packages.py +++ b/test/functional/mempool_packages.py @@ -1,12 +1,14 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test descendant package tracking code.""" +from decimal import Decimal + +from test_framework.messages import COIN from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import * -from test_framework.mininode import COIN +from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round, sync_blocks, sync_mempools MAX_ANCESTORS = 25 MAX_DESCENDANTS = 25 @@ -16,6 +18,9 @@ def set_test_params(self): self.num_nodes = 2 self.extra_args = [["-maxorphantx=1000"], ["-maxorphantx=1000", "-limitancestorcount=5"]] + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + # Build a transaction that spends parent_txid:vout # Return amount sent def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs): @@ -25,14 +30,14 @@ def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs): for i in range(num_outputs): outputs[node.getnewaddress()] = send_value rawtx = node.createrawtransaction(inputs, outputs) - signedtx = node.signrawtransaction(rawtx) + signedtx = node.signrawtransactionwithwallet(rawtx) txid = node.sendrawtransaction(signedtx['hex']) fulltx = node.getrawtransaction(txid, 1) assert(len(fulltx['vout']) == num_outputs) # make sure we didn't generate a change output return (txid, send_value) def run_test(self): - ''' Mine some blocks and have them mature. ''' + # Mine some blocks and have them mature. self.nodes[0].generate(101) utxo = self.nodes[0].listunspent(10) txid = utxo[0]['txid'] @@ -47,7 +52,7 @@ def run_test(self): value = sent_value chain.append(txid) - # Check mempool has MAX_ANCESTORS transactions in it, and descendant + # Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor # count and fees should look correct mempool = self.nodes[0].getrawmempool(True) assert_equal(len(mempool), MAX_ANCESTORS) @@ -55,6 +60,10 @@ def run_test(self): descendant_fees = 0 descendant_size = 0 + ancestor_size = sum([mempool[tx]['size'] for tx in mempool]) + ancestor_count = MAX_ANCESTORS + ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool]) + descendants = [] ancestors = list(chain) for x in reversed(chain): @@ -66,19 +75,51 @@ def run_test(self): assert_equal(mempool[x]['descendantcount'], descendant_count) descendant_fees += mempool[x]['fee'] assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']) + assert_equal(mempool[x]['fees']['base'], mempool[x]['fee']) + assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee']) assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN) + assert_equal(mempool[x]['fees']['descendant'], descendant_fees) descendant_size += mempool[x]['size'] assert_equal(mempool[x]['descendantsize'], descendant_size) descendant_count += 1 + # Check that ancestor calculations are correct + assert_equal(mempool[x]['ancestorcount'], ancestor_count) + assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN) + assert_equal(mempool[x]['ancestorsize'], ancestor_size) + ancestor_size -= mempool[x]['size'] + ancestor_fees -= mempool[x]['fee'] + ancestor_count -= 1 + + # Check that parent/child list is correct + assert_equal(mempool[x]['spentby'], descendants[-1:]) + assert_equal(mempool[x]['depends'], ancestors[-2:-1]) + # Check that getmempooldescendants is correct assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x))) + + # Check getmempooldescendants verbose output is correct + for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items(): + assert_equal(dinfo['depends'], 
[chain[chain.index(descendant)-1]]) + if dinfo['descendantcount'] > 1: + assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]]) + else: + assert_equal(dinfo['spentby'], []) descendants.append(x) # Check that getmempoolancestors is correct ancestors.remove(x) assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x))) + # Check that getmempoolancestors verbose output is correct + for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items(): + assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]]) + if ainfo['ancestorcount'] > 1: + assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]]) + else: + assert_equal(ainfo['depends'], []) + + # Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True) assert_equal(len(v_ancestors), len(chain)-1) @@ -99,8 +140,9 @@ def run_test(self): ancestor_fees = 0 for x in chain: ancestor_fees += mempool[x]['fee'] + assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001')) assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000) - + # Undo the prioritisetransaction for later tests self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000) @@ -112,6 +154,7 @@ def run_test(self): descendant_fees = 0 for x in reversed(chain): descendant_fees += mempool[x]['fee'] + assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001')) assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000) # Adding one more transaction on to the chain should fail. @@ -137,7 +180,9 @@ def run_test(self): descendant_fees += mempool[x]['fee'] if (x == chain[-1]): assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002)) + assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee']+satoshi_round(0.00002)) assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000) + assert_equal(mempool[x]['fees']['descendant'], descendant_fees+satoshi_round(0.00002)) # TODO: check that node1's mempool is as expected @@ -149,6 +194,7 @@ def run_test(self): vout = utxo[1]['vout'] transaction_package = [] + tx_children = [] # First create one parent tx with 10 children (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10) parent_transaction = txid @@ -159,11 +205,17 @@ def run_test(self): for i in range(MAX_DESCENDANTS - 1): utxo = transaction_package.pop(0) (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10) + if utxo['txid'] == parent_transaction: + tx_children.append(txid) for j in range(10): transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value}) mempool = self.nodes[0].getrawmempool(True) assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS) + assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children)) + + for child in tx_children: + assert_equal(mempool[child]['depends'], [parent_transaction]) # Sending one more chained transaction will fail utxo = transaction_package.pop(0) @@ -205,13 +257,13 @@ def run_test(self): for i in range(2): outputs[self.nodes[0].getnewaddress()] = send_value rawtx = self.nodes[0].createrawtransaction(inputs, outputs) - signedtx = self.nodes[0].signrawtransaction(rawtx) + signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) txid = self.nodes[0].sendrawtransaction(signedtx['hex']) tx0_id = txid value = send_value # Create tx1 - (tx1_id,
tx1_value) = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1) + tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1) # Create tx2-7 vout = 1 @@ -229,10 +281,10 @@ def run_test(self): inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ] outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) - signedtx = self.nodes[0].signrawtransaction(rawtx) + signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) txid = self.nodes[0].sendrawtransaction(signedtx['hex']) sync_mempools(self.nodes) - + # Now try to disconnect the tip on each node... self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash()) self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index 01f65b137..b4e9d967f 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2017 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool persistence. @@ -28,18 +28,28 @@ - Restart node0 with -persistmempool. Verify that it has 5 transactions in its mempool. This tests that -persistmempool=0 does not overwrite a previously valid mempool stored on disk. + - Remove node0 mempool.dat and verify savemempool RPC recreates it + and verify that node1 can load it and has 5 transactions in its + mempool. + - Verify that savemempool throws when the RPC is called if + node1 can't write to disk. """ +from decimal import Decimal +import os import time from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import * +from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until class MempoolPersistTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 3 self.extra_args = [[], ["-persistmempool=0"], []] + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + def run_test(self): chain_height = self.nodes[0].getblockcount() assert_equal(chain_height, 200) @@ -51,21 +61,30 @@ def run_test(self): self.log.debug("Send 5 transactions from node2 (to its own address)") for i in range(5): self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10")) + node2_balance = self.nodes[2].getbalance() self.sync_all() self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools") assert_equal(len(self.nodes[0].getrawmempool()), 5) assert_equal(len(self.nodes[1].getrawmempool()), 5) - self.log.debug("Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.") + self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. 
Verify that node2 calculates its balance correctly after loading wallet transactions.") self.stop_nodes() + # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later + # Also don't store the mempool, to keep the datadir clean + self.start_node(1, extra_args=["-persistmempool=0"]) self.start_node(0) - self.start_node(1) + self.start_node(2) # Give bitcoind a second to reload the mempool - time.sleep(1) - wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5) + wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1) + wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1) + # The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now: assert_equal(len(self.nodes[1].getrawmempool()), 0) + # Verify accounting of mempool transactions after restart is correct + self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet + assert_equal(node2_balance, self.nodes[2].getbalance()) + self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.") self.stop_nodes() self.start_node(0, extra_args=["-persistmempool=0"]) @@ -78,5 +97,27 @@ def run_test(self): self.start_node(0) wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5) + mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat') + mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat') + self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it") + os.remove(mempooldat0) + self.nodes[0].savemempool() + assert os.path.isfile(mempooldat0) + + self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions") + os.rename(mempooldat0, mempooldat1) + self.stop_nodes() + self.start_node(1, extra_args=[]) + wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5) + + self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails") + # to test the exception we are creating a tmp folder called mempool.dat.new + # which is an implementation detail that could change and break this test + mempooldotnew1 = mempooldat1 + '.new' + os.mkdir(mempooldotnew1) + assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool) + os.rmdir(mempooldotnew1) + + if __name__ == '__main__': MempoolPersistTest().main() diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index 2803371f5..123f0b4c2 100755 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mempool re-org scenarios. @@ -8,14 +8,17 @@ that spend (directly or indirectly) coinbase transactions. 
""" +from test_framework.blocktools import create_raw_transaction from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import * +from test_framework.util import assert_equal, assert_raises_rpc_error + -# Create one-input, one-output, no-fee transaction: class MempoolCoinbaseTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 - self.extra_args = [["-checkmempool"]] * 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() alert_filename = None # Set by setup_network @@ -39,16 +42,16 @@ def run_test(self): # and make sure the mempool code behaves correctly. b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ] coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ] - spend_101_raw = create_tx(self.nodes[0], coinbase_txids[1], node1_address, 49.99) - spend_102_raw = create_tx(self.nodes[0], coinbase_txids[2], node0_address, 49.99) - spend_103_raw = create_tx(self.nodes[0], coinbase_txids[3], node0_address, 49.99) + spend_101_raw = create_raw_transaction(self.nodes[0], coinbase_txids[1], node1_address, amount=49.99) + spend_102_raw = create_raw_transaction(self.nodes[0], coinbase_txids[2], node0_address, amount=49.99) + spend_103_raw = create_raw_transaction(self.nodes[0], coinbase_txids[3], node0_address, amount=49.99) # Create a transaction which is time-locked to two blocks in the future timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99}) # Set the time lock timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1) timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000" - timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"] + timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"] # This will raise an exception because the timelock transaction is too immature to spend assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx) @@ -57,11 +60,11 @@ def run_test(self): spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw) self.nodes[0].generate(1) # Time-locked transaction is still too immature to spend - assert_raises_rpc_error(-26,'non-final', self.nodes[0].sendrawtransaction, timelock_tx) + assert_raises_rpc_error(-26, 'non-final', self.nodes[0].sendrawtransaction, timelock_tx) # Create 102_1 and 103_1: - spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 49.98) - spend_103_1_raw = create_tx(self.nodes[0], spend_103_id, node1_address, 49.98) + spend_102_1_raw = create_raw_transaction(self.nodes[0], spend_102_id, node1_address, amount=49.98) + spend_103_1_raw = create_raw_transaction(self.nodes[0], spend_103_id, node1_address, amount=49.98) # Broadcast and mine 103_1: spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw) diff --git a/test/functional/mempool_resurrect.py b/test/functional/mempool_resurrect.py new file mode 100755 index 000000000..d035ca907 --- /dev/null +++ b/test/functional/mempool_resurrect.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test resurrection of mined transactions when the blockchain is re-organized.""" + +from test_framework.blocktools import create_raw_transaction +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + + +class MempoolCoinbaseTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + node0_address = self.nodes[0].getnewaddress() + # Spend block 1/2/3's coinbase transactions + # Mine a block. + # Create three more transactions, spending the spends + # Mine another block. + # ... make sure all the transactions are confirmed + # Invalidate both blocks + # ... make sure all the transactions are put back in the mempool + # Mine a new block + # ... make sure all the transactions are confirmed again. + + b = [self.nodes[0].getblockhash(n) for n in range(1, 4)] + coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b] + spends1_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids] + spends1_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw] + + blocks = [] + blocks.extend(self.nodes[0].generate(1)) + + spends2_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.98) for txid in spends1_id] + spends2_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw] + + blocks.extend(self.nodes[0].generate(1)) + + # mempool should be empty, all txns confirmed + assert_equal(set(self.nodes[0].getrawmempool()), set()) + for txid in spends1_id+spends2_id: + tx = self.nodes[0].gettransaction(txid) + assert(tx["confirmations"] > 0) + + # Use invalidateblock to re-org back; all transactions should + # end up unconfirmed and back in the mempool + for node in self.nodes: + node.invalidateblock(blocks[0]) + + # mempool should be empty, all txns confirmed + assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id)) + for txid in spends1_id+spends2_id: + tx = self.nodes[0].gettransaction(txid) + assert(tx["confirmations"] == 0) + + # Generate another block, they should all get mined + self.nodes[0].generate(1) + # mempool should be empty, all txns confirmed + assert_equal(set(self.nodes[0].getrawmempool()), set()) + for txid in spends1_id+spends2_id: + tx = self.nodes[0].gettransaction(txid) + assert(tx["confirmations"] > 0) + + +if __name__ == '__main__': + MempoolCoinbaseTest().main() diff --git a/test/functional/mempool_spend_coinbase.py b/test/functional/mempool_spend_coinbase.py new file mode 100755 index 000000000..854d506f0 --- /dev/null +++ b/test/functional/mempool_spend_coinbase.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test spending coinbase transactions. + +The coinbase transaction in block N can appear in block +N+100... so is valid in the mempool when the best block +height is N+99. +This test makes sure coinbase spends that will be mature +in the next block are accepted into the memory pool, +but less mature coinbase spends are NOT. 
+""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.blocktools import create_raw_transaction +from test_framework.util import assert_equal, assert_raises_rpc_error + + +class MempoolSpendCoinbaseTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + chain_height = self.nodes[0].getblockcount() + assert_equal(chain_height, 200) + node0_address = self.nodes[0].getnewaddress() + + # Coinbase at height chain_height-100+1 ok in mempool, should + # get mined. Coinbase at height chain_height-100+2 is + # is too immature to spend. + b = [self.nodes[0].getblockhash(n) for n in range(101, 103)] + coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b] + spends_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids] + + spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0]) + + # coinbase at height 102 should be too immature to spend + assert_raises_rpc_error(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1]) + + # mempool should have just spend_101: + assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ]) + + # mine a block, spend_101 should get confirmed + self.nodes[0].generate(1) + assert_equal(set(self.nodes[0].getrawmempool()), set()) + + # ... and now height 102 can be spent: + spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1]) + assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ]) + +if __name__ == '__main__': + MempoolSpendCoinbaseTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py new file mode 100755 index 000000000..ff55ea552 --- /dev/null +++ b/test/functional/mining_basic.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test mining RPCs + +- getmininginfo +- getblocktemplate proposal mode +- submitblock""" + +import copy +from decimal import Decimal + +from test_framework.blocktools import create_coinbase +from test_framework.messages import ( + CBlock, + CBlockHeader, +) +from test_framework.mininode import ( + P2PDataStore, +) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + bytes_to_hex_str as b2x, +) + + +def assert_template(node, block, expect, rehash=True): + if rehash: + block.hashMerkleRoot = block.calc_merkle_root() + rsp = node.getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'}) + assert_equal(rsp, expect) + +class MiningTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.setup_clean_chain = False + + def run_test(self): + node = self.nodes[0] + + def assert_submitblock(block, result_str_1, result_str_2=None): + block.solve() + result_str_2 = result_str_2 or 'duplicate-invalid' + assert_equal(result_str_1, node.submitblock(hexdata=b2x(block.serialize()))) + assert_equal(result_str_2, node.submitblock(hexdata=b2x(block.serialize()))) + + self.log.info('getmininginfo') + mining_info = node.getmininginfo() + assert_equal(mining_info['blocks'], 200) + assert_equal(mining_info['chain'], 'regtest') + assert_equal(mining_info['currentblocktx'], 0) + assert_equal(mining_info['currentblockweight'], 0) + assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10')) + assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334')) + assert_equal(mining_info['pooledtx'], 0) + + # Mine a block to leave initial block download + node.generatetoaddress(1, node.get_deterministic_priv_key().address) + tmpl = node.getblocktemplate() + self.log.info("getblocktemplate: Test capability advertised") + assert 'proposal' in tmpl['capabilities'] + assert 'coinbasetxn' not in tmpl + + coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1) + # sequence numbers must not be max for nLockTime to have effect + coinbase_tx.vin[0].nSequence = 2 ** 32 - 2 + coinbase_tx.rehash() + + block = CBlock() + block.nVersion = tmpl["version"] + block.hashPrevBlock = int(tmpl["previousblockhash"], 16) + block.nTime = tmpl["curtime"] + block.nBits = int(tmpl["bits"], 16) + block.nNonce = 0 + block.vtx = [coinbase_tx] + + self.log.info("getblocktemplate: Test valid block") + assert_template(node, block, None) + + self.log.info("submitblock: Test block decode failure") + assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, b2x(block.serialize()[:-15])) + + self.log.info("getblocktemplate: Test bad input hash for coinbase transaction") + bad_block = copy.deepcopy(block) + bad_block.vtx[0].vin[0].prevout.hash += 1 + bad_block.vtx[0].rehash() + assert_template(node, bad_block, 'bad-cb-missing') + + self.log.info("submitblock: Test invalid coinbase transaction") + assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize())) + + self.log.info("getblocktemplate: Test truncated final transaction") + assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'}) + + self.log.info("getblocktemplate: Test duplicate transaction") + bad_block = copy.deepcopy(block) + bad_block.vtx.append(bad_block.vtx[0]) + assert_template(node, bad_block, 'bad-txns-duplicate') + assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate') + 
+ self.log.info("getblocktemplate: Test invalid transaction") + bad_block = copy.deepcopy(block) + bad_tx = copy.deepcopy(bad_block.vtx[0]) + bad_tx.vin[0].prevout.hash = 255 + bad_tx.rehash() + bad_block.vtx.append(bad_tx) + assert_template(node, bad_block, 'bad-txns-inputs-missingorspent') + assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent') + + self.log.info("getblocktemplate: Test nonfinal transaction") + bad_block = copy.deepcopy(block) + bad_block.vtx[0].nLockTime = 2 ** 32 - 1 + bad_block.vtx[0].rehash() + assert_template(node, bad_block, 'bad-txns-nonfinal') + assert_submitblock(bad_block, 'bad-txns-nonfinal') + + self.log.info("getblocktemplate: Test bad tx count") + # The tx count is immediately after the block header + TX_COUNT_OFFSET = 80 + bad_block_sn = bytearray(block.serialize()) + assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1) + bad_block_sn[TX_COUNT_OFFSET] += 1 + assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal'}) + + self.log.info("getblocktemplate: Test bad bits") + bad_block = copy.deepcopy(block) + bad_block.nBits = 469762303 # impossible in the real world + assert_template(node, bad_block, 'bad-diffbits') + + self.log.info("getblocktemplate: Test bad merkle root") + bad_block = copy.deepcopy(block) + bad_block.hashMerkleRoot += 1 + assert_template(node, bad_block, 'bad-txnmrklroot', False) + assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot') + + self.log.info("getblocktemplate: Test bad timestamps") + bad_block = copy.deepcopy(block) + bad_block.nTime = 2 ** 31 - 1 + assert_template(node, bad_block, 'time-too-new') + assert_submitblock(bad_block, 'time-too-new', 'time-too-new') + bad_block.nTime = 0 + assert_template(node, bad_block, 'time-too-old') + assert_submitblock(bad_block, 'time-too-old', 'time-too-old') + + self.log.info("getblocktemplate: Test not best block") + bad_block = copy.deepcopy(block) + bad_block.hashPrevBlock = 123 + assert_template(node, bad_block, 'inconclusive-not-best-prevblk') + assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found') + + self.log.info('submitheader tests') + assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * 80)) + assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * 78)) + assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata='ff' * 80)) + + block.nTime += 1 + block.solve() + + def chain_tip(b_hash, *, status='headers-only', branchlen=1): + return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 'status': status} + + assert chain_tip(block.hash) not in node.getchaintips() + node.submitheader(hexdata=b2x(block.serialize())) + assert chain_tip(block.hash) in node.getchaintips() + node.submitheader(hexdata=b2x(CBlockHeader(block).serialize())) # Noop + assert chain_tip(block.hash) in node.getchaintips() + + bad_block_root = copy.deepcopy(block) + bad_block_root.hashMerkleRoot += 2 + bad_block_root.solve() + assert chain_tip(bad_block_root.hash) not in node.getchaintips() + node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize())) + assert chain_tip(bad_block_root.hash) in node.getchaintips() + # Should still reject invalid blocks, even if we have the header: + assert_equal(node.submitblock(hexdata=b2x(bad_block_root.serialize())), 'bad-txnmrklroot') + assert_equal(node.submitblock(hexdata=b2x(bad_block_root.serialize())), 'bad-txnmrklroot') + assert 
chain_tip(bad_block_root.hash) in node.getchaintips() + # We know the header for this invalid block, so should just return early without error: + node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize())) + assert chain_tip(bad_block_root.hash) in node.getchaintips() + + bad_block_lock = copy.deepcopy(block) + bad_block_lock.vtx[0].nLockTime = 2**32 - 1 + bad_block_lock.vtx[0].rehash() + bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root() + bad_block_lock.solve() + assert_equal(node.submitblock(hexdata=b2x(bad_block_lock.serialize())), 'bad-txns-nonfinal') + assert_equal(node.submitblock(hexdata=b2x(bad_block_lock.serialize())), 'duplicate-invalid') + # Build a "good" block on top of the submitted bad block + bad_block2 = copy.deepcopy(block) + bad_block2.hashPrevBlock = bad_block_lock.sha256 + bad_block2.solve() + assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block2).serialize()))) + + # Should reject invalid header right away + bad_block_time = copy.deepcopy(block) + bad_block_time.nTime = 1 + bad_block_time.solve() + assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block_time).serialize()))) + + # Should ask for the block from a p2p node, if they announce the header as well: + node.add_p2p_connection(P2PDataStore()) + node.p2p.wait_for_getheaders(timeout=5) # Drop the first getheaders + node.p2p.send_blocks_and_test(blocks=[block], node=node) + # Must be active now: + assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips() + + # Building a few blocks should give the same results + node.generatetoaddress(10, node.get_deterministic_priv_key().address) + assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block_time).serialize()))) + assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block2).serialize()))) + node.submitheader(hexdata=b2x(CBlockHeader(block).serialize())) + node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize())) + assert_equal(node.submitblock(hexdata=b2x(block.serialize())), 'duplicate') # valid + + +if __name__ == '__main__': + MiningTest().main() diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py new file mode 100755 index 000000000..9a3c15a4a --- /dev/null +++ b/test/functional/mining_getblocktemplate_longpoll.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test longpolling with getblocktemplate.""" + +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import get_rpc_proxy, random_transaction + +import threading + +class LongpollThread(threading.Thread): + def __init__(self, node): + threading.Thread.__init__(self) + # query current longpollid + template = node.getblocktemplate() + self.longpollid = template['longpollid'] + # create a new connection to the node, we can't use the same + # connection from two threads + self.node = get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir) + + def run(self): + self.node.getblocktemplate({'longpollid':self.longpollid}) + +class GetBlockTemplateLPTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.") + self.nodes[0].generate(10) + template = self.nodes[0].getblocktemplate() + longpollid = template['longpollid'] + # longpollid should not change between successive invocations if nothing else happens + template2 = self.nodes[0].getblocktemplate() + assert(template2['longpollid'] == longpollid) + + # Test 1: test that the longpolling wait if we do nothing + thr = LongpollThread(self.nodes[0]) + thr.start() + # check that thread still lives + thr.join(5) # wait 5 seconds or until thread exits + assert(thr.is_alive()) + + # Test 2: test that longpoll will terminate if another node generates a block + self.nodes[1].generate(1) # generate a block on another node + # check that thread will exit now that new transaction entered mempool + thr.join(5) # wait 5 seconds or until thread exits + assert(not thr.is_alive()) + + # Test 3: test that longpoll will terminate if we generate a block ourselves + thr = LongpollThread(self.nodes[0]) + thr.start() + self.nodes[0].generate(1) # generate a block on another node + thr.join(5) # wait 5 seconds or until thread exits + assert(not thr.is_alive()) + + # Test 4: test that introducing a new transaction into the mempool will terminate the longpoll + thr = LongpollThread(self.nodes[0]) + thr.start() + # generate a random transaction and submit it + min_relay_fee = self.nodes[0].getnetworkinfo()["relayfee"] + # min_relay_fee is fee per 1000 bytes, which should be more than enough. + (txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), min_relay_fee, Decimal("0.001"), 20) + # after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned + thr.join(60 + 20) + assert(not thr.is_alive()) + +if __name__ == '__main__': + GetBlockTemplateLPTest().main() diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py new file mode 100755 index 000000000..c5ddee56f --- /dev/null +++ b/test/functional/mining_prioritisetransaction.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the prioritisetransaction mining RPC.""" + +import time + +from test_framework.messages import COIN, MAX_BLOCK_BASE_SIZE +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, create_confirmed_utxos, create_lots_of_big_transactions, gen_return_txouts + +class PrioritiseTransactionTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.extra_args = [["-printpriority=1"], ["-printpriority=1"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Test `prioritisetransaction` required parameters + assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction) + assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '') + assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0) + + # Test `prioritisetransaction` invalid extra parameters + assert_raises_rpc_error(-1, "prioritisetransaction", self.nodes[0].prioritisetransaction, '', 0, 0, 0) + + # Test `prioritisetransaction` invalid `txid` + assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].prioritisetransaction, txid='foo', fee_delta=0) + assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000')", self.nodes[0].prioritisetransaction, txid='Zd1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000', fee_delta=0) + + # Test `prioritisetransaction` invalid `dummy` + txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' + assert_raises_rpc_error(-1, "JSON value is not a number as expected", self.nodes[0].prioritisetransaction, txid, 'foo', 0) + assert_raises_rpc_error(-8, "Priority is no longer supported, dummy argument to prioritisetransaction must be 0.", self.nodes[0].prioritisetransaction, txid, 1, 0) + + # Test `prioritisetransaction` invalid `fee_delta` + assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].prioritisetransaction, txid=txid, fee_delta='foo') + + self.txouts = gen_return_txouts() + self.relayfee = self.nodes[0].getnetworkinfo()['relayfee'] + + utxo_count = 90 + utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], utxo_count) + base_fee = self.relayfee*100 # our transactions are smaller than 100kb + txids = [] + + # Create 3 batches of transactions at 3 different fee rate levels + range_size = utxo_count // 3 + for i in range(3): + txids.append([]) + start_range = i * range_size + end_range = start_range + range_size + txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[start_range:end_range], end_range - start_range, (i+1)*base_fee) + + # Make sure that the size of each group of transactions exceeds + # MAX_BLOCK_BASE_SIZE -- otherwise the test needs to be revised to create + # more transactions. 
+ mempool = self.nodes[0].getrawmempool(True) + sizes = [0, 0, 0] + for i in range(3): + for j in txids[i]: + assert(j in mempool) + sizes[i] += mempool[j]['size'] + assert(sizes[i] > MAX_BLOCK_BASE_SIZE) # Fail => raise utxo_count + + # add a fee delta to something in the cheapest bucket and make sure it gets mined + # also check that a different entry in the cheapest bucket is NOT mined + self.nodes[0].prioritisetransaction(txid=txids[0][0], fee_delta=int(3*base_fee*COIN)) + + self.nodes[0].generate(1) + + mempool = self.nodes[0].getrawmempool() + self.log.info("Assert that prioritised transaction was mined") + assert(txids[0][0] not in mempool) + assert(txids[0][1] in mempool) + + high_fee_tx = None + for x in txids[2]: + if x not in mempool: + high_fee_tx = x + + # Something high-fee should have been mined! + assert(high_fee_tx != None) + + # Add a prioritisation before a tx is in the mempool (de-prioritising a + # high-fee transaction so that it's now low fee). + self.nodes[0].prioritisetransaction(txid=high_fee_tx, fee_delta=-int(2*base_fee*COIN)) + + # Add everything back to mempool + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # Check to make sure our high fee rate tx is back in the mempool + mempool = self.nodes[0].getrawmempool() + assert(high_fee_tx in mempool) + + # Now verify the modified-high feerate transaction isn't mined before + # the other high fee transactions. Keep mining until our mempool has + # decreased by all the high fee size that we calculated above. + while (self.nodes[0].getmempoolinfo()['bytes'] > sizes[0] + sizes[1]): + self.nodes[0].generate(1) + + # High fee transaction should not have been mined, but other high fee rate + # transactions should have been. + mempool = self.nodes[0].getrawmempool() + self.log.info("Assert that de-prioritised transaction is still in mempool") + assert(high_fee_tx in mempool) + for x in txids[2]: + if (x != high_fee_tx): + assert(x not in mempool) + + # Create a free transaction. Should be rejected. + utxo_list = self.nodes[0].listunspent() + assert(len(utxo_list) > 0) + utxo = utxo_list[0] + + inputs = [] + outputs = {} + inputs.append({"txid" : utxo["txid"], "vout" : utxo["vout"]}) + outputs[self.nodes[0].getnewaddress()] = utxo["amount"] + raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) + tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"] + tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"] + + # This will raise an exception due to min relay fee not being met + assert_raises_rpc_error(-26, "min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex) + assert(tx_id not in self.nodes[0].getrawmempool()) + + # This is a less than 1000-byte transaction, so just set the fee + # to be the minimum for a 1000-byte transaction and check that it is + # accepted. + self.nodes[0].prioritisetransaction(txid=tx_id, fee_delta=int(self.relayfee*COIN)) + + self.log.info("Assert that prioritised free transaction is accepted to mempool") + assert_equal(self.nodes[0].sendrawtransaction(tx_hex), tx_id) + assert(tx_id in self.nodes[0].getrawmempool()) + + # Test that calling prioritisetransaction is sufficient to trigger + # getblocktemplate to (eventually) return a new block. 
+ mock_time = int(time.time()) + self.nodes[0].setmocktime(mock_time) + template = self.nodes[0].getblocktemplate() + self.nodes[0].prioritisetransaction(txid=tx_id, fee_delta=-int(self.relayfee*COIN)) + self.nodes[0].setmocktime(mock_time+10) + new_template = self.nodes[0].getblocktemplate() + + assert(template != new_template) + +if __name__ == '__main__': + PrioritiseTransactionTest().main() diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py new file mode 100755 index 000000000..fafc27d16 --- /dev/null +++ b/test/functional/p2p_compactblocks.py @@ -0,0 +1,924 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test compact blocks (BIP 152). + +Version 1 compact blocks are pre-segwit (txids) +Version 2 compact blocks are post-segwit (wtxids) +""" + +from decimal import Decimal +import random + +from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment +from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_block, msg_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_witness_block, msg_witness_blocktxn, MSG_WITNESS_FLAG, NODE_NETWORK, NODE_WITNESS, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex +from test_framework.mininode import mininode_lock, P2PInterface +from test_framework.script import CScript, OP_TRUE, OP_DROP +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, get_bip9_status, satoshi_round, sync_blocks, wait_until + +# TestP2PConn: A peer we use to send messages to bitcoind, and store responses. +class TestP2PConn(P2PInterface): + def __init__(self): + super().__init__() + self.last_sendcmpct = [] + self.block_announced = False + # Store the hashes of blocks we've seen announced. + # This is for synchronizing the p2p message traffic, + # so we can eg wait until a particular block is announced. 
+ self.announced_blockhashes = set() + + def on_sendcmpct(self, message): + self.last_sendcmpct.append(message) + + def on_cmpctblock(self, message): + self.block_announced = True + self.last_message["cmpctblock"].header_and_shortids.header.calc_sha256() + self.announced_blockhashes.add(self.last_message["cmpctblock"].header_and_shortids.header.sha256) + + def on_headers(self, message): + self.block_announced = True + for x in self.last_message["headers"].headers: + x.calc_sha256() + self.announced_blockhashes.add(x.sha256) + + def on_inv(self, message): + for x in self.last_message["inv"].inv: + if x.type == 2: + self.block_announced = True + self.announced_blockhashes.add(x.hash) + + # Requires caller to hold mininode_lock + def received_block_announcement(self): + return self.block_announced + + def clear_block_announcement(self): + with mininode_lock: + self.block_announced = False + self.last_message.pop("inv", None) + self.last_message.pop("headers", None) + self.last_message.pop("cmpctblock", None) + + def get_headers(self, locator, hashstop): + msg = msg_getheaders() + msg.locator.vHave = locator + msg.hashstop = hashstop + self.send_message(msg) + + def send_header_for_blocks(self, new_blocks): + headers_message = msg_headers() + headers_message.headers = [CBlockHeader(b) for b in new_blocks] + self.send_message(headers_message) + + def request_headers_and_sync(self, locator, hashstop=0): + self.clear_block_announcement() + self.get_headers(locator, hashstop) + wait_until(self.received_block_announcement, timeout=30, lock=mininode_lock) + self.clear_block_announcement() + + # Block until a block announcement for a particular block hash is + # received. + def wait_for_block_announcement(self, block_hash, timeout=30): + def received_hash(): + return (block_hash in self.announced_blockhashes) + wait_until(received_hash, timeout=timeout, lock=mininode_lock) + + def send_await_disconnect(self, message, timeout=30): + """Sends a message to the node and wait for disconnect. + + This is used when we want to send a message into the node that we expect + will get us disconnected, eg an invalid block.""" + self.send_message(message) + wait_until(lambda: not self.is_connected, timeout=timeout, lock=mininode_lock) + +class CompactBlocksTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + # Node0 = pre-segwit, node1 = segwit-aware + self.num_nodes = 2 + # This test was written assuming SegWit is activated using BIP9 at height 432 (3x confirmation window). + # TODO: Rewrite this test to support SegWit being always active. + self.extra_args = [["-vbparams=segwit:0:0"], ["-vbparams=segwit:0:999999999999", "-txindex", "-deprecatedrpc=addwitnessaddress"]] + self.utxos = [] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def build_block_on_tip(self, node, segwit=False): + height = node.getblockcount() + tip = node.getbestblockhash() + mtp = node.getblockheader(tip)['mediantime'] + block = create_block(int(tip, 16), create_coinbase(height + 1), mtp + 1) + block.nVersion = 4 + if segwit: + add_witness_commitment(block) + block.solve() + return block + + # Create 10 more anyone-can-spend utxo's for testing. + def make_utxos(self): + # Doesn't matter which node we use, just use node0. 
+ block = self.build_block_on_tip(self.nodes[0]) + self.test_node.send_and_ping(msg_block(block)) + assert(int(self.nodes[0].getbestblockhash(), 16) == block.sha256) + self.nodes[0].generate(100) + + total_value = block.vtx[0].vout[0].nValue + out_value = total_value // 10 + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b'')) + for i in range(10): + tx.vout.append(CTxOut(out_value, CScript([OP_TRUE]))) + tx.rehash() + + block2 = self.build_block_on_tip(self.nodes[0]) + block2.vtx.append(tx) + block2.hashMerkleRoot = block2.calc_merkle_root() + block2.solve() + self.test_node.send_and_ping(msg_block(block2)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256) + self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)]) + return + + # Test "sendcmpct" (between peers preferring the same version): + # - No compact block announcements unless sendcmpct is sent. + # - If sendcmpct is sent with version > preferred_version, the message is ignored. + # - If sendcmpct is sent with boolean 0, then block announcements are not + # made with compact blocks. + # - If sendcmpct is then sent with boolean 1, then new block announcements + # are made with compact blocks. + # If old_node is passed in, request compact blocks with version=preferred-1 + # and verify that it receives block announcements via compact block. + def test_sendcmpct(self, node, test_node, preferred_version, old_node=None): + # Make sure we get a SENDCMPCT message from our peer + def received_sendcmpct(): + return (len(test_node.last_sendcmpct) > 0) + wait_until(received_sendcmpct, timeout=30, lock=mininode_lock) + with mininode_lock: + # Check that the first version received is the preferred one + assert_equal(test_node.last_sendcmpct[0].version, preferred_version) + # And that we receive versions down to 1. + assert_equal(test_node.last_sendcmpct[-1].version, 1) + test_node.last_sendcmpct = [] + + tip = int(node.getbestblockhash(), 16) + + def check_announcement_of_new_block(node, peer, predicate): + peer.clear_block_announcement() + block_hash = int(node.generate(1)[0], 16) + peer.wait_for_block_announcement(block_hash, timeout=30) + assert(peer.block_announced) + + with mininode_lock: + assert predicate(peer), ( + "block_hash={!r}, cmpctblock={!r}, inv={!r}".format( + block_hash, peer.last_message.get("cmpctblock", None), peer.last_message.get("inv", None))) + + # We shouldn't get any block announcements via cmpctblock yet. + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message) + + # Try one more time, this time after requesting headers. + test_node.request_headers_and_sync(locator=[tip]) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "inv" in p.last_message) + + # Test a few ways of using sendcmpct that should NOT + # result in compact block announcements. + # Before each test, sync the headers chain. + test_node.request_headers_and_sync(locator=[tip]) + + # Now try a SENDCMPCT message with too-high version + sendcmpct = msg_sendcmpct() + sendcmpct.version = preferred_version+1 + sendcmpct.announce = True + test_node.send_and_ping(sendcmpct) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message) + + # Headers sync before next test. 
+ test_node.request_headers_and_sync(locator=[tip]) + + # Now try a SENDCMPCT message with valid version, but announce=False + sendcmpct.version = preferred_version + sendcmpct.announce = False + test_node.send_and_ping(sendcmpct) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message) + + # Headers sync before next test. + test_node.request_headers_and_sync(locator=[tip]) + + # Finally, try a SENDCMPCT message with announce=True + sendcmpct.version = preferred_version + sendcmpct.announce = True + test_node.send_and_ping(sendcmpct) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message) + + # Try one more time (no headers sync should be needed!) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message) + + # Try one more time, after turning on sendheaders + test_node.send_and_ping(msg_sendheaders()) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message) + + # Try one more time, after sending a version-1, announce=false message. + sendcmpct.version = preferred_version-1 + sendcmpct.announce = False + test_node.send_and_ping(sendcmpct) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message) + + # Now turn off announcements + sendcmpct.version = preferred_version + sendcmpct.announce = False + test_node.send_and_ping(sendcmpct) + check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "headers" in p.last_message) + + if old_node is not None: + # Verify that a peer using an older protocol version can receive + # announcements from this node. + sendcmpct.version = preferred_version-1 + sendcmpct.announce = True + old_node.send_and_ping(sendcmpct) + # Header sync + old_node.request_headers_and_sync(locator=[tip]) + check_announcement_of_new_block(node, old_node, lambda p: "cmpctblock" in p.last_message) + + # This test actually causes bitcoind to (reasonably!) disconnect us, so do this last. + def test_invalid_cmpctblock_message(self): + self.nodes[0].generate(101) + block = self.build_block_on_tip(self.nodes[0]) + + cmpct_block = P2PHeaderAndShortIDs() + cmpct_block.header = CBlockHeader(block) + cmpct_block.prefilled_txn_length = 1 + # This index will be too high + prefilled_txn = PrefilledTransaction(1, block.vtx[0]) + cmpct_block.prefilled_txn = [prefilled_txn] + self.test_node.send_await_disconnect(msg_cmpctblock(cmpct_block)) + assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.hashPrevBlock) + + # Compare the generated shortids to what we expect based on BIP 152, given + # bitcoind's choice of nonce. + def test_compactblock_construction(self, node, test_node, version, use_witness_address): + # Generate a bunch of transactions. + node.generate(101) + num_transactions = 25 + address = node.getnewaddress() + if use_witness_address: + # Want at least one segwit spend, so move all funds to + # a witness address. 
+ address = node.addwitnessaddress(address) + value_to_send = node.getbalance() + node.sendtoaddress(address, satoshi_round(value_to_send-Decimal(0.1))) + node.generate(1) + + segwit_tx_generated = False + for i in range(num_transactions): + txid = node.sendtoaddress(address, 0.1) + hex_tx = node.gettransaction(txid)["hex"] + tx = FromHex(CTransaction(), hex_tx) + if not tx.wit.is_null(): + segwit_tx_generated = True + + if use_witness_address: + assert(segwit_tx_generated) # check that our test is not broken + + # Wait until we've seen the block announcement for the resulting tip + tip = int(node.getbestblockhash(), 16) + test_node.wait_for_block_announcement(tip) + + # Make sure we will receive a fast-announce compact block + self.request_cb_announcements(test_node, node, version) + + # Now mine a block, and look at the resulting compact block. + test_node.clear_block_announcement() + block_hash = int(node.generate(1)[0], 16) + + # Store the raw block in our internal format. + block = FromHex(CBlock(), node.getblock("%064x" % block_hash, False)) + for tx in block.vtx: + tx.calc_sha256() + block.rehash() + + # Wait until the block was announced (via compact blocks) + wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock) + + # Now fetch and check the compact block + header_and_shortids = None + with mininode_lock: + assert("cmpctblock" in test_node.last_message) + # Convert the on-the-wire representation to absolute indexes + header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids) + self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block) + + # Now fetch the compact block using a normal non-announce getdata + with mininode_lock: + test_node.clear_block_announcement() + inv = CInv(4, block_hash) # 4 == "CompactBlock" + test_node.send_message(msg_getdata([inv])) + + wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock) + + # Now fetch and check the compact block + header_and_shortids = None + with mininode_lock: + assert("cmpctblock" in test_node.last_message) + # Convert the on-the-wire representation to absolute indexes + header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids) + self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block) + + def check_compactblock_construction_from_block(self, version, header_and_shortids, block_hash, block): + # Check that we got the right block! + header_and_shortids.header.calc_sha256() + assert_equal(header_and_shortids.header.sha256, block_hash) + + # Make sure the prefilled_txn appears to have included the coinbase + assert(len(header_and_shortids.prefilled_txn) >= 1) + assert_equal(header_and_shortids.prefilled_txn[0].index, 0) + + # Check that all prefilled_txn entries match what's in the block. + for entry in header_and_shortids.prefilled_txn: + entry.tx.calc_sha256() + # This checks the non-witness parts of the tx agree + assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256) + + # And this checks the witness + wtxid = entry.tx.calc_sha256(True) + if version == 2: + assert_equal(wtxid, block.vtx[entry.index].calc_sha256(True)) + else: + # Shouldn't have received a witness + assert(entry.tx.wit.is_null()) + + # Check that the cmpctblock message announced all the transactions. 
+ assert_equal(len(header_and_shortids.prefilled_txn) + len(header_and_shortids.shortids), len(block.vtx)) + + # And now check that all the shortids are as expected as well. + # Determine the siphash keys to use. + [k0, k1] = header_and_shortids.get_siphash_keys() + + index = 0 + while index < len(block.vtx): + if (len(header_and_shortids.prefilled_txn) > 0 and + header_and_shortids.prefilled_txn[0].index == index): + # Already checked prefilled transactions above + header_and_shortids.prefilled_txn.pop(0) + else: + tx_hash = block.vtx[index].sha256 + if version == 2: + tx_hash = block.vtx[index].calc_sha256(True) + shortid = calculate_shortid(k0, k1, tx_hash) + assert_equal(shortid, header_and_shortids.shortids[0]) + header_and_shortids.shortids.pop(0) + index += 1 + + # Test that bitcoind requests compact blocks when we announce new blocks + # via header or inv, and that responding to getblocktxn causes the block + # to be successfully reconstructed. + # Post-segwit: upgraded nodes would only make this request of cb-version-2, + # NODE_WITNESS peers. Unupgraded nodes would still make this request of + # any cb-version-1-supporting peer. + def test_compactblock_requests(self, node, test_node, version, segwit): + # Try announcing a block with an inv or header, expect a compactblock + # request + for announce in ["inv", "header"]: + block = self.build_block_on_tip(node, segwit=segwit) + with mininode_lock: + test_node.last_message.pop("getdata", None) + + if announce == "inv": + test_node.send_message(msg_inv([CInv(2, block.sha256)])) + wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock) + test_node.send_header_for_blocks([block]) + else: + test_node.send_header_for_blocks([block]) + wait_until(lambda: "getdata" in test_node.last_message, timeout=30, lock=mininode_lock) + assert_equal(len(test_node.last_message["getdata"].inv), 1) + assert_equal(test_node.last_message["getdata"].inv[0].type, 4) + assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256) + + # Send back a compactblock message that omits the coinbase + comp_block = HeaderAndShortIDs() + comp_block.header = CBlockHeader(block) + comp_block.nonce = 0 + [k0, k1] = comp_block.get_siphash_keys() + coinbase_hash = block.vtx[0].sha256 + if version == 2: + coinbase_hash = block.vtx[0].calc_sha256(True) + comp_block.shortids = [ + calculate_shortid(k0, k1, coinbase_hash) ] + test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p())) + assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock) + # Expect a getblocktxn message. + with mininode_lock: + assert("getblocktxn" in test_node.last_message) + absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute() + assert_equal(absolute_indexes, [0]) # should be a coinbase request + + # Send the coinbase, and verify that the tip advances. + if version == 2: + msg = msg_witness_blocktxn() + else: + msg = msg_blocktxn() + msg.block_transactions.blockhash = block.sha256 + msg.block_transactions.transactions = [block.vtx[0]] + test_node.send_and_ping(msg) + assert_equal(int(node.getbestblockhash(), 16), block.sha256) + + # Create a chain of transactions from given utxo, and add to a new block. 
+ def build_block_with_transactions(self, node, utxo, num_transactions): + block = self.build_block_on_tip(node) + + for i in range(num_transactions): + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b'')) + tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) + tx.rehash() + utxo = [tx.sha256, 0, tx.vout[0].nValue] + block.vtx.append(tx) + + block.hashMerkleRoot = block.calc_merkle_root() + block.solve() + return block + + # Test that we only receive getblocktxn requests for transactions that the + # node needs, and that responding to them causes the block to be + # reconstructed. + def test_getblocktxn_requests(self, node, test_node, version): + with_witness = (version==2) + + def test_getblocktxn_response(compact_block, peer, expected_result): + msg = msg_cmpctblock(compact_block.to_p2p()) + peer.send_and_ping(msg) + with mininode_lock: + assert("getblocktxn" in peer.last_message) + absolute_indexes = peer.last_message["getblocktxn"].block_txn_request.to_absolute() + assert_equal(absolute_indexes, expected_result) + + def test_tip_after_message(node, peer, msg, tip): + peer.send_and_ping(msg) + assert_equal(int(node.getbestblockhash(), 16), tip) + + # First try announcing compactblocks that won't reconstruct, and verify + # that we receive getblocktxn messages back. + utxo = self.utxos.pop(0) + + block = self.build_block_with_transactions(node, utxo, 5) + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + comp_block = HeaderAndShortIDs() + comp_block.initialize_from_block(block, use_witness=with_witness) + + test_getblocktxn_response(comp_block, test_node, [1, 2, 3, 4, 5]) + + msg_bt = msg_blocktxn() + if with_witness: + msg_bt = msg_witness_blocktxn() # serialize with witnesses + msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[1:]) + test_tip_after_message(node, test_node, msg_bt, block.sha256) + + utxo = self.utxos.pop(0) + block = self.build_block_with_transactions(node, utxo, 5) + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + + # Now try interspersing the prefilled transactions + comp_block.initialize_from_block(block, prefill_list=[0, 1, 5], use_witness=with_witness) + test_getblocktxn_response(comp_block, test_node, [2, 3, 4]) + msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[2:5]) + test_tip_after_message(node, test_node, msg_bt, block.sha256) + + # Now try giving one transaction ahead of time. + utxo = self.utxos.pop(0) + block = self.build_block_with_transactions(node, utxo, 5) + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + test_node.send_and_ping(msg_tx(block.vtx[1])) + assert(block.vtx[1].hash in node.getrawmempool()) + + # Prefill 4 out of the 6 transactions, and verify that only the one + # that was not in the mempool is requested. + comp_block.initialize_from_block(block, prefill_list=[0, 2, 3, 4], use_witness=with_witness) + test_getblocktxn_response(comp_block, test_node, [5]) + + msg_bt.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]]) + test_tip_after_message(node, test_node, msg_bt, block.sha256) + + # Now provide all transactions to the node before the block is + # announced and verify reconstruction happens immediately. 
+ utxo = self.utxos.pop(0) + block = self.build_block_with_transactions(node, utxo, 10) + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + for tx in block.vtx[1:]: + test_node.send_message(msg_tx(tx)) + test_node.sync_with_ping() + # Make sure all transactions were accepted. + mempool = node.getrawmempool() + for tx in block.vtx[1:]: + assert(tx.hash in mempool) + + # Clear out last request. + with mininode_lock: + test_node.last_message.pop("getblocktxn", None) + + # Send compact block + comp_block.initialize_from_block(block, prefill_list=[0], use_witness=with_witness) + test_tip_after_message(node, test_node, msg_cmpctblock(comp_block.to_p2p()), block.sha256) + with mininode_lock: + # Shouldn't have gotten a request for any transaction + assert("getblocktxn" not in test_node.last_message) + + # Incorrectly responding to a getblocktxn shouldn't cause the block to be + # permanently failed. + def test_incorrect_blocktxn_response(self, node, test_node, version): + if (len(self.utxos) == 0): + self.make_utxos() + utxo = self.utxos.pop(0) + + block = self.build_block_with_transactions(node, utxo, 10) + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + # Relay the first 5 transactions from the block in advance + for tx in block.vtx[1:6]: + test_node.send_message(msg_tx(tx)) + test_node.sync_with_ping() + # Make sure all transactions were accepted. + mempool = node.getrawmempool() + for tx in block.vtx[1:6]: + assert(tx.hash in mempool) + + # Send compact block + comp_block = HeaderAndShortIDs() + comp_block.initialize_from_block(block, prefill_list=[0], use_witness=(version == 2)) + test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p())) + absolute_indexes = [] + with mininode_lock: + assert("getblocktxn" in test_node.last_message) + absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute() + assert_equal(absolute_indexes, [6, 7, 8, 9, 10]) + + # Now give an incorrect response. + # Note that it's possible for bitcoind to be smart enough to know we're + # lying, since it could check to see if the shortid matches what we're + # sending, and eg disconnect us for misbehavior. If that behavior + # change was made, we could just modify this test by having a + # different peer provide the block further down, so that we're still + # verifying that the block isn't marked bad permanently. This is good + # enough for now. + msg = msg_blocktxn() + if version==2: + msg = msg_witness_blocktxn() + msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]] + block.vtx[7:]) + test_node.send_and_ping(msg) + + # Tip should not have updated + assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock) + + # We should receive a getdata request + wait_until(lambda: "getdata" in test_node.last_message, timeout=10, lock=mininode_lock) + assert_equal(len(test_node.last_message["getdata"].inv), 1) + assert(test_node.last_message["getdata"].inv[0].type == 2 or test_node.last_message["getdata"].inv[0].type == 2|MSG_WITNESS_FLAG) + assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256) + + # Deliver the block + if version==2: + test_node.send_and_ping(msg_witness_block(block)) + else: + test_node.send_and_ping(msg_block(block)) + assert_equal(int(node.getbestblockhash(), 16), block.sha256) + + def test_getblocktxn_handler(self, node, test_node, version): + # bitcoind will not send blocktxn responses for blocks whose height is + # more than 10 blocks deep. 
+ MAX_GETBLOCKTXN_DEPTH = 10 + chain_height = node.getblockcount() + current_height = chain_height + while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH): + block_hash = node.getblockhash(current_height) + block = FromHex(CBlock(), node.getblock(block_hash, False)) + + msg = msg_getblocktxn() + msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), []) + num_to_request = random.randint(1, len(block.vtx)) + msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request))) + test_node.send_message(msg) + wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10, lock=mininode_lock) + + [tx.calc_sha256() for tx in block.vtx] + with mininode_lock: + assert_equal(test_node.last_message["blocktxn"].block_transactions.blockhash, int(block_hash, 16)) + all_indices = msg.block_txn_request.to_absolute() + for index in all_indices: + tx = test_node.last_message["blocktxn"].block_transactions.transactions.pop(0) + tx.calc_sha256() + assert_equal(tx.sha256, block.vtx[index].sha256) + if version == 1: + # Witnesses should have been stripped + assert(tx.wit.is_null()) + else: + # Check that the witness matches + assert_equal(tx.calc_sha256(True), block.vtx[index].calc_sha256(True)) + test_node.last_message.pop("blocktxn", None) + current_height -= 1 + + # Next request should send a full block response, as we're past the + # allowed depth for a blocktxn response. + block_hash = node.getblockhash(current_height) + msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0]) + with mininode_lock: + test_node.last_message.pop("block", None) + test_node.last_message.pop("blocktxn", None) + test_node.send_and_ping(msg) + with mininode_lock: + test_node.last_message["block"].block.calc_sha256() + assert_equal(test_node.last_message["block"].block.sha256, int(block_hash, 16)) + assert "blocktxn" not in test_node.last_message + + def test_compactblocks_not_at_tip(self, node, test_node): + # Test that requesting old compactblocks doesn't work. + MAX_CMPCTBLOCK_DEPTH = 5 + new_blocks = [] + for i in range(MAX_CMPCTBLOCK_DEPTH + 1): + test_node.clear_block_announcement() + new_blocks.append(node.generate(1)[0]) + wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock) + + test_node.clear_block_announcement() + test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))])) + wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock) + + test_node.clear_block_announcement() + node.generate(1) + wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock) + test_node.clear_block_announcement() + with mininode_lock: + test_node.last_message.pop("block", None) + test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))])) + wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock) + with mininode_lock: + test_node.last_message["block"].block.calc_sha256() + assert_equal(test_node.last_message["block"].block.sha256, int(new_blocks[0], 16)) + + # Generate an old compactblock, and verify that it's not accepted. 
+ cur_height = node.getblockcount() + hashPrevBlock = int(node.getblockhash(cur_height-5), 16) + block = self.build_block_on_tip(node) + block.hashPrevBlock = hashPrevBlock + block.solve() + + comp_block = HeaderAndShortIDs() + comp_block.initialize_from_block(block) + test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p())) + + tips = node.getchaintips() + found = False + for x in tips: + if x["hash"] == block.hash: + assert_equal(x["status"], "headers-only") + found = True + break + assert(found) + + # Requesting this block via getblocktxn should silently fail + # (to avoid fingerprinting attacks). + msg = msg_getblocktxn() + msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0]) + with mininode_lock: + test_node.last_message.pop("blocktxn", None) + test_node.send_and_ping(msg) + with mininode_lock: + assert "blocktxn" not in test_node.last_message + + def activate_segwit(self, node): + node.generate(144*3) + assert_equal(get_bip9_status(node, "segwit")["status"], 'active') + + def test_end_to_end_block_relay(self, node, listeners): + utxo = self.utxos.pop(0) + + block = self.build_block_with_transactions(node, utxo, 10) + + [l.clear_block_announcement() for l in listeners] + + # ToHex() won't serialize with witness, but this block has no witnesses + # anyway. TODO: repeat this test with witness tx's to a segwit node. + node.submitblock(ToHex(block)) + + for l in listeners: + wait_until(lambda: l.received_block_announcement(), timeout=30, lock=mininode_lock) + with mininode_lock: + for l in listeners: + assert "cmpctblock" in l.last_message + l.last_message["cmpctblock"].header_and_shortids.header.calc_sha256() + assert_equal(l.last_message["cmpctblock"].header_and_shortids.header.sha256, block.sha256) + + # Test that we don't get disconnected if we relay a compact block with valid header, + # but invalid transactions. + def test_invalid_tx_in_compactblock(self, node, test_node, use_segwit): + assert(len(self.utxos)) + utxo = self.utxos[0] + + block = self.build_block_with_transactions(node, utxo, 5) + del block.vtx[3] + block.hashMerkleRoot = block.calc_merkle_root() + if use_segwit: + # If we're testing with segwit, also drop the coinbase witness, + # but include the witness commitment. + add_witness_commitment(block) + block.vtx[0].wit.vtxinwit = [] + block.solve() + + # Now send the compact block with all transactions prefilled, and + # verify that we don't get disconnected. 
+ comp_block = HeaderAndShortIDs() + comp_block.initialize_from_block(block, prefill_list=[0, 1, 2, 3, 4], use_witness=use_segwit) + msg = msg_cmpctblock(comp_block.to_p2p()) + test_node.send_and_ping(msg) + + # Check that the tip didn't advance + assert(int(node.getbestblockhash(), 16) is not block.sha256) + test_node.sync_with_ping() + + # Helper for enabling cb announcements + # Send the sendcmpct request and sync headers + def request_cb_announcements(self, peer, node, version): + tip = node.getbestblockhash() + peer.get_headers(locator=[int(tip, 16)], hashstop=0) + + msg = msg_sendcmpct() + msg.version = version + msg.announce = True + peer.send_and_ping(msg) + + def test_compactblock_reconstruction_multiple_peers(self, node, stalling_peer, delivery_peer): + assert(len(self.utxos)) + + def announce_cmpct_block(node, peer): + utxo = self.utxos.pop(0) + block = self.build_block_with_transactions(node, utxo, 5) + + cmpct_block = HeaderAndShortIDs() + cmpct_block.initialize_from_block(block) + msg = msg_cmpctblock(cmpct_block.to_p2p()) + peer.send_and_ping(msg) + with mininode_lock: + assert "getblocktxn" in peer.last_message + return block, cmpct_block + + block, cmpct_block = announce_cmpct_block(node, stalling_peer) + + for tx in block.vtx[1:]: + delivery_peer.send_message(msg_tx(tx)) + delivery_peer.sync_with_ping() + mempool = node.getrawmempool() + for tx in block.vtx[1:]: + assert(tx.hash in mempool) + + delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p())) + assert_equal(int(node.getbestblockhash(), 16), block.sha256) + + self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue]) + + # Now test that delivering an invalid compact block won't break relay + + block, cmpct_block = announce_cmpct_block(node, stalling_peer) + for tx in block.vtx[1:]: + delivery_peer.send_message(msg_tx(tx)) + delivery_peer.sync_with_ping() + + cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [ CTxInWitness() ] + cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)] + + cmpct_block.use_witness = True + delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p())) + assert(int(node.getbestblockhash(), 16) != block.sha256) + + msg = msg_blocktxn() + msg.block_transactions.blockhash = block.sha256 + msg.block_transactions.transactions = block.vtx[1:] + stalling_peer.send_and_ping(msg) + assert_equal(int(node.getbestblockhash(), 16), block.sha256) + + def run_test(self): + # Setup the p2p connections + self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn()) + self.segwit_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK | NODE_WITNESS) + self.old_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK) + + # We will need UTXOs to construct transactions in later tests. + self.make_utxos() + + self.log.info("Running tests, pre-segwit activation:") + + self.log.info("Testing SENDCMPCT p2p message... ") + self.test_sendcmpct(self.nodes[0], self.test_node, 1) + sync_blocks(self.nodes) + self.test_sendcmpct(self.nodes[1], self.segwit_node, 2, old_node=self.old_node) + sync_blocks(self.nodes) + + self.log.info("Testing compactblock construction...") + self.test_compactblock_construction(self.nodes[0], self.test_node, 1, False) + sync_blocks(self.nodes) + self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, False) + sync_blocks(self.nodes) + + self.log.info("Testing compactblock requests... 
") + self.test_compactblock_requests(self.nodes[0], self.test_node, 1, False) + sync_blocks(self.nodes) + self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, False) + sync_blocks(self.nodes) + + self.log.info("Testing getblocktxn requests...") + self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1) + sync_blocks(self.nodes) + self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2) + sync_blocks(self.nodes) + + self.log.info("Testing getblocktxn handler...") + self.test_getblocktxn_handler(self.nodes[0], self.test_node, 1) + sync_blocks(self.nodes) + self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2) + self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1) + sync_blocks(self.nodes) + + self.log.info("Testing compactblock requests/announcements not at chain tip...") + self.test_compactblocks_not_at_tip(self.nodes[0], self.test_node) + sync_blocks(self.nodes) + self.test_compactblocks_not_at_tip(self.nodes[1], self.segwit_node) + self.test_compactblocks_not_at_tip(self.nodes[1], self.old_node) + sync_blocks(self.nodes) + + self.log.info("Testing handling of incorrect blocktxn responses...") + self.test_incorrect_blocktxn_response(self.nodes[0], self.test_node, 1) + sync_blocks(self.nodes) + self.test_incorrect_blocktxn_response(self.nodes[1], self.segwit_node, 2) + sync_blocks(self.nodes) + + # End-to-end block relay tests + self.log.info("Testing end-to-end block relay...") + self.request_cb_announcements(self.test_node, self.nodes[0], 1) + self.request_cb_announcements(self.old_node, self.nodes[1], 1) + self.request_cb_announcements(self.segwit_node, self.nodes[1], 2) + self.test_end_to_end_block_relay(self.nodes[0], [self.segwit_node, self.test_node, self.old_node]) + self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node]) + + self.log.info("Testing handling of invalid compact blocks...") + self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False) + self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, False) + self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, False) + + self.log.info("Testing reconstructing compact blocks from all peers...") + self.test_compactblock_reconstruction_multiple_peers(self.nodes[1], self.segwit_node, self.old_node) + sync_blocks(self.nodes) + + # Advance to segwit activation + self.log.info("Advancing to segwit activation") + self.activate_segwit(self.nodes[1]) + self.log.info("Running tests, post-segwit activation...") + + self.log.info("Testing compactblock construction...") + self.test_compactblock_construction(self.nodes[1], self.old_node, 1, True) + self.test_compactblock_construction(self.nodes[1], self.segwit_node, 2, True) + sync_blocks(self.nodes) + + self.log.info("Testing compactblock requests (unupgraded node)... ") + self.test_compactblock_requests(self.nodes[0], self.test_node, 1, True) + + self.log.info("Testing getblocktxn requests (unupgraded node)...") + self.test_getblocktxn_requests(self.nodes[0], self.test_node, 1) + + # Need to manually sync node0 and node1, because post-segwit activation, + # node1 will not download blocks from node0. 
+ self.log.info("Syncing nodes...") + assert(self.nodes[0].getbestblockhash() != self.nodes[1].getbestblockhash()) + while (self.nodes[0].getblockcount() > self.nodes[1].getblockcount()): + block_hash = self.nodes[0].getblockhash(self.nodes[1].getblockcount()+1) + self.nodes[1].submitblock(self.nodes[0].getblock(block_hash, False)) + assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash()) + + self.log.info("Testing compactblock requests (segwit node)... ") + self.test_compactblock_requests(self.nodes[1], self.segwit_node, 2, True) + + self.log.info("Testing getblocktxn requests (segwit node)...") + self.test_getblocktxn_requests(self.nodes[1], self.segwit_node, 2) + sync_blocks(self.nodes) + + self.log.info("Testing getblocktxn handler (segwit node should return witnesses)...") + self.test_getblocktxn_handler(self.nodes[1], self.segwit_node, 2) + self.test_getblocktxn_handler(self.nodes[1], self.old_node, 1) + + # Test that if we submitblock to node1, we'll get a compact block + # announcement to all peers. + # (Post-segwit activation, blocks won't propagate from node0 to node1 + # automatically, so don't bother testing a block announced to node0.) + self.log.info("Testing end-to-end block relay...") + self.request_cb_announcements(self.test_node, self.nodes[0], 1) + self.request_cb_announcements(self.old_node, self.nodes[1], 1) + self.request_cb_announcements(self.segwit_node, self.nodes[1], 2) + self.test_end_to_end_block_relay(self.nodes[1], [self.segwit_node, self.test_node, self.old_node]) + + self.log.info("Testing handling of invalid compact blocks...") + self.test_invalid_tx_in_compactblock(self.nodes[0], self.test_node, False) + self.test_invalid_tx_in_compactblock(self.nodes[1], self.segwit_node, True) + self.test_invalid_tx_in_compactblock(self.nodes[1], self.old_node, True) + + self.log.info("Testing invalid index in cmpctblock message...") + self.test_invalid_cmpctblock_message() + + +if __name__ == '__main__': + CompactBlocksTest().main() diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py new file mode 100755 index 000000000..67f24d6bf --- /dev/null +++ b/test/functional/p2p_disconnect_ban.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test node disconnect and ban behavior""" +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + connect_nodes_bi, + wait_until, +) + +class DisconnectBanTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def run_test(self): + self.log.info("Test setban and listbanned RPCs") + + self.log.info("setban: successfully ban single IP address") + assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point + self.nodes[1].setban("127.0.0.1", "add") + wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10) + assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point + assert_equal(len(self.nodes[1].listbanned()), 1) + + self.log.info("clearbanned: successfully clear ban list") + self.nodes[1].clearbanned() + assert_equal(len(self.nodes[1].listbanned()), 0) + self.nodes[1].setban("127.0.0.0/24", "add") + + self.log.info("setban: fail to ban an already banned subnet") + assert_equal(len(self.nodes[1].listbanned()), 1) + assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add") + + self.log.info("setban: fail to ban an invalid subnet") + assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add") + assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24 + + self.log.info("setban remove: fail to unban a non-banned subnet") + assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove") + assert_equal(len(self.nodes[1].listbanned()), 1) + + self.log.info("setban remove: successfully unban subnet") + self.nodes[1].setban("127.0.0.0/24", "remove") + assert_equal(len(self.nodes[1].listbanned()), 0) + self.nodes[1].clearbanned() + assert_equal(len(self.nodes[1].listbanned()), 0) + + self.log.info("setban: test persistence across node restart") + self.nodes[1].setban("127.0.0.0/32", "add") + self.nodes[1].setban("127.0.0.0/24", "add") + # Set the mocktime so we can control when bans expire + old_time = int(time.time()) + self.nodes[1].setmocktime(old_time) + self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds + self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds + listBeforeShutdown = self.nodes[1].listbanned() + assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address']) + # Move time forward by 3 seconds so the third ban has expired + self.nodes[1].setmocktime(old_time + 3) + assert_equal(len(self.nodes[1].listbanned()), 3) + + self.stop_node(1) + self.start_node(1) + + listAfterShutdown = self.nodes[1].listbanned() + assert_equal("127.0.0.0/24", listAfterShutdown[0]['address']) + assert_equal("127.0.0.0/32", listAfterShutdown[1]['address']) + assert_equal("/19" in listAfterShutdown[2]['address'], True) + + # Clear ban lists + self.nodes[1].clearbanned() + connect_nodes_bi(self.nodes, 0, 1) + + self.log.info("Test disconnectnode RPCs") + + self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid") + address1 = self.nodes[0].getpeerinfo()[0]['addr'] + node1 = self.nodes[0].getpeerinfo()[0]['addr'] + assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1) + + self.log.info("disconnectnode: fail to 
disconnect when calling with junk address") + assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street") + + self.log.info("disconnectnode: successfully disconnect node by address") + address1 = self.nodes[0].getpeerinfo()[0]['addr'] + self.nodes[0].disconnectnode(address=address1) + wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) + assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1] + + self.log.info("disconnectnode: successfully reconnect node") + connect_nodes_bi(self.nodes, 0, 1) # reconnect the node + assert_equal(len(self.nodes[0].getpeerinfo()), 2) + assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1] + + self.log.info("disconnectnode: successfully disconnect node by node id") + id1 = self.nodes[0].getpeerinfo()[0]['id'] + self.nodes[0].disconnectnode(nodeid=id1) + wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10) + assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1] + +if __name__ == '__main__': + DisconnectBanTest().main() diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py new file mode 100755 index 000000000..d589519e4 --- /dev/null +++ b/test/functional/p2p_feefilter.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test processing of feefilter messages.""" + +from decimal import Decimal +import time + +from test_framework.messages import msg_feefilter +from test_framework.mininode import mininode_lock, P2PInterface +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import sync_blocks, sync_mempools + +def hashToHex(hash): + return format(hash, '064x') + +# Wait up to 60 secs to see if the testnode has received all the expected invs +def allInvsMatch(invsExpected, testnode): + for x in range(60): + with mininode_lock: + if (sorted(invsExpected) == sorted(testnode.txinvs)): + return True + time.sleep(1) + return False + +class TestP2PConn(P2PInterface): + def __init__(self): + super().__init__() + self.txinvs = [] + + def on_inv(self, message): + for i in message.inv: + if (i.type == 1): + self.txinvs.append(hashToHex(i.hash)) + + def clear_invs(self): + with mininode_lock: + self.txinvs = [] + +class FeeFilterTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + node1 = self.nodes[1] + node0 = self.nodes[0] + # Get out of IBD + node1.generate(1) + sync_blocks(self.nodes) + + self.nodes[0].add_p2p_connection(TestP2PConn()) + + # Test that invs are received for all txs at feerate of 20 sat/byte + node1.settxfee(Decimal("0.00020000")) + txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] + assert(allInvsMatch(txids, self.nodes[0].p2p)) + self.nodes[0].p2p.clear_invs() + + # Set a filter of 15 sat/byte + self.nodes[0].p2p.send_and_ping(msg_feefilter(15000)) + + # Test that txs are still being received (paying 20 sat/byte) + txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)] + assert(allInvsMatch(txids, self.nodes[0].p2p)) + self.nodes[0].p2p.clear_invs() + + # Change tx fee rate to 10 sat/byte and test they are no longer received + node1.settxfee(Decimal("0.00010000")) 
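+        # 0.00010000 BTC/kB is 10,000 sat per 1,000 bytes, i.e. 10 sat/byte,
+        # which is below the 15 sat/byte filter installed above, so the peer
+        # should not be sent invs for these transactions.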
+        [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        sync_mempools(self.nodes)  # must be sure node 0 has received all txs
+
+        # Send one transaction from node0 that should be received, so that
+        # we can sync the test on receipt (if node1's txs were relayed, they'd
+        # be received by the time this node0 tx is received). This is
+        # unfortunately reliant on the current relay behavior where we batch up
+        # to 35 entries in an inv, which means that when this next transaction
+        # is eligible for relay, the prior transactions from node1 are eligible
+        # as well.
+        node0.settxfee(Decimal("0.00020000"))
+        txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
+        assert(allInvsMatch(txids, self.nodes[0].p2p))
+        self.nodes[0].p2p.clear_invs()
+
+        # Remove fee filter and check that txs are received again
+        self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
+        txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
+        assert(allInvsMatch(txids, self.nodes[0].p2p))
+        self.nodes[0].p2p.clear_invs()
+
+if __name__ == '__main__':
+    FeeFilterTest().main()
diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py
new file mode 100755
index 000000000..fab088719
--- /dev/null
+++ b/test/functional/p2p_fingerprint.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test various fingerprinting protections.
+
+If a stale block more than a month old or its header are requested by a peer,
+the node should pretend that it does not have it to avoid fingerprinting.
+"""
+
+import time
+
+from test_framework.blocktools import (create_block, create_coinbase)
+from test_framework.messages import CInv
+from test_framework.mininode import (
+    P2PInterface,
+    msg_headers,
+    msg_block,
+    msg_getdata,
+    msg_getheaders,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+    assert_equal,
+    wait_until,
+)
+
+class P2PFingerprintTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    # Build a chain of blocks on top of given one
+    def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
+        blocks = []
+        for _ in range(nblocks):
+            coinbase = create_coinbase(prev_height + 1)
+            block_time = prev_median_time + 1
+            block = create_block(int(prev_hash, 16), coinbase, block_time)
+            block.solve()
+
+            blocks.append(block)
+            prev_hash = block.hash
+            prev_height += 1
+            prev_median_time = block_time
+        return blocks
+
+    # Send a getdata request for a given block hash
+    def send_block_request(self, block_hash, node):
+        msg = msg_getdata()
+        msg.inv.append(CInv(2, block_hash))  # 2 == "Block"
+        node.send_message(msg)
+
+    # Send a getheaders request for a given single block hash
+    def send_header_request(self, block_hash, node):
+        msg = msg_getheaders()
+        msg.hashstop = block_hash
+        node.send_message(msg)
+
+    # Check whether last block received from node has a given hash
+    def last_block_equals(self, expected_hash, node):
+        block_msg = node.last_message.get("block")
+        return block_msg and block_msg.block.rehash() == expected_hash
+
+    # Check whether last block header received from node has a given hash
+    def last_header_equals(self, expected_hash, node):
+        headers_msg = node.last_message.get("headers")
+        return (headers_msg and
+                headers_msg.headers and
+
headers_msg.headers[0].rehash() == expected_hash) + + # Checks that stale blocks timestamped more than a month ago are not served + # by the node while recent stale blocks and old active chain blocks are. + # This does not currently test that stale blocks timestamped within the + # last month but that have over a month's worth of work are also withheld. + def run_test(self): + node0 = self.nodes[0].add_p2p_connection(P2PInterface()) + + # Set node time to 60 days ago + self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60) + + # Generating a chain of 10 blocks + block_hashes = self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + + # Create longer chain starting 2 blocks before current tip + height = len(block_hashes) - 2 + block_hash = block_hashes[height - 1] + block_time = self.nodes[0].getblockheader(block_hash)["mediantime"] + 1 + new_blocks = self.build_chain(5, block_hash, height, block_time) + + # Force reorg to a longer chain + node0.send_message(msg_headers(new_blocks)) + node0.wait_for_getdata() + for block in new_blocks: + node0.send_and_ping(msg_block(block)) + + # Check that reorg succeeded + assert_equal(self.nodes[0].getblockcount(), 13) + + stale_hash = int(block_hashes[-1], 16) + + # Check that getdata request for stale block succeeds + self.send_block_request(stale_hash, node0) + test_function = lambda: self.last_block_equals(stale_hash, node0) + wait_until(test_function, timeout=3) + + # Check that getheader request for stale block header succeeds + self.send_header_request(stale_hash, node0) + test_function = lambda: self.last_header_equals(stale_hash, node0) + wait_until(test_function, timeout=3) + + # Longest chain is extended so stale is much older than chain tip + self.nodes[0].setmocktime(0) + tip = self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[0] + assert_equal(self.nodes[0].getblockcount(), 14) + + # Send getdata & getheaders to refresh last received getheader message + block_hash = int(tip, 16) + self.send_block_request(block_hash, node0) + self.send_header_request(block_hash, node0) + node0.sync_with_ping() + + # Request for very old stale block should now fail + self.send_block_request(stale_hash, node0) + time.sleep(3) + assert not self.last_block_equals(stale_hash, node0) + + # Request for very old stale block header should now fail + self.send_header_request(stale_hash, node0) + time.sleep(3) + assert not self.last_header_equals(stale_hash, node0) + + # Verify we can fetch very old blocks and headers on the active chain + block_hash = int(block_hashes[2], 16) + self.send_block_request(block_hash, node0) + self.send_header_request(block_hash, node0) + node0.sync_with_ping() + + self.send_block_request(block_hash, node0) + test_function = lambda: self.last_block_equals(block_hash, node0) + wait_until(test_function, timeout=3) + + self.send_header_request(block_hash, node0) + test_function = lambda: self.last_header_equals(block_hash, node0) + wait_until(test_function, timeout=3) + +if __name__ == '__main__': + P2PFingerprintTest().main() diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py new file mode 100755 index 000000000..0678b1a65 --- /dev/null +++ b/test/functional/p2p_invalid_block.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
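+# Background for the merkle-malleability case below: with an odd number of
+# leaves the last hash is paired with itself, so [a, b, c] and [a, b, c, c]
+# yield the same merkle root. A minimal check, using helpers this framework's
+# test_framework.messages module provides:
+#
+#   from test_framework.messages import CBlock, ser_uint256
+#   leaves = [ser_uint256(h) for h in (1, 2, 3)]
+#   assert CBlock.get_merkle_root(leaves) == CBlock.get_merkle_root(leaves + leaves[-1:])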
+"""Test node responses to invalid blocks. + +In this test we connect to one node over p2p, and test block requests: +1) Valid blocks should be requested and become chain tip. +2) Invalid block with duplicated transaction should be re-requested. +3) Invalid block with bad coinbase value should be rejected and not +re-requested. +""" +import copy + +from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script +from test_framework.messages import COIN +from test_framework.mininode import P2PDataStore +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class InvalidBlockRequestTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + self.extra_args = [["-whitelist=127.0.0.1"]] + + def run_test(self): + # Add p2p connection to node0 + node = self.nodes[0] # convenience reference to the node + node.add_p2p_connection(P2PDataStore()) + + best_block = node.getblock(node.getbestblockhash()) + tip = int(node.getbestblockhash(), 16) + height = best_block["height"] + 1 + block_time = best_block["time"] + 1 + + self.log.info("Create a new block with an anyone-can-spend coinbase") + + height = 1 + block = create_block(tip, create_coinbase(height), block_time) + block.solve() + # Save the coinbase for later + block1 = block + tip = block.sha256 + node.p2p.send_blocks_and_test([block1], node, success=True) + + self.log.info("Mature the block.") + node.generatetoaddress(100, node.get_deterministic_priv_key().address) + + best_block = node.getblock(node.getbestblockhash()) + tip = int(node.getbestblockhash(), 16) + height = best_block["height"] + 1 + block_time = best_block["time"] + 1 + + # Use merkle-root malleability to generate an invalid block with + # same blockheader. + # Manufacture a block with 3 transactions (coinbase, spend of prior + # coinbase, spend of that spend). Duplicate the 3rd transaction to + # leave merkle root and blockheader unchanged but invalidate the block. + self.log.info("Test merkle root malleability.") + + block2 = create_block(tip, create_coinbase(height), block_time) + block_time += 1 + + # b'0x51' is OP_TRUE + tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x51', amount=50 * COIN) + tx2 = create_tx_with_script(tx1, 0, script_sig=b'\x51', amount=50 * COIN) + + block2.vtx.extend([tx1, tx2]) + block2.hashMerkleRoot = block2.calc_merkle_root() + block2.rehash() + block2.solve() + orig_hash = block2.sha256 + block2_orig = copy.deepcopy(block2) + + # Mutate block 2 + block2.vtx.append(tx2) + assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root()) + assert_equal(orig_hash, block2.rehash()) + assert(block2_orig.vtx != block2.vtx) + + node.p2p.send_blocks_and_test([block2], node, success=False, request_block=False, reject_reason='bad-txns-duplicate') + + # Check transactions for duplicate inputs + self.log.info("Test duplicate input block.") + + block2_orig.vtx[2].vin.append(block2_orig.vtx[2].vin[0]) + block2_orig.vtx[2].rehash() + block2_orig.hashMerkleRoot = block2_orig.calc_merkle_root() + block2_orig.rehash() + block2_orig.solve() + node.p2p.send_blocks_and_test([block2_orig], node, success=False, request_block=False, reject_reason='bad-txns-inputs-duplicate') + + self.log.info("Test very broken block.") + + block3 = create_block(tip, create_coinbase(height), block_time) + block_time += 1 + block3.vtx[0].vout[0].nValue = 100 * COIN # Too high! 
+ block3.vtx[0].sha256 = None + block3.vtx[0].calc_sha256() + block3.hashMerkleRoot = block3.calc_merkle_root() + block3.rehash() + block3.solve() + + node.p2p.send_blocks_and_test([block3], node, success=False, request_block=False, reject_reason='bad-cb-amount') + +if __name__ == '__main__': + InvalidBlockRequestTest().main() diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py new file mode 100755 index 000000000..c8c752d1f --- /dev/null +++ b/test/functional/p2p_invalid_locator.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test node responses to invalid locators. +""" + +from test_framework.messages import msg_getheaders, msg_getblocks, MAX_LOCATOR_SZ +from test_framework.mininode import P2PInterface +from test_framework.test_framework import BitcoinTestFramework + + +class InvalidLocatorTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = False + + def run_test(self): + node = self.nodes[0] # convenience reference to the node + node.generatetoaddress(1, node.get_deterministic_priv_key().address) # Get node out of IBD + + self.log.info('Test max locator size') + block_count = node.getblockcount() + for msg in [msg_getheaders(), msg_getblocks()]: + self.log.info('Wait for disconnect when sending {} hashes in locator'.format(MAX_LOCATOR_SZ + 1)) + node.add_p2p_connection(P2PInterface()) + msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ + 1), -1)] + node.p2p.send_message(msg) + node.p2p.wait_for_disconnect() + node.disconnect_p2ps() + + self.log.info('Wait for response when sending {} hashes in locator'.format(MAX_LOCATOR_SZ)) + node.add_p2p_connection(P2PInterface()) + msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)] + node.p2p.send_message(msg) + if type(msg) == msg_getheaders: + node.p2p.wait_for_header(int(node.getbestblockhash(), 16)) + else: + node.p2p.wait_for_block(int(node.getbestblockhash(), 16)) + + +if __name__ == '__main__': + InvalidLocatorTest().main() diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py new file mode 100755 index 000000000..58e129b57 --- /dev/null +++ b/test/functional/p2p_invalid_tx.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test node responses to invalid transactions. + +In this test we connect to one node over p2p, and test tx requests.""" +from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script +from test_framework.messages import ( + COIN, + COutPoint, + CTransaction, + CTxIn, + CTxOut, +) +from test_framework.mininode import P2PDataStore +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + wait_until, +) + + +class InvalidTxRequestTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def bootstrap_p2p(self, *, num_connections=1): + """Add a P2P connection to the node. 
+ + Helper to connect and wait for version handshake.""" + for _ in range(num_connections): + self.nodes[0].add_p2p_connection(P2PDataStore()) + + def reconnect_p2p(self, **kwargs): + """Tear down and bootstrap the P2P connection to the node. + + The node gets disconnected several times in this test. This helper + method reconnects the p2p and restarts the network thread.""" + self.nodes[0].disconnect_p2ps() + self.bootstrap_p2p(**kwargs) + + def run_test(self): + node = self.nodes[0] # convenience reference to the node + + self.bootstrap_p2p() # Add one p2p connection to the node + + best_block = self.nodes[0].getbestblockhash() + tip = int(best_block, 16) + best_block_time = self.nodes[0].getblock(best_block)['time'] + block_time = best_block_time + 1 + + self.log.info("Create a new block with an anyone-can-spend coinbase.") + height = 1 + block = create_block(tip, create_coinbase(height), block_time) + block.solve() + # Save the coinbase for later + block1 = block + tip = block.sha256 + node.p2p.send_blocks_and_test([block], node, success=True) + + self.log.info("Mature the block.") + self.nodes[0].generatetoaddress(100, self.nodes[0].get_deterministic_priv_key().address) + + # b'\x64' is OP_NOTIF + # Transaction will be rejected with code 16 (REJECT_INVALID) + # and we get disconnected immediately + self.log.info('Test a transaction that is rejected') + tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x64' * 35, amount=50 * COIN - 12000) + node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True) + + # Make two p2p connections to provide the node with orphans + # * p2ps[0] will send valid orphan txs (one with low fee) + # * p2ps[1] will send an invalid orphan tx (and is later disconnected for that) + self.reconnect_p2p(num_connections=2) + + self.log.info('Test orphan transaction handling ... ') + # Create a root transaction that we withhold until all dependent transactions + # are sent out and in the orphan cache + SCRIPT_PUB_KEY_OP_TRUE = b'\x51\x75' * 15 + b'\x51' + tx_withhold = CTransaction() + tx_withhold.vin.append(CTxIn(outpoint=COutPoint(block1.vtx[0].sha256, 0))) + tx_withhold.vout.append(CTxOut(nValue=50 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) + tx_withhold.calc_sha256() + + # Our first orphan tx with some outputs to create further orphan txs + tx_orphan_1 = CTransaction() + tx_orphan_1.vin.append(CTxIn(outpoint=COutPoint(tx_withhold.sha256, 0))) + tx_orphan_1.vout = [CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)] * 3 + tx_orphan_1.calc_sha256() + + # A valid transaction with low fee + tx_orphan_2_no_fee = CTransaction() + tx_orphan_2_no_fee.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 0))) + tx_orphan_2_no_fee.vout.append(CTxOut(nValue=10 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) + + # A valid transaction with sufficient fee + tx_orphan_2_valid = CTransaction() + tx_orphan_2_valid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 1))) + tx_orphan_2_valid.vout.append(CTxOut(nValue=10 * COIN - 12000, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) + tx_orphan_2_valid.calc_sha256() + + # An invalid transaction with negative fee + tx_orphan_2_invalid = CTransaction() + tx_orphan_2_invalid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2))) + tx_orphan_2_invalid.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE)) + + self.log.info('Send the orphans ... 
')
+        # Send valid orphan txs from p2ps[0]
+        node.p2p.send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
+        # Send invalid tx from p2ps[1]
+        node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node, success=False)
+
+        assert_equal(0, node.getmempoolinfo()['size'])  # Mempool should be empty
+        assert_equal(2, len(node.getpeerinfo()))  # p2ps[1] is still connected
+
+        self.log.info('Send the withhold tx ... ')
+        with node.assert_debug_log(expected_msgs=["bad-txns-in-belowout"]):
+            node.p2p.send_txs_and_test([tx_withhold], node, success=True)
+
+        # Transactions that should end up in the mempool
+        expected_mempool = {
+            t.hash
+            for t in [
+                tx_withhold,  # The transaction that is the root for all orphans
+                tx_orphan_1,  # The orphan transaction that splits the coins
+                tx_orphan_2_valid,  # The valid transaction (with sufficient fee)
+            ]
+        }
+        # Transactions that do not end up in the mempool
+        # tx_orphan_2_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx)
+        # tx_orphan_2_invalid, because it has negative fee (p2ps[1] is disconnected for relaying that tx)
+
+        wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12)  # p2ps[1] is no longer connected
+        assert_equal(expected_mempool, set(node.getrawmempool()))
+
+
+if __name__ == '__main__':
+    InvalidTxRequestTest().main()
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
new file mode 100755
index 000000000..336d34a81
--- /dev/null
+++ b/test/functional/p2p_leak.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test message sending before handshake completion.
+
+A node should never send anything other than VERSION/VERACK/REJECT until it's
+received a VERACK.
+ +This test connects to a node and sends it a few messages, trying to entice it +into sending us something it shouldn't.""" + +import time + +from test_framework.messages import msg_getaddr, msg_ping, msg_verack +from test_framework.mininode import mininode_lock, P2PInterface +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import wait_until + +banscore = 10 + +class CLazyNode(P2PInterface): + def __init__(self): + super().__init__() + self.unexpected_msg = False + self.ever_connected = False + + def bad_message(self, message): + self.unexpected_msg = True + self.log.info("should not have received message: %s" % message.command) + + def on_open(self): + self.ever_connected = True + + def on_version(self, message): self.bad_message(message) + def on_verack(self, message): self.bad_message(message) + def on_reject(self, message): self.bad_message(message) + def on_inv(self, message): self.bad_message(message) + def on_addr(self, message): self.bad_message(message) + def on_getdata(self, message): self.bad_message(message) + def on_getblocks(self, message): self.bad_message(message) + def on_tx(self, message): self.bad_message(message) + def on_block(self, message): self.bad_message(message) + def on_getaddr(self, message): self.bad_message(message) + def on_headers(self, message): self.bad_message(message) + def on_getheaders(self, message): self.bad_message(message) + def on_ping(self, message): self.bad_message(message) + def on_mempool(self, message): self.bad_message(message) + def on_pong(self, message): self.bad_message(message) + def on_feefilter(self, message): self.bad_message(message) + def on_sendheaders(self, message): self.bad_message(message) + def on_sendcmpct(self, message): self.bad_message(message) + def on_cmpctblock(self, message): self.bad_message(message) + def on_getblocktxn(self, message): self.bad_message(message) + def on_blocktxn(self, message): self.bad_message(message) + +# Node that never sends a version. We'll use this to send a bunch of messages +# anyway, and eventually get disconnected. +class CNodeNoVersionBan(CLazyNode): + # send a bunch of veracks without sending a message. This should get us disconnected. + # NOTE: implementation-specific check here. Remove if bitcoind ban behavior changes + def on_open(self): + super().on_open() + for i in range(banscore): + self.send_message(msg_verack()) + + def on_reject(self, message): pass + +# Node that never sends a version. This one just sits idle and hopes to receive +# any message (it shouldn't!) +class CNodeNoVersionIdle(CLazyNode): + def __init__(self): + super().__init__() + +# Node that sends a version but not a verack. +class CNodeNoVerackIdle(CLazyNode): + def __init__(self): + self.version_received = False + super().__init__() + + def on_reject(self, message): pass + def on_verack(self, message): pass + # When version is received, don't reply with a verack. Instead, see if the + # node will give us a message that it shouldn't. This is not an exhaustive + # list! 
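+    # Sending a ping and a getaddr is enough bait: a compliant node must not
+    # answer with pong or addr until the handshake has completed.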
+ def on_version(self, message): + self.version_received = True + self.send_message(msg_ping()) + self.send_message(msg_getaddr()) + +class P2PLeakTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [['-banscore=' + str(banscore)]] + + def run_test(self): + no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False, wait_for_verack=False) + no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False, wait_for_verack=False) + no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle()) + + wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock) + wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock) + wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock) + + # Mine a block and make sure that it's not sent to the connected nodes + self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) + + #Give the node enough time to possibly leak out a message + time.sleep(5) + + #This node should have been banned + assert not no_version_bannode.is_connected + + self.nodes[0].disconnect_p2ps() + + # Wait until all connections are closed + wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0) + + # Make sure no unexpected messages came in + assert(no_version_bannode.unexpected_msg == False) + assert(no_version_idlenode.unexpected_msg == False) + assert(no_verack_idlenode.unexpected_msg == False) + + +if __name__ == '__main__': + P2PLeakTest().main() diff --git a/test/functional/p2p_mempool.py b/test/functional/p2p_mempool.py new file mode 100755 index 000000000..a8fcb181e --- /dev/null +++ b/test/functional/p2p_mempool.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test p2p mempool message. + +Test that nodes are disconnected if they send mempool messages when bloom +filters are not enabled. +""" + +from test_framework.messages import msg_mempool +from test_framework.mininode import P2PInterface +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class P2PMempoolTests(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + self.extra_args = [["-peerbloomfilters=0"]] + + def run_test(self): + # Add a p2p connection + self.nodes[0].add_p2p_connection(P2PInterface()) + + #request mempool + self.nodes[0].p2p.send_message(msg_mempool()) + self.nodes[0].p2p.wait_for_disconnect() + + #mininode must be disconnected at this point + assert_equal(len(self.nodes[0].getpeerinfo()), 0) + +if __name__ == '__main__': + P2PMempoolTests().main() diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py new file mode 100755 index 000000000..ec3d336dc --- /dev/null +++ b/test/functional/p2p_node_network_limited.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Tests NODE_NETWORK_LIMITED. 
+ +Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correctly +and that it responds to getdata requests for blocks correctly: + - send a block within 288 + 2 of the tip + - disconnect peers who request blocks older than that.""" +from test_framework.messages import CInv, msg_getdata, msg_verack, NODE_BLOOM, NODE_NETWORK_LIMITED, NODE_WITNESS +from test_framework.mininode import P2PInterface, mininode_lock +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, disconnect_nodes, connect_nodes_bi, sync_blocks, wait_until + +class P2PIgnoreInv(P2PInterface): + firstAddrnServices = 0 + def on_inv(self, message): + # The node will send us invs for other blocks. Ignore them. + pass + def on_addr(self, message): + self.firstAddrnServices = message.addrs[0].nServices + def wait_for_addr(self, timeout=5): + test_function = lambda: self.last_message.get("addr") + wait_until(test_function, timeout=timeout, lock=mininode_lock) + def send_getdata_for_block(self, blockhash): + getdata_request = msg_getdata() + getdata_request.inv.append(CInv(2, int(blockhash, 16))) + self.send_message(getdata_request) + +class NodeNetworkLimitedTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + self.extra_args = [['-prune=550', '-addrmantest'], [], []] + + def disconnect_all(self): + disconnect_nodes(self.nodes[0], 1) + disconnect_nodes(self.nodes[1], 0) + disconnect_nodes(self.nodes[2], 1) + disconnect_nodes(self.nodes[2], 0) + disconnect_nodes(self.nodes[0], 2) + disconnect_nodes(self.nodes[1], 2) + + def setup_network(self): + super(NodeNetworkLimitedTest, self).setup_network() + self.disconnect_all() + + def run_test(self): + node = self.nodes[0].add_p2p_connection(P2PIgnoreInv()) + + expected_services = NODE_BLOOM | NODE_WITNESS | NODE_NETWORK_LIMITED + + self.log.info("Check that node has signalled expected services.") + assert_equal(node.nServices, expected_services) + + self.log.info("Check that the localservices is as expected.") + assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services) + + self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.") + connect_nodes_bi(self.nodes, 0, 1) + blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address) + sync_blocks([self.nodes[0], self.nodes[1]]) + + self.log.info("Make sure we can max retrieve block at tip-288.") + node.send_getdata_for_block(blocks[1]) # last block in valid range + node.wait_for_block(int(blocks[1], 16), timeout=3) + + self.log.info("Requesting block at height 2 (tip-289) must fail (ignored).") + node.send_getdata_for_block(blocks[0]) # first block outside of the 288+2 limit + node.wait_for_disconnect(5) + + self.log.info("Check local address relay, do a fresh connection.") + self.nodes[0].disconnect_p2ps() + node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv()) + node1.send_message(msg_verack()) + + node1.wait_for_addr() + #must relay address with NODE_NETWORK_LIMITED + assert_equal(node1.firstAddrnServices, 1036) + + self.nodes[0].disconnect_p2ps() + node1.wait_for_disconnect() + + # connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer + # because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible + connect_nodes_bi(self.nodes, 0, 2) + try: + sync_blocks([self.nodes[0], self.nodes[2]], timeout=5) + except: + pass + # node2 must remain at height 0 + 
assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0) + + # now connect also to node 1 (non pruned) + connect_nodes_bi(self.nodes, 1, 2) + + # sync must be possible + sync_blocks(self.nodes) + + # disconnect all peers + self.disconnect_all() + + # mine 10 blocks on node 0 (pruned node) + self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + + # connect node1 (non pruned) with node0 (pruned) and check if the can sync + connect_nodes_bi(self.nodes, 0, 1) + + # sync must be possible, node 1 is no longer in IBD and should therefore connect to node 0 (NODE_NETWORK_LIMITED) + sync_blocks([self.nodes[0], self.nodes[1]]) + +if __name__ == '__main__': + NodeNetworkLimitedTest().main() diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py new file mode 100755 index 000000000..afbbfa899 --- /dev/null +++ b/test/functional/p2p_segwit.py @@ -0,0 +1,2065 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test segwit transactions and blocks on P2P network.""" +from binascii import hexlify +import math +import random +import struct +import time + +from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER +from test_framework.key import CECKey, CPubKey +from test_framework.messages import ( + BIP125_SEQUENCE_NUMBER, + CBlock, + CBlockHeader, + CInv, + COutPoint, + CTransaction, + CTxIn, + CTxInWitness, + CTxOut, + CTxWitness, + MAX_BLOCK_BASE_SIZE, + MSG_WITNESS_FLAG, + NODE_NETWORK, + NODE_WITNESS, + msg_block, + msg_getdata, + msg_headers, + msg_inv, + msg_tx, + msg_witness_block, + msg_witness_tx, + ser_uint256, + ser_vector, + sha256, + uint256_from_str, +) +from test_framework.mininode import ( + P2PInterface, + mininode_lock, +) +from test_framework.script import ( + CScript, + CScriptNum, + CScriptOp, + MAX_SCRIPT_ELEMENT_SIZE, + OP_0, + OP_1, + OP_16, + OP_2DROP, + OP_CHECKMULTISIG, + OP_CHECKSIG, + OP_DROP, + OP_DUP, + OP_ELSE, + OP_ENDIF, + OP_EQUAL, + OP_EQUALVERIFY, + OP_HASH160, + OP_IF, + OP_RETURN, + OP_TRUE, + SIGHASH_ALL, + SIGHASH_ANYONECANPAY, + SIGHASH_NONE, + SIGHASH_SINGLE, + SegwitVersion1SignatureHash, + SignatureHash, + hash160, +) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + bytes_to_hex_str, + connect_nodes, + disconnect_nodes, + get_bip9_status, + hex_str_to_bytes, + sync_blocks, + sync_mempools, +) + +# The versionbit bit used to signal activation of SegWit +VB_WITNESS_BIT = 1 +VB_PERIOD = 144 +VB_TOP_BITS = 0x20000000 + +MAX_SIGOP_COST = 80000 + +class UTXO(): + """Used to keep track of anyone-can-spend outputs that we can use in the tests.""" + def __init__(self, sha256, n, value): + self.sha256 = sha256 + self.n = n + self.nValue = value + +def get_p2pkh_script(pubkeyhash): + """Get the script associated with a P2PKH.""" + return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)]) + +def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key): + """Add signature for a P2PK witness program.""" + tx_hash = SegwitVersion1SignatureHash(script, tx_to, in_idx, hashtype, value) + signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1') + tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = 
[signature, script] + tx_to.rehash() + +def get_virtual_size(witness_block): + """Calculate the virtual size of a witness block. + + Virtual size is base + witness/4.""" + base_size = len(witness_block.serialize(with_witness=False)) + total_size = len(witness_block.serialize(with_witness=True)) + # the "+3" is so we round up + vsize = int((3 * base_size + total_size + 3) / 4) + return vsize + +def test_transaction_acceptance(node, p2p, tx, with_witness, accepted, reason=None): + """Send a transaction to the node and check that it's accepted to the mempool + + - Submit the transaction over the p2p interface + - use the getrawmempool rpc to check for acceptance.""" + reason = [reason] if reason else [] + with node.assert_debug_log(expected_msgs=reason): + p2p.send_message(msg_witness_tx(tx) if with_witness else msg_tx(tx)) + p2p.sync_with_ping() + assert_equal(tx.hash in node.getrawmempool(), accepted) + +def test_witness_block(node, p2p, block, accepted, with_witness=True, reason=None): + """Send a block to the node and check that it's accepted + + - Submit the block over the p2p interface + - use the getbestblockhash rpc to check for acceptance.""" + reason = [reason] if reason else [] + with node.assert_debug_log(expected_msgs=reason): + p2p.send_message(msg_witness_block(block) if with_witness else msg_block(block)) + p2p.sync_with_ping() + assert_equal(node.getbestblockhash() == block.hash, accepted) + + +class TestP2PConn(P2PInterface): + def __init__(self): + super().__init__() + self.getdataset = set() + + def on_getdata(self, message): + for inv in message.inv: + self.getdataset.add(inv.hash) + + def announce_tx_and_wait_for_getdata(self, tx, timeout=60, success=True): + with mininode_lock: + self.last_message.pop("getdata", None) + self.send_message(msg_inv(inv=[CInv(1, tx.sha256)])) + if success: + self.wait_for_getdata(timeout) + else: + time.sleep(timeout) + assert not self.last_message.get("getdata") + + def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60): + with mininode_lock: + self.last_message.pop("getdata", None) + self.last_message.pop("getheaders", None) + msg = msg_headers() + msg.headers = [CBlockHeader(block)] + if use_header: + self.send_message(msg) + else: + self.send_message(msg_inv(inv=[CInv(2, block.sha256)])) + self.wait_for_getheaders() + self.send_message(msg) + self.wait_for_getdata() + + def request_block(self, blockhash, inv_type, timeout=60): + with mininode_lock: + self.last_message.pop("block", None) + self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)])) + self.wait_for_block(blockhash, timeout) + return self.last_message["block"].block + +class SegWitTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + # This test tests SegWit both pre and post-activation, so use the normal BIP9 activation. 
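+        # -vbparams=segwit:<start>:<timeout> overrides the BIP9 deployment
+        # window on regtest: 0:999999999999 keeps signalling open indefinitely
+        # (nodes 0 and 1), while 0:0 times the deployment out immediately,
+        # leaving node2 permanently unupgraded.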
+ self.extra_args = [["-whitelist=127.0.0.1", "-vbparams=segwit:0:999999999999"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0", "-vbparams=segwit:0:999999999999"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.setup_nodes() + connect_nodes(self.nodes[0], 1) + connect_nodes(self.nodes[0], 2) + self.sync_all() + + # Helper functions + + def build_next_block(self, version=4): + """Build a block on top of node0's tip.""" + tip = self.nodes[0].getbestblockhash() + height = self.nodes[0].getblockcount() + 1 + block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1 + block = create_block(int(tip, 16), create_coinbase(height), block_time) + block.nVersion = version + block.rehash() + return block + + def update_witness_block_with_transactions(self, block, tx_list, nonce=0): + """Add list of transactions to block, adds witness commitment, then solves.""" + block.vtx.extend(tx_list) + add_witness_commitment(block, nonce) + block.solve() + + def run_test(self): + # Setup the p2p connections + # self.test_node sets NODE_WITNESS|NODE_NETWORK + self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK | NODE_WITNESS) + # self.old_node sets only NODE_NETWORK + self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK) + # self.std_node is for testing node1 (fRequireStandard=true) + self.std_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK | NODE_WITNESS) + + assert self.test_node.nServices & NODE_WITNESS != 0 + + # Keep a place to store utxo's that can be used in later tests + self.utxo = [] + + # Segwit status 'defined' + self.segwit_status = 'defined' + + self.test_non_witness_transaction() + self.test_unnecessary_witness_before_segwit_activation() + self.test_v0_outputs_arent_spendable() + self.test_block_relay() + self.advance_to_segwit_started() + + # Segwit status 'started' + + self.test_getblocktemplate_before_lockin() + self.advance_to_segwit_lockin() + + # Segwit status 'locked_in' + + self.test_unnecessary_witness_before_segwit_activation() + self.test_witness_tx_relay_before_segwit_activation() + self.test_block_relay() + self.test_standardness_v0() + self.advance_to_segwit_active() + + # Segwit status 'active' + + self.test_p2sh_witness() + self.test_witness_commitments() + self.test_block_malleability() + self.test_witness_block_size() + self.test_submit_block() + self.test_extra_witness_data() + self.test_max_witness_push_length() + self.test_max_witness_program_length() + self.test_witness_input_length() + self.test_block_relay() + self.test_tx_relay_after_segwit_activation() + self.test_standardness_v0() + self.test_segwit_versions() + self.test_premature_coinbase_witness_spend() + self.test_uncompressed_pubkey() + self.test_signature_version_1() + self.test_non_standard_witness_blinding() + self.test_non_standard_witness() + self.test_upgrade_after_activation() + self.test_witness_sigops() + + # Individual tests + + def subtest(func): # noqa: N805 + """Wraps the subtests for logging and state assertions.""" + def func_wrapper(self, *args, **kwargs): + self.log.info("Subtest: {} (Segwit status = {})".format(func.__name__, self.segwit_status)) + # Assert segwit status is as expected + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], self.segwit_status) + func(self, *args, **kwargs) + # Each subtest should leave some utxos for the next subtest + assert self.utxo + 
sync_blocks(self.nodes) + # Assert segwit status is as expected at end of subtest + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], self.segwit_status) + + return func_wrapper + + @subtest + def test_non_witness_transaction(self): + """See if sending a regular transaction works, and create a utxo to use in later tests.""" + # Mine a block with an anyone-can-spend coinbase, + # let it mature, then try to spend it. + + block = self.build_next_block(version=1) + block.solve() + self.test_node.send_message(msg_block(block)) + self.test_node.sync_with_ping() # make sure the block was processed + txid = block.vtx[0].sha256 + + self.nodes[0].generate(99) # let the block mature + + # Create a transaction that spends the coinbase + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(txid, 0), b"")) + tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) + tx.calc_sha256() + + # Check that serializing it with or without witness is the same + # This is a sanity check of our testing framework. + assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize()) + + self.test_node.send_message(msg_witness_tx(tx)) + self.test_node.sync_with_ping() # make sure the tx was processed + assert(tx.hash in self.nodes[0].getrawmempool()) + # Save this transaction for later + self.utxo.append(UTXO(tx.sha256, 0, 49 * 100000000)) + self.nodes[0].generate(1) + + @subtest + def test_unnecessary_witness_before_segwit_activation(self): + """Verify that blocks with witnesses are rejected before activation.""" + + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE]))) + tx.wit.vtxinwit.append(CTxInWitness()) + tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])] + + # Verify the hash with witness differs from the txid + # (otherwise our testing framework must be broken!) + tx.rehash() + assert(tx.sha256 != tx.calc_sha256(with_witness=True)) + + # Construct a segwit-signaling block that includes the transaction. + block = self.build_next_block(version=(VB_TOP_BITS | (1 << VB_WITNESS_BIT))) + self.update_witness_block_with_transactions(block, [tx]) + # Sending witness data before activation is not allowed (anti-spam + # rule). + test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='unexpected-witness') + + # But it should not be permanently marked bad... + # Resend without witness information. + self.test_node.send_message(msg_block(block)) + self.test_node.sync_with_ping() + assert_equal(self.nodes[0].getbestblockhash(), block.hash) + + # Update our utxo list; we spent the first entry. + self.utxo.pop(0) + self.utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue)) + + @subtest + def test_block_relay(self): + """Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG. + + This is true regardless of segwit activation. + Also test that we don't ask for blocks from unupgraded peers.""" + + blocktype = 2 | MSG_WITNESS_FLAG + + # test_node has set NODE_WITNESS, so all getdata requests should be for + # witness blocks. 
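+        # MSG_WITNESS_FLAG is the top bit (1 << 30) OR'd into the inv type, so
+        # 2 | MSG_WITNESS_FLAG turns a MSG_BLOCK request into MSG_WITNESS_BLOCK.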
+ # Test announcing a block via inv results in a getdata, and that + # announcing a version 4 or random VB block with a header results in a getdata + block1 = self.build_next_block() + block1.solve() + + self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False) + assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + test_witness_block(self.nodes[0], self.test_node, block1, True) + + block2 = self.build_next_block(version=4) + block2.solve() + + self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True) + assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + test_witness_block(self.nodes[0], self.test_node, block2, True) + + block3 = self.build_next_block(version=(VB_TOP_BITS | (1 << 15))) + block3.solve() + self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True) + assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) + test_witness_block(self.nodes[0], self.test_node, block3, True) + + # Check that we can getdata for witness blocks or regular blocks, + # and the right thing happens. + if self.segwit_status != 'active': + # Before activation, we should be able to request old blocks with + # or without witness, and they should be the same. + chain_height = self.nodes[0].getblockcount() + # Pick 10 random blocks on main chain, and verify that getdata's + # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal. + all_heights = list(range(chain_height + 1)) + random.shuffle(all_heights) + all_heights = all_heights[0:10] + for height in all_heights: + block_hash = self.nodes[0].getblockhash(height) + rpc_block = self.nodes[0].getblock(block_hash, False) + block_hash = int(block_hash, 16) + block = self.test_node.request_block(block_hash, 2) + wit_block = self.test_node.request_block(block_hash, 2 | MSG_WITNESS_FLAG) + assert_equal(block.serialize(True), wit_block.serialize(True)) + assert_equal(block.serialize(), hex_str_to_bytes(rpc_block)) + else: + # After activation, witness blocks and non-witness blocks should + # be different. Verify rpc getblock() returns witness blocks, while + # getdata respects the requested type. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, []) + # This gives us a witness commitment. + assert(len(block.vtx[0].wit.vtxinwit) == 1) + assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + # Now try to retrieve it... 
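+            # Fetch it three ways: the getblock RPC (always returns the witness
+            # serialization), a plain MSG_BLOCK getdata (stripped of witness),
+            # and a MSG_WITNESS_BLOCK getdata (witness serialization).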
+ rpc_block = self.nodes[0].getblock(block.hash, False) + non_wit_block = self.test_node.request_block(block.sha256, 2) + wit_block = self.test_node.request_block(block.sha256, 2 | MSG_WITNESS_FLAG) + assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block)) + assert_equal(wit_block.serialize(False), non_wit_block.serialize()) + assert_equal(wit_block.serialize(True), block.serialize(True)) + + # Test size, vsize, weight + rpc_details = self.nodes[0].getblock(block.hash, True) + assert_equal(rpc_details["size"], len(block.serialize(True))) + assert_equal(rpc_details["strippedsize"], len(block.serialize(False))) + weight = 3 * len(block.serialize(False)) + len(block.serialize(True)) + assert_equal(rpc_details["weight"], weight) + + # Upgraded node should not ask for blocks from unupgraded + block4 = self.build_next_block(version=4) + block4.solve() + self.old_node.getdataset = set() + + # Blocks can be requested via direct-fetch (immediately upon processing the announcement) + # or via parallel download (with an indeterminate delay from processing the announcement) + # so to test that a block is NOT requested, we could guess a time period to sleep for, + # and then check. We can avoid the sleep() by taking advantage of transaction getdata's + # being processed after block getdata's, and announce a transaction as well, + # and then check to see if that particular getdata has been received. + # Since 0.14, inv's will only be responded to with a getheaders, so send a header + # to announce this block. + msg = msg_headers() + msg.headers = [CBlockHeader(block4)] + self.old_node.send_message(msg) + self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0]) + assert(block4.sha256 not in self.old_node.getdataset) + + @subtest + def test_v0_outputs_arent_spendable(self): + """Test that v0 outputs aren't spendable before segwit activation. + + ~6 months after segwit activation, the SCRIPT_VERIFY_WITNESS flag was + backdated so that it applies to all blocks, going back to the genesis + block. + + Consequently, version 0 witness outputs are never spendable without + witness, and so can't be spent before segwit activation (the point at which + blocks are permitted to contain witnesses).""" + + # node2 doesn't need to be connected for this test. + # (If it's connected, node0 may propagate an invalid block to it over + # compact blocks and the nodes would have inconsistent tips.) + disconnect_nodes(self.nodes[0], 2) + + # Create two outputs, a p2wsh and p2sh-p2wsh + witness_program = CScript([OP_TRUE]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + + p2sh_pubkey = hash160(script_pubkey) + p2sh_script_pubkey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]) + + value = self.utxo[0].nValue // 3 + + tx = CTransaction() + tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b'')] + tx.vout = [CTxOut(value, script_pubkey), CTxOut(value, p2sh_script_pubkey)] + tx.vout.append(CTxOut(value, CScript([OP_TRUE]))) + tx.rehash() + txid = tx.sha256 + + # Add it to a block + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + # Verify that segwit isn't activated. A block serialized with witness + # should be rejected prior to activation. + test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness') + # Now send the block without witness. 
It should be accepted + test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=False) + + # Now try to spend the outputs. This should fail since SCRIPT_VERIFY_WITNESS is always enabled. + p2wsh_tx = CTransaction() + p2wsh_tx.vin = [CTxIn(COutPoint(txid, 0), b'')] + p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))] + p2wsh_tx.wit.vtxinwit.append(CTxInWitness()) + p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])] + p2wsh_tx.rehash() + + p2sh_p2wsh_tx = CTransaction() + p2sh_p2wsh_tx.vin = [CTxIn(COutPoint(txid, 1), CScript([script_pubkey]))] + p2sh_p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))] + p2sh_p2wsh_tx.wit.vtxinwit.append(CTxInWitness()) + p2sh_p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])] + p2sh_p2wsh_tx.rehash() + + for tx in [p2wsh_tx, p2sh_p2wsh_tx]: + + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + + # When the block is serialized with a witness, the block will be rejected because witness + # data isn't allowed in blocks that don't commit to witness data. + test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness') + + # When the block is serialized without witness, validation fails because the transaction is + # invalid (transactions are always validated with SCRIPT_VERIFY_WITNESS so a segwit v0 transaction + # without a witness is invalid). + # Note: The reject reason for this failure could be + # 'block-validation-failed' (if script check threads > 1) or + # 'non-mandatory-script-verify-flag (Witness program was passed an + # empty witness)' (otherwise). + # TODO: support multiple acceptable reject reasons. + test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False) + + connect_nodes(self.nodes[0], 2) + + self.utxo.pop(0) + self.utxo.append(UTXO(txid, 2, value)) + + @subtest + def advance_to_segwit_started(self): + """Mine enough blocks for segwit's vb state to be 'started'.""" + height = self.nodes[0].getblockcount() + # Will need to rewrite the tests here if we are past the first period + assert(height < VB_PERIOD - 1) + # Advance to end of period, status should now be 'started' + self.nodes[0].generate(VB_PERIOD - height - 1) + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') + self.segwit_status = 'started' + + @subtest + def test_getblocktemplate_before_lockin(self): + # Node0 is segwit aware, node2 is not. + for node in [self.nodes[0], self.nodes[2]]: + gbt_results = node.getblocktemplate() + block_version = gbt_results['version'] + # If we're not indicating segwit support, we will still be + # signalling for segwit activation. + assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0]) + # If we don't specify the segwit rule, then we won't get a default + # commitment. + assert('default_witness_commitment' not in gbt_results) + + # Workaround: + # Can either change the tip, or change the mempool and wait 5 seconds + # to trigger a recomputation of getblocktemplate. 
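+        # (getblocktemplate only rebuilds its cached template when the tip
+        # changes, or when the mempool has changed and at least 5 seconds have
+        # passed; the mocktime bump below satisfies the latter without
+        # sleeping.)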
+ txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16) + # Using mocktime lets us avoid sleep() + sync_mempools(self.nodes) + self.nodes[0].setmocktime(int(time.time()) + 10) + self.nodes[2].setmocktime(int(time.time()) + 10) + + for node in [self.nodes[0], self.nodes[2]]: + gbt_results = node.getblocktemplate({"rules": ["segwit"]}) + block_version = gbt_results['version'] + if node == self.nodes[2]: + # If this is a non-segwit node, we should still not get a witness + # commitment, nor a version bit signalling segwit. + assert_equal(block_version & (1 << VB_WITNESS_BIT), 0) + assert('default_witness_commitment' not in gbt_results) + else: + # For segwit-aware nodes, check the version bit and the witness + # commitment are correct. + assert(block_version & (1 << VB_WITNESS_BIT) != 0) + assert('default_witness_commitment' in gbt_results) + witness_commitment = gbt_results['default_witness_commitment'] + + # Check that default_witness_commitment is present. + witness_root = CBlock.get_merkle_root([ser_uint256(0), + ser_uint256(txid)]) + script = get_witness_script(witness_root, 0) + assert_equal(witness_commitment, bytes_to_hex_str(script)) + + # undo mocktime + self.nodes[0].setmocktime(0) + self.nodes[2].setmocktime(0) + + @subtest + def advance_to_segwit_lockin(self): + """Mine enough blocks to lock in segwit, but don't activate.""" + height = self.nodes[0].getblockcount() + # Advance to end of period, and verify lock-in happens at the end + self.nodes[0].generate(VB_PERIOD - 1) + height = self.nodes[0].getblockcount() + assert((height % VB_PERIOD) == VB_PERIOD - 2) + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') + self.nodes[0].generate(1) + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') + self.segwit_status = 'locked_in' + + @subtest + def test_witness_tx_relay_before_segwit_activation(self): + + # Generate a transaction that doesn't require a witness, but send it + # with a witness. Should be rejected for premature-witness, but should + # not be added to recently rejected list. + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))) + tx.wit.vtxinwit.append(CTxInWitness()) + tx.wit.vtxinwit[0].scriptWitness.stack = [b'a'] + tx.rehash() + + tx_hash = tx.sha256 + tx_value = tx.vout[0].nValue + + # Verify that if a peer doesn't set nServices to include NODE_WITNESS, + # the getdata is just for the non-witness portion. + self.old_node.announce_tx_and_wait_for_getdata(tx) + assert(self.old_node.last_message["getdata"].inv[0].type == 1) + + # Since we haven't delivered the tx yet, inv'ing the same tx from + # a witness transaction ought not result in a getdata. 
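+        # (The txid is already in flight from old_node's getdata, so a second
+        # announcement, witness peer or not, should not trigger another
+        # request.)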
+        self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2, success=False)
+
+        # Delivering this transaction with witness should fail (no matter
+        # whom it's from)
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+        assert_equal(len(self.nodes[1].getrawmempool()), 0)
+        test_transaction_acceptance(self.nodes[0], self.old_node, tx, with_witness=True, accepted=False)
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=False)
+
+        # But eliminating the witness should fix it
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
+
+        # Cleanup: mine the first transaction and update utxo
+        self.nodes[0].generate(1)
+        assert_equal(len(self.nodes[0].getrawmempool()), 0)
+
+        self.utxo.pop(0)
+        self.utxo.append(UTXO(tx_hash, 0, tx_value))
+
+    @subtest
+    def test_standardness_v0(self):
+        """Test V0 txout standardness.
+
+        V0 segwit outputs and inputs are always standard.
+        V0 segwit inputs may only be mined after activation, not before."""
+
+        witness_program = CScript([OP_TRUE])
+        witness_hash = sha256(witness_program)
+        script_pubkey = CScript([OP_0, witness_hash])
+
+        p2sh_pubkey = hash160(witness_program)
+        p2sh_script_pubkey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
+
+        # First prepare a p2sh output (so that spending it will pass standardness)
+        p2sh_tx = CTransaction()
+        p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
+        p2sh_tx.vout = [CTxOut(self.utxo[0].nValue - 1000, p2sh_script_pubkey)]
+        p2sh_tx.rehash()
+
+        # Mine it on test_node to create the confirmed output.
+        test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_tx, with_witness=True, accepted=True)
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+        # Now test standardness of v0 P2WSH outputs.
+        # Start by creating a transaction with two outputs.
+        tx = CTransaction()
+        tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
+        tx.vout = [CTxOut(p2sh_tx.vout[0].nValue - 10000, script_pubkey)]
+        tx.vout.append(CTxOut(8000, script_pubkey))  # Might burn this later
+        tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER  # Just to have the option to bump this tx from the mempool
+        tx.rehash()
+
+        # This is always accepted, since the mempool policy is to consider segwit as always active
+        # and thus allow segwit outputs
+        test_transaction_acceptance(self.nodes[1], self.std_node, tx, with_witness=True, accepted=True)
+
+        # Now create something that looks like a P2PKH output. This won't be spendable.
+        script_pubkey = CScript([OP_0, hash160(witness_hash)])
+        tx2 = CTransaction()
+        # tx was accepted, so we spend the second output.
+        tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
+        tx2.vout = [CTxOut(7000, script_pubkey)]
+        tx2.wit.vtxinwit.append(CTxInWitness())
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
+        tx2.rehash()
+
+        test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=True)
+
+        # Now update self.utxo for later tests.
+        tx3 = CTransaction()
+        # tx and tx2 were both accepted. Don't bother trying to reclaim the
+        # P2PKH output; just send tx's first output back to an anyone-can-spend.
+ sync_mempools([self.nodes[0], self.nodes[1]]) + tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")] + tx3.vout = [CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))] + tx3.wit.vtxinwit.append(CTxInWitness()) + tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program] + tx3.rehash() + if self.segwit_status != 'active': + # Just check mempool acceptance, but don't add the transaction to the mempool, since witness is disallowed + # in blocks and the tx is impossible to mine right now. + assert_equal(self.nodes[0].testmempoolaccept([bytes_to_hex_str(tx3.serialize_with_witness())]), [{'txid': tx3.hash, 'allowed': True}]) + # Create the same output as tx3, but by replacing tx + tx3_out = tx3.vout[0] + tx3 = tx + tx3.vout = [tx3_out] + tx3.rehash() + assert_equal(self.nodes[0].testmempoolaccept([bytes_to_hex_str(tx3.serialize_with_witness())]), [{'txid': tx3.hash, 'allowed': True}]) + test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True) + + self.nodes[0].generate(1) + sync_blocks(self.nodes) + self.utxo.pop(0) + self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) + assert_equal(len(self.nodes[1].getrawmempool()), 0) + + @subtest + def advance_to_segwit_active(self): + """Mine enough blocks to activate segwit.""" + height = self.nodes[0].getblockcount() + self.nodes[0].generate(VB_PERIOD - (height % VB_PERIOD) - 2) + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') + self.nodes[0].generate(1) + assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active') + self.segwit_status = 'active' + + @subtest + def test_p2sh_witness(self): + """Test P2SH wrapped witness programs.""" + + # Prepare the p2sh-wrapped witness output + witness_program = CScript([OP_DROP, OP_TRUE]) + witness_hash = sha256(witness_program) + p2wsh_pubkey = CScript([OP_0, witness_hash]) + p2sh_witness_hash = hash160(p2wsh_pubkey) + script_pubkey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) + script_sig = CScript([p2wsh_pubkey]) # a push of the redeem script + + # Fund the P2SH output + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey)) + tx.rehash() + + # Verify mempool acceptance and block validity + test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True) + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=True) + sync_blocks(self.nodes) + + # Now test attempts to spend the output. + spend_tx = CTransaction() + spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), script_sig)) + spend_tx.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))) + spend_tx.rehash() + + # This transaction should not be accepted into the mempool pre- or + # post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which + # will require a witness to spend a witness program regardless of + # segwit activation. Note that older bitcoind's that are not + # segwit-aware would also reject this for failing CLEANSTACK. 
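+        # (For reference, a valid P2SH-P2WSH spend supplies both pieces:
+        #    scriptSig: a single push of the P2WSH redeem script (script_sig above)
+        #    witness:   the witness-script arguments plus the witness script itself
+        #  spend_tx as built so far carries only the scriptSig and an empty
+        #  witness, so mempool validation fails with the empty-witness error
+        #  checked below.)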
+ with self.nodes[0].assert_debug_log( + expected_msgs=(spend_tx.hash, 'was not accepted: non-mandatory-script-verify-flag (Witness program was passed an empty witness)')): + test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False) + + # Try to put the witness script in the scriptSig, should also fail. + spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a']) + spend_tx.rehash() + with self.nodes[0].assert_debug_log( + expected_msgs=('Not relaying invalid transaction {}'.format(spend_tx.hash), 'was not accepted: mandatory-script-verify-flag-failed (Script evaluated without error but finished with a false/empty top stack element)')): + test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False) + + # Now put the witness script in the witness, should succeed after + # segwit activates. + spend_tx.vin[0].scriptSig = script_sig + spend_tx.rehash() + spend_tx.wit.vtxinwit.append(CTxInWitness()) + spend_tx.wit.vtxinwit[0].scriptWitness.stack = [b'a', witness_program] + + # Verify mempool acceptance + test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=True, accepted=True) + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [spend_tx]) + + # If we're after activation, then sending this with witnesses should be valid. + # This no longer works before activation, because SCRIPT_VERIFY_WITNESS + # is always set. + # TODO: rewrite this test to make clear that it only works after activation. + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Update self.utxo + self.utxo.pop(0) + self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue)) + + @subtest + def test_witness_commitments(self): + """Test witness commitments. + + This test can only be run after segwit has activated.""" + + # First try a correct witness commitment. + block = self.build_next_block() + add_witness_commitment(block) + block.solve() + + # Test the test -- witness serialization should be different + assert(msg_witness_block(block).serialize() != msg_block(block).serialize()) + + # This empty block should be valid. + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Try to tweak the nonce + block_2 = self.build_next_block() + add_witness_commitment(block_2, nonce=28) + block_2.solve() + + # The commitment should have changed! + assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]) + + # This should also be valid. 
+ test_witness_block(self.nodes[0], self.test_node, block_2, accepted=True) + + # Now test commitments with actual transactions + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + + # Let's construct a witness program + witness_program = CScript([OP_TRUE]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey)) + tx.rehash() + + # tx2 will spend tx1, and send back to a regular anyone-can-spend address + tx2 = CTransaction() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program)) + tx2.wit.vtxinwit.append(CTxInWitness()) + tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] + tx2.rehash() + + block_3 = self.build_next_block() + self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1) + # Add an extra OP_RETURN output that matches the witness commitment template, + # even though it has extra data after the incorrect commitment. + # This block should fail. + block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10]))) + block_3.vtx[0].rehash() + block_3.hashMerkleRoot = block_3.calc_merkle_root() + block_3.rehash() + block_3.solve() + + test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False) + + # Add a different commitment with different nonce, but in the + # right location, and with some funds burned(!). + # This should succeed (nValue shouldn't affect finding the + # witness commitment). + add_witness_commitment(block_3, nonce=0) + block_3.vtx[0].vout[0].nValue -= 1 + block_3.vtx[0].vout[-1].nValue += 1 + block_3.vtx[0].rehash() + block_3.hashMerkleRoot = block_3.calc_merkle_root() + block_3.rehash() + assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns + block_3.solve() + test_witness_block(self.nodes[0], self.test_node, block_3, accepted=True) + + # Finally test that a block with no witness transactions can + # omit the commitment. + block_4 = self.build_next_block() + tx3 = CTransaction() + tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) + tx3.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_program)) + tx3.rehash() + block_4.vtx.append(tx3) + block_4.hashMerkleRoot = block_4.calc_merkle_root() + block_4.solve() + test_witness_block(self.nodes[0], self.test_node, block_4, with_witness=False, accepted=True) + + # Update available utxo's for use in later test. + self.utxo.pop(0) + self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) + + @subtest + def test_block_malleability(self): + + # Make sure that a block that has too big a virtual size + # because of a too-large coinbase witness is not permanently + # marked bad. 
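+        # (Virtual size follows BIP141 weight: vsize = (3 * base_size + total_size) / 4,
+        #  rounded up. The ~5,000,000-byte coinbase witness appended below adds
+        #  roughly 1.25M vbytes on its own, which is what pushes the block past
+        #  MAX_BLOCK_BASE_SIZE.)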
+ block = self.build_next_block() + add_witness_commitment(block) + block.solve() + + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a' * 5000000) + assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE) + + # We can't send over the p2p network, because this is too big to relay + # TODO: repeat this test with a block that can be relayed + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) + + assert(self.nodes[0].getbestblockhash() != block.hash) + + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop() + assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE) + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) + + assert(self.nodes[0].getbestblockhash() == block.hash) + + # Now make sure that malleating the witness reserved value doesn't + # result in a block permanently marked bad. + block = self.build_next_block() + add_witness_commitment(block) + block.solve() + + # Change the nonce -- should not cause the block to be permanently + # failed + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(1)] + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Changing the witness reserved value doesn't change the block hash + block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)] + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + @subtest + def test_witness_block_size(self): + # TODO: Test that non-witness carrying blocks can't exceed 1MB + # Skipping this test for now; this is covered in p2p-fullblocktest.py + + # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB. + block = self.build_next_block() + + assert(len(self.utxo) > 0) + + # Create a P2WSH transaction. + # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE. + # This should give us plenty of room to tweak the spending tx's + # virtual size. + NUM_DROPS = 200 # 201 max ops per script! 
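+        # (MAX_OPS_PER_SCRIPT allows 201 executed non-push opcodes, so 200
+        #  OP_2DROPs plus OP_TRUE stays just inside the limit. Each OP_2DROP
+        #  consumes two stack items, which is why each input's witness below
+        #  carries 2 * NUM_DROPS filler elements ahead of the witness program.)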
+ NUM_OUTPUTS = 50 + + witness_program = CScript([OP_2DROP] * NUM_DROPS + [OP_TRUE]) + witness_hash = uint256_from_str(sha256(witness_program)) + script_pubkey = CScript([OP_0, ser_uint256(witness_hash)]) + + prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n) + value = self.utxo[0].nValue + + parent_tx = CTransaction() + parent_tx.vin.append(CTxIn(prevout, b"")) + child_value = int(value / NUM_OUTPUTS) + for i in range(NUM_OUTPUTS): + parent_tx.vout.append(CTxOut(child_value, script_pubkey)) + parent_tx.vout[0].nValue -= 50000 + assert(parent_tx.vout[0].nValue > 0) + parent_tx.rehash() + + child_tx = CTransaction() + for i in range(NUM_OUTPUTS): + child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b"")) + child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))] + for i in range(NUM_OUTPUTS): + child_tx.wit.vtxinwit.append(CTxInWitness()) + child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a' * 195] * (2 * NUM_DROPS) + [witness_program] + child_tx.rehash() + self.update_witness_block_with_transactions(block, [parent_tx, child_tx]) + + vsize = get_virtual_size(block) + additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize) * 4 + i = 0 + while additional_bytes > 0: + # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1 + extra_bytes = min(additional_bytes + 1, 55) + block.vtx[-1].wit.vtxinwit[int(i / (2 * NUM_DROPS))].scriptWitness.stack[i % (2 * NUM_DROPS)] = b'a' * (195 + extra_bytes) + additional_bytes -= extra_bytes + i += 1 + + block.vtx[0].vout.pop() # Remove old commitment + add_witness_commitment(block) + block.solve() + vsize = get_virtual_size(block) + assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1) + # Make sure that our test case would exceed the old max-network-message + # limit + assert(len(block.serialize(True)) > 2 * 1024 * 1024) + + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Now resize the second transaction to make the block fit. + cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0]) + block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (cur_length - 1) + block.vtx[0].vout.pop() + add_witness_commitment(block) + block.solve() + assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE) + + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Update available utxo's + self.utxo.pop(0) + self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue)) + + @subtest + def test_submit_block(self): + """Test that submitblock adds the nonce automatically when possible.""" + block = self.build_next_block() + + # Try using a custom nonce and then don't supply it. + # This shouldn't possibly work. + add_witness_commitment(block, nonce=1) + block.vtx[0].wit = CTxWitness() # drop the nonce + block.solve() + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) + assert(self.nodes[0].getbestblockhash() != block.hash) + + # Now redo commitment with the standard nonce, but let bitcoind fill it in. + add_witness_commitment(block, nonce=0) + block.vtx[0].wit = CTxWitness() + block.solve() + self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) + assert_equal(self.nodes[0].getbestblockhash(), block.hash) + + # This time, add a tx with non-empty witness, but don't supply + # the commitment. + block_2 = self.build_next_block() + + add_witness_commitment(block_2) + + block_2.solve() + + # Drop commitment and nonce -- submitblock should not fill in. 
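+        # (submitblock only fills in the default witness reserved value; it
+        #  does not construct a missing commitment output. With both the
+        #  commitment and the nonce stripped, the block commits to nothing
+        #  and must be rejected.)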
+ block_2.vtx[0].vout.pop() + block_2.vtx[0].wit = CTxWitness() + + self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True))) + # Tip should not advance! + assert(self.nodes[0].getbestblockhash() != block_2.hash) + + @subtest + def test_extra_witness_data(self): + """Test extra witness data in a transaction.""" + + block = self.build_next_block() + + witness_program = CScript([OP_DROP, OP_TRUE]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + + # First try extra witness data on a tx that doesn't require a witness + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 2000, script_pubkey)) + tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output + tx.wit.vtxinwit.append(CTxInWitness()) + tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])] + tx.rehash() + self.update_witness_block_with_transactions(block, [tx]) + + # Extra witness data should not be allowed. + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Try extra signature data. Ok if we're not spending a witness output. + block.vtx[1].wit.vtxinwit = [] + block.vtx[1].vin[0].scriptSig = CScript([OP_0]) + block.vtx[1].rehash() + add_witness_commitment(block) + block.solve() + + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Now try extra witness/signature data on an input that DOES require a + # witness + tx2 = CTransaction() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness + tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) + tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()]) + tx2.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program] + tx2.wit.vtxinwit[1].scriptWitness.stack = [CScript([OP_TRUE])] + + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx2]) + + # This has extra witness data, so it should fail. + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Now get rid of the extra witness, but add extra scriptSig data + tx2.vin[0].scriptSig = CScript([OP_TRUE]) + tx2.vin[1].scriptSig = CScript([OP_TRUE]) + tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0) + tx2.wit.vtxinwit[1].scriptWitness.stack = [] + tx2.rehash() + add_witness_commitment(block) + block.solve() + + # This has extra signature data for a witness input, so it should fail. 
+ test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Now get rid of the extra scriptsig on the witness input, and verify + # success (even with extra scriptsig data in the non-witness input) + tx2.vin[0].scriptSig = b"" + tx2.rehash() + add_witness_commitment(block) + block.solve() + + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Update utxo for later tests + self.utxo.pop(0) + self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) + + @subtest + def test_max_witness_push_length(self): + """Test that witness stack can only allow up to 520 byte pushes.""" + + block = self.build_next_block() + + witness_program = CScript([OP_DROP, OP_TRUE]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey)) + tx.rehash() + + tx2 = CTransaction() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE]))) + tx2.wit.vtxinwit.append(CTxInWitness()) + # First try a 521-byte stack element + tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * (MAX_SCRIPT_ELEMENT_SIZE + 1), witness_program] + tx2.rehash() + + self.update_witness_block_with_transactions(block, [tx, tx2]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Now reduce the length of the stack element + tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (MAX_SCRIPT_ELEMENT_SIZE) + + add_witness_commitment(block) + block.solve() + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Update the utxo for later tests + self.utxo.pop() + self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) + + @subtest + def test_max_witness_program_length(self): + """Test that witness outputs greater than 10kB can't be spent.""" + + MAX_PROGRAM_LENGTH = 10000 + + # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes. 
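+        # (Arithmetic: each 520-byte push costs 3 bytes of OP_PUSHDATA2
+        #  overhead, so 19 pushes serialize to 19 * 523 = 9937 bytes; 63
+        #  OP_DROPs plus OP_TRUE add the 64 opcode-bytes, giving 10001 bytes,
+        #  i.e. MAX_PROGRAM_LENGTH + 1.)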
+        long_witness_program = CScript([b'a' * 520] * 19 + [OP_DROP] * 63 + [OP_TRUE])
+        assert(len(long_witness_program) == MAX_PROGRAM_LENGTH + 1)
+        long_witness_hash = sha256(long_witness_program)
+        long_script_pubkey = CScript([OP_0, long_witness_hash])
+
+        block = self.build_next_block()
+
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
+        tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, long_script_pubkey))
+        tx.rehash()
+
+        tx2 = CTransaction()
+        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
+        tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
+        tx2.wit.vtxinwit.append(CTxInWitness())
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 44 + [long_witness_program]
+        tx2.rehash()
+
+        self.update_witness_block_with_transactions(block, [tx, tx2])
+
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
+
+        # Try again with one less byte in the witness program
+        witness_program = CScript([b'a' * 520] * 19 + [OP_DROP] * 62 + [OP_TRUE])
+        assert(len(witness_program) == MAX_PROGRAM_LENGTH)
+        witness_hash = sha256(witness_program)
+        script_pubkey = CScript([OP_0, witness_hash])
+
+        tx.vout[0] = CTxOut(tx.vout[0].nValue, script_pubkey)
+        tx.rehash()
+        tx2.vin[0].prevout.hash = tx.sha256
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 43 + [witness_program]
+        tx2.rehash()
+        block.vtx = [block.vtx[0]]
+        self.update_witness_block_with_transactions(block, [tx, tx2])
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
+
+        self.utxo.pop()
+        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
+
+    @subtest
+    def test_witness_input_length(self):
+        """Test that vin length must match vtxinwit length."""
+
+        witness_program = CScript([OP_DROP, OP_TRUE])
+        witness_hash = sha256(witness_program)
+        script_pubkey = CScript([OP_0, witness_hash])
+
+        # Create a transaction that splits our utxo into many outputs
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
+        value = self.utxo[0].nValue
+        for i in range(10):
+            tx.vout.append(CTxOut(int(value / 10), script_pubkey))
+        tx.vout[0].nValue -= 1000
+        assert(tx.vout[0].nValue >= 0)
+
+        block = self.build_next_block()
+        self.update_witness_block_with_transactions(block, [tx])
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
+
+        # Try various ways to spend tx that should all break.
+        # This "broken" transaction serializer will not normalize
+        # the length of vtxinwit.
+        class BrokenCTransaction(CTransaction):
+            def serialize_with_witness(self):
+                flags = 0
+                if not self.wit.is_null():
+                    flags |= 1
+                r = b""
+                r += struct.pack("<i", self.nVersion)
+                if flags:
+                    dummy = []
+                    r += ser_vector(dummy)
+                    r += struct.pack("<B", flags)
+                r += ser_vector(self.vin)
+                r += ser_vector(self.vout)
+                if flags & 1:
+                    r += self.wit.serialize()
+                r += struct.pack("<I", self.nLockTime)
+                return r
+
+        # Finally, verify that version 0 -> version 1 transactions
+        # are non-standard
+        script_pubkey = CScript([CScriptOp(OP_1), witness_hash])
+        tx2 = CTransaction()
+        tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
+        tx2.vout = [CTxOut(tx.vout[0].nValue - 1000, script_pubkey)]
+        tx2.wit.vtxinwit.append(CTxInWitness())
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
+        tx2.rehash()
+        # Gets accepted to test_node, because standardness of outputs isn't
+        # checked with fRequireStandard
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
+        test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=False)
+        temp_utxo.pop()  # last entry in temp_utxo was the output we just spent
+        temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
+
+        # Spend everything in temp_utxo back to an OP_TRUE output.
+ tx3 = CTransaction() + total_value = 0 + for i in temp_utxo: + tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b"")) + tx3.wit.vtxinwit.append(CTxInWitness()) + total_value += i.nValue + tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program] + tx3.vout.append(CTxOut(total_value - 1000, CScript([OP_TRUE]))) + tx3.rehash() + # Spending a higher version witness output is not allowed by policy, + # even with fRequireStandard=false. + test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False, reason="reserved for soft-fork upgrades") + + # Building a block with the transaction must be valid, however. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx2, tx3]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + sync_blocks(self.nodes) + + # Add utxo to our list + self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) + + @subtest + def test_premature_coinbase_witness_spend(self): + + block = self.build_next_block() + # Change the output of the block to be a witness output. + witness_program = CScript([OP_TRUE]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + block.vtx[0].vout[0].scriptPubKey = script_pubkey + # This next line will rehash the coinbase and update the merkle + # root, and solve. + self.update_witness_block_with_transactions(block, []) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + spend_tx = CTransaction() + spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")] + spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)] + spend_tx.wit.vtxinwit.append(CTxInWitness()) + spend_tx.wit.vtxinwit[0].scriptWitness.stack = [witness_program] + spend_tx.rehash() + + # Now test a premature spend. + self.nodes[0].generate(98) + sync_blocks(self.nodes) + block2 = self.build_next_block() + self.update_witness_block_with_transactions(block2, [spend_tx]) + test_witness_block(self.nodes[0], self.test_node, block2, accepted=False) + + # Advancing one more block should allow the spend. + self.nodes[0].generate(1) + block2 = self.build_next_block() + self.update_witness_block_with_transactions(block2, [spend_tx]) + test_witness_block(self.nodes[0], self.test_node, block2, accepted=True) + sync_blocks(self.nodes) + + @subtest + def test_uncompressed_pubkey(self): + """Test uncompressed pubkey validity in segwit transactions. + + Uncompressed pubkeys are no longer supported in default relay policy, + but (for now) are still valid in blocks.""" + + # Segwit transactions using uncompressed pubkeys are not accepted + # under default policy, but should still pass consensus. + key = CECKey() + key.set_secretbytes(b"9") + key.set_compressed(False) + pubkey = CPubKey(key.get_pubkey()) + assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey + + utxo = self.utxo.pop(0) + + # Test 1: P2WPKH + # First create a P2WPKH output that uses an uncompressed pubkey + pubkeyhash = hash160(pubkey) + script_pkh = CScript([OP_0, pubkeyhash]) + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b"")) + tx.vout.append(CTxOut(utxo.nValue - 1000, script_pkh)) + tx.rehash() + + # Confirm it in a block. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Now try to spend it. Send it to a P2WSH output, which we'll + # use in the next test. 
+ witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) + witness_hash = sha256(witness_program) + script_wsh = CScript([OP_0, witness_hash]) + + tx2 = CTransaction() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) + tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_wsh)) + script = get_p2pkh_script(pubkeyhash) + sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) + signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL + tx2.wit.vtxinwit.append(CTxInWitness()) + tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey] + tx2.rehash() + + # Should fail policy test. + test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') + # But passes consensus. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx2]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Test 2: P2WSH + # Try to spend the P2WSH output created in last test. + # Send it to a P2SH(P2WSH) output, which we'll use in the next test. + p2sh_witness_hash = hash160(script_wsh) + script_p2sh = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) + script_sig = CScript([script_wsh]) + + tx3 = CTransaction() + tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) + tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, script_p2sh)) + tx3.wit.vtxinwit.append(CTxInWitness()) + sign_p2pk_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key) + + # Should fail policy test. + test_transaction_acceptance(self.nodes[0], self.test_node, tx3, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') + # But passes consensus. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx3]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Test 3: P2SH(P2WSH) + # Try to spend the P2SH output created in the last test. + # Send it to a P2PKH output, which we'll use in the next test. + script_pubkey = get_p2pkh_script(pubkeyhash) + tx4 = CTransaction() + tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), script_sig)) + tx4.vout.append(CTxOut(tx3.vout[0].nValue - 1000, script_pubkey)) + tx4.wit.vtxinwit.append(CTxInWitness()) + sign_p2pk_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key) + + # Should fail policy test. + test_transaction_acceptance(self.nodes[0], self.test_node, tx4, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx4]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Test 4: Uncompressed pubkeys should still be valid in non-segwit + # transactions. + tx5 = CTransaction() + tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b"")) + tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE]))) + (sig_hash, err) = SignatureHash(script_pubkey, tx5, 0, SIGHASH_ALL) + signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL + tx5.vin[0].scriptSig = CScript([signature, pubkey]) + tx5.rehash() + # Should pass policy and consensus. 
+ test_transaction_acceptance(self.nodes[0], self.test_node, tx5, True, True) + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx5]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue)) + + @subtest + def test_signature_version_1(self): + + key = CECKey() + key.set_secretbytes(b"9") + pubkey = CPubKey(key.get_pubkey()) + + witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + + # First create a witness output for use in the tests. + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey)) + tx.rehash() + + test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=True) + # Mine this transaction in preparation for following tests. + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + sync_blocks(self.nodes) + self.utxo.pop(0) + + # Test each hashtype + prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue) + for sigflag in [0, SIGHASH_ANYONECANPAY]: + for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]: + hashtype |= sigflag + block = self.build_next_block() + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b"")) + tx.vout.append(CTxOut(prev_utxo.nValue - 1000, script_pubkey)) + tx.wit.vtxinwit.append(CTxInWitness()) + # Too-large input value + sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue + 1, key) + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Too-small input value + sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue - 1, key) + block.vtx.pop() # remove last tx + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=False) + + # Now try correct value + sign_p2pk_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key) + block.vtx.pop() + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue) + + # Test combinations of signature hashes. + # Split the utxo into a lot of outputs. + # Randomly choose up to 10 to spend, sign with different hashtypes, and + # output to a random number of outputs. Repeat NUM_SIGHASH_TESTS times. + # Ensure that we've tested a situation where we use SIGHASH_SINGLE with + # an input index > number of outputs. 
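+        # (Under BIP143, SIGHASH_SINGLE with an input index >= the number of
+        #  outputs signs a zeroed hashOutputs rather than reproducing the
+        #  legacy "sighash of 1" quirk, so the out-of-bounds case is worth
+        #  exercising explicitly; the flag below records whether we hit it.)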
+ NUM_SIGHASH_TESTS = 500 + temp_utxos = [] + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b"")) + split_value = prev_utxo.nValue // NUM_SIGHASH_TESTS + for i in range(NUM_SIGHASH_TESTS): + tx.vout.append(CTxOut(split_value, script_pubkey)) + tx.wit.vtxinwit.append(CTxInWitness()) + sign_p2pk_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key) + for i in range(NUM_SIGHASH_TESTS): + temp_utxos.append(UTXO(tx.sha256, i, split_value)) + + block = self.build_next_block() + self.update_witness_block_with_transactions(block, [tx]) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + block = self.build_next_block() + used_sighash_single_out_of_bounds = False + for i in range(NUM_SIGHASH_TESTS): + # Ping regularly to keep the connection alive + if (not i % 100): + self.test_node.sync_with_ping() + # Choose random number of inputs to use. + num_inputs = random.randint(1, 10) + # Create a slight bias for producing more utxos + num_outputs = random.randint(1, 11) + random.shuffle(temp_utxos) + assert(len(temp_utxos) > num_inputs) + tx = CTransaction() + total_value = 0 + for i in range(num_inputs): + tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b"")) + tx.wit.vtxinwit.append(CTxInWitness()) + total_value += temp_utxos[i].nValue + split_value = total_value // num_outputs + for i in range(num_outputs): + tx.vout.append(CTxOut(split_value, script_pubkey)) + for i in range(num_inputs): + # Now try to sign each input, using a random hashtype. + anyonecanpay = 0 + if random.randint(0, 1): + anyonecanpay = SIGHASH_ANYONECANPAY + hashtype = random.randint(1, 3) | anyonecanpay + sign_p2pk_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key) + if (hashtype == SIGHASH_SINGLE and i >= num_outputs): + used_sighash_single_out_of_bounds = True + tx.rehash() + for i in range(num_outputs): + temp_utxos.append(UTXO(tx.sha256, i, split_value)) + temp_utxos = temp_utxos[num_inputs:] + + block.vtx.append(tx) + + # Test the block periodically, if we're close to maxblocksize + if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000): + self.update_witness_block_with_transactions(block, []) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + block = self.build_next_block() + + if (not used_sighash_single_out_of_bounds): + self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value") + # Test the transactions we've added to the block + if (len(block.vtx) > 1): + self.update_witness_block_with_transactions(block, []) + test_witness_block(self.nodes[0], self.test_node, block, accepted=True) + + # Now test witness version 0 P2PKH transactions + pubkeyhash = hash160(pubkey) + script_pkh = CScript([OP_0, pubkeyhash]) + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b"")) + tx.vout.append(CTxOut(temp_utxos[0].nValue, script_pkh)) + tx.wit.vtxinwit.append(CTxInWitness()) + sign_p2pk_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key) + tx2 = CTransaction() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) + tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) + + script = get_p2pkh_script(pubkeyhash) + sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) + signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL + + # Check that we can't have a scriptSig + tx2.vin[0].scriptSig = CScript([signature, pubkey]) + block = 
self.build_next_block()
+        self.update_witness_block_with_transactions(block, [tx, tx2])
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
+
+        # Move the signature to the witness.
+        block.vtx.pop()
+        tx2.wit.vtxinwit.append(CTxInWitness())
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
+        tx2.vin[0].scriptSig = b""
+        tx2.rehash()
+
+        self.update_witness_block_with_transactions(block, [tx2])
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
+
+        temp_utxos.pop(0)
+
+        # Update self.utxo for later tests by creating two outputs
+        # that consolidate all the coins in temp_utxos.
+        output_value = sum(i.nValue for i in temp_utxos) // 2
+
+        tx = CTransaction()
+        index = 0
+        # Just spend to our usual anyone-can-spend output
+        tx.vout = [CTxOut(output_value, CScript([OP_TRUE]))] * 2
+        for i in temp_utxos:
+            # Use SIGHASH_ALL|SIGHASH_ANYONECANPAY so we can build up
+            # the signatures as we go.
+            tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
+            tx.wit.vtxinwit.append(CTxInWitness())
+            sign_p2pk_witness_input(witness_program, tx, index, SIGHASH_ALL | SIGHASH_ANYONECANPAY, i.nValue, key)
+            index += 1
+        block = self.build_next_block()
+        self.update_witness_block_with_transactions(block, [tx])
+        test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
+
+        for i in range(len(tx.vout)):
+            self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
+
+    @subtest
+    def test_non_standard_witness_blinding(self):
+        """Test that unnecessary witnesses in transactions do not blind the node to the transaction."""
+
+        # Create a p2sh output -- this is so we can pass the standardness
+        # rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
+        # in P2SH).
+        p2sh_program = CScript([OP_TRUE])
+        p2sh_pubkey = hash160(p2sh_program)
+        script_pubkey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
+
+        # Now check that unnecessary witnesses can't be used to blind a node
+        # to a transaction, e.g. by violating standardness checks.
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
+        tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
+        tx.rehash()
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx, False, True)
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+        # We'll add an unnecessary witness to this transaction that would cause
+        # it to be non-standard, to test that violating policy with a witness
+        # doesn't blind a node to a transaction. Transactions
+        # rejected for having a witness shouldn't be added
+        # to the rejection cache.
+        tx2 = CTransaction()
+        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), CScript([p2sh_program])))
+        tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
+        tx2.wit.vtxinwit.append(CTxInWitness())
+        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * 400]
+        tx2.rehash()
+        # This will be rejected due to a policy check:
+        # No witness is allowed, since it is not a witness program but a p2sh program
+        test_transaction_acceptance(self.nodes[1], self.std_node, tx2, True, False, 'bad-witness-nonstandard')
+
+        # If we send without witness, it should be accepted.
+        test_transaction_acceptance(self.nodes[1], self.std_node, tx2, False, True)
+
+        # Now create a new anyone-can-spend utxo for the next test.
+        tx3 = CTransaction()
+        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
+        tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
+        tx3.rehash()
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx2, False, True)
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx3, False, True)
+
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+        # Update our utxo list; we spent the first entry.
+        self.utxo.pop(0)
+        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
+
+    @subtest
+    def test_non_standard_witness(self):
+        """Test detection of non-standard P2WSH witness"""
+        pad = chr(1).encode('latin-1')
+
+        # Create scripts for tests
+        scripts = []
+        scripts.append(CScript([OP_DROP] * 100))
+        scripts.append(CScript([OP_DROP] * 99))
+        scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
+        scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
+
+        p2wsh_scripts = []
+
+        tx = CTransaction()
+        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
+
+        # For each script, generate a pair of P2WSH and P2SH-P2WSH outputs.
+        outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
+        for i in scripts:
+            p2wsh = CScript([OP_0, sha256(i)])
+            p2sh = hash160(p2wsh)
+            p2wsh_scripts.append(p2wsh)
+            tx.vout.append(CTxOut(outputvalue, p2wsh))
+            tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL])))
+        tx.rehash()
+        txid = tx.sha256
+        test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
+
+        self.nodes[0].generate(1)
+        sync_blocks(self.nodes)
+
+        # Create the spending transactions for the tests
+        p2wsh_txs = []
+        p2sh_txs = []
+        for i in range(len(scripts)):
+            p2wsh_tx = CTransaction()
+            p2wsh_tx.vin.append(CTxIn(COutPoint(txid, i * 2)))
+            p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
+            p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
+            p2wsh_tx.rehash()
+            p2wsh_txs.append(p2wsh_tx)
+            p2sh_tx = CTransaction()
+            p2sh_tx.vin.append(CTxIn(COutPoint(txid, i * 2 + 1), CScript([p2wsh_scripts[i]])))
+            p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
+            p2sh_tx.wit.vtxinwit.append(CTxInWitness())
+            p2sh_tx.rehash()
+            p2sh_txs.append(p2sh_tx)
+
+        # Testing native P2WSH
+        # Witness stack size, excluding witnessScript, over 100 is non-standard
+        p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
+        test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[0], True, False, 'bad-witness-nonstandard')
+        # Non-standard nodes should accept
+        test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[0], True, True)
+
+        # Stack element size over 80 bytes is non-standard
+        p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
+        test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, False, 'bad-witness-nonstandard')
+        # Non-standard nodes should accept
+        test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[1], True, True)
+        # Standard nodes should accept if element size is not over 80 bytes
+        p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
+        test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, True)
+
+        # witnessScript size at 3600 bytes is standard
+        p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
+        test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[2],
True, True) + test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[2], True, True) + + # witnessScript size at 3601 bytes is non-standard + p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] + test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[3], True, False, 'bad-witness-nonstandard') + # Non-standard nodes should accept + test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[3], True, True) + + # Repeating the same tests with P2SH-P2WSH + p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]] + test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[0], True, False, 'bad-witness-nonstandard') + test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[0], True, True) + p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]] + test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, False, 'bad-witness-nonstandard') + test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[1], True, True) + p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]] + test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, True) + p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]] + test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[2], True, True) + test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[2], True, True) + p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] + test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[3], True, False, 'bad-witness-nonstandard') + test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[3], True, True) + + self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node + # Valid but non-standard transactions in a block should be accepted by standard node + sync_blocks(self.nodes) + assert_equal(len(self.nodes[0].getrawmempool()), 0) + assert_equal(len(self.nodes[1].getrawmempool()), 0) + + self.utxo.pop(0) + + @subtest + def test_upgrade_after_activation(self): + """Test the behavior of starting up a segwit-aware node after the softfork has activated.""" + + # Restart with the new binary + self.stop_node(2) + self.start_node(2, extra_args=["-vbparams=segwit:0:999999999999"]) + connect_nodes(self.nodes[0], 2) + + sync_blocks(self.nodes) + + # Make sure that this peer thinks segwit has activated. + assert(get_bip9_status(self.nodes[2], 'segwit')['status'] == "active") + + # Make sure this peer's blocks match those of node0. 
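+        # (Walk back from the tip comparing both the hash and the full
+        #  getblock data at every height, confirming the restarted node fully
+        #  validated and adopted node0's chain.)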
+ height = self.nodes[2].getblockcount() + while height >= 0: + block_hash = self.nodes[2].getblockhash(height) + assert_equal(block_hash, self.nodes[0].getblockhash(height)) + assert_equal(self.nodes[0].getblock(block_hash), self.nodes[2].getblock(block_hash)) + height -= 1 + + @subtest + def test_witness_sigops(self): + """Test sigop counting is correct inside witnesses.""" + + # Keep this under MAX_OPS_PER_SCRIPT (201) + witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG] * 5 + [OP_CHECKSIG] * 193 + [OP_ENDIF]) + witness_hash = sha256(witness_program) + script_pubkey = CScript([OP_0, witness_hash]) + + sigops_per_script = 20 * 5 + 193 * 1 + # We'll produce 2 extra outputs, one with a program that would take us + # over max sig ops, and one with a program that would exactly reach max + # sig ops + outputs = (MAX_SIGOP_COST // sigops_per_script) + 2 + extra_sigops_available = MAX_SIGOP_COST % sigops_per_script + + # We chose the number of checkmultisigs/checksigs to make this work: + assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT + + # This script, when spent with the first + # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction, + # would push us just over the block sigop limit. + witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available + 1) + [OP_ENDIF]) + witness_hash_toomany = sha256(witness_program_toomany) + script_pubkey_toomany = CScript([OP_0, witness_hash_toomany]) + + # If we spend this script instead, we would exactly reach our sigop + # limit (for witness sigops). + witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available) + [OP_ENDIF]) + witness_hash_justright = sha256(witness_program_justright) + script_pubkey_justright = CScript([OP_0, witness_hash_justright]) + + # First split our available utxo into a bunch of outputs + split_value = self.utxo[0].nValue // outputs + tx = CTransaction() + tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) + for i in range(outputs): + tx.vout.append(CTxOut(split_value, script_pubkey)) + tx.vout[-2].scriptPubKey = script_pubkey_toomany + tx.vout[-1].scriptPubKey = script_pubkey_justright + tx.rehash() + + block_1 = self.build_next_block() + self.update_witness_block_with_transactions(block_1, [tx]) + test_witness_block(self.nodes[0], self.test_node, block_1, accepted=True) + + tx2 = CTransaction() + # If we try to spend the first n-1 outputs from tx, that should be + # too many sigops. + total_value = 0 + for i in range(outputs - 1): + tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b"")) + tx2.wit.vtxinwit.append(CTxInWitness()) + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program] + total_value += tx.vout[i].nValue + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_toomany] + tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE]))) + tx2.rehash() + + block_2 = self.build_next_block() + self.update_witness_block_with_transactions(block_2, [tx2]) + test_witness_block(self.nodes[0], self.test_node, block_2, accepted=False) + + # Try dropping the last input in tx2, and add an output that has + # too many sigops (contributing to legacy sigop count). 
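+        # (BIP141 counts legacy/output sigops at 4x toward MAX_SIGOP_COST
+        #  while witness sigops count once, hence extra_sigops_available // 4
+        #  below: one more legacy CHECKSIG than fits tips the block over the
+        #  limit.)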
+ checksig_count = (extra_sigops_available // 4) + 1 + script_pubkey_checksigs = CScript([OP_CHECKSIG] * checksig_count) + tx2.vout.append(CTxOut(0, script_pubkey_checksigs)) + tx2.vin.pop() + tx2.wit.vtxinwit.pop() + tx2.vout[0].nValue -= tx.vout[-2].nValue + tx2.rehash() + block_3 = self.build_next_block() + self.update_witness_block_with_transactions(block_3, [tx2]) + test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False) + + # If we drop the last checksig in this output, the tx should succeed. + block_4 = self.build_next_block() + tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG] * (checksig_count - 1)) + tx2.rehash() + self.update_witness_block_with_transactions(block_4, [tx2]) + test_witness_block(self.nodes[0], self.test_node, block_4, accepted=True) + + # Reset the tip back down for the next test + sync_blocks(self.nodes) + for x in self.nodes: + x.invalidateblock(block_4.hash) + + # Try replacing the last input of tx2 to be spending the last + # output of tx + block_5 = self.build_next_block() + tx2.vout.pop() + tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs - 1), b"")) + tx2.wit.vtxinwit.append(CTxInWitness()) + tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_program_justright] + tx2.rehash() + self.update_witness_block_with_transactions(block_5, [tx2]) + test_witness_block(self.nodes[0], self.test_node, block_5, accepted=True) + + # TODO: test p2sh sigop counting + +if __name__ == '__main__': + SegWitTest().main() diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py new file mode 100755 index 000000000..c7ae57de8 --- /dev/null +++ b/test/functional/p2p_sendheaders.py @@ -0,0 +1,607 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test behavior of headers messages to announce blocks. + +Setup: + +- Two nodes: + - node0 is the node-under-test. We create two p2p connections to it. The + first p2p connection is a control and should only ever receive inv's. The + second p2p connection tests the headers sending logic. + - node1 is used to create reorgs. + +test_null_locators +================== + +Sends two getheaders requests with null locator values. First request's hashstop +value refers to validated block, while second request's hashstop value refers to +a block which hasn't been validated. Verifies only the first request returns +headers. + +test_nonnull_locators +===================== + +Part 1: No headers announcements before "sendheaders" +a. node mines a block [expect: inv] + send getdata for the block [expect: block] +b. node mines another block [expect: inv] + send getheaders and getdata [expect: headers, then block] +c. node mines another block [expect: inv] + peer mines a block, announces with header [expect: getdata] +d. node mines another block [expect: inv] + +Part 2: After "sendheaders", headers announcements should generally work. +a. peer sends sendheaders [expect: no response] + peer sends getheaders with current tip [expect: no response] +b. node mines a block [expect: tip header] +c. for N in 1, ..., 10: + * for announce-type in {inv, header} + - peer mines N blocks, announces with announce-type + [ expect: getheaders/getdata or getdata, deliver block(s) ] + - node mines a block [ expect: 1 header ] + +Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer. 
+- For response-type in {inv, getheaders} + * node mines a 7 block reorg [ expect: headers announcement of 8 blocks ] + * node mines an 8-block reorg [ expect: inv at tip ] + * peer responds with getblocks/getdata [expect: inv, blocks ] + * node mines another block [ expect: inv at tip, peer sends getdata, expect: block ] + * node mines another block at tip [ expect: inv ] + * peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers] + * peer requests block [ expect: block ] + * node mines another block at tip [ expect: inv, peer sends getdata, expect: block ] + * peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block] + * node mines 1 block [expect: 1 header, peer responds with getdata] + +Part 4: Test direct fetch behavior +a. Announce 2 old block headers. + Expect: no getdata requests. +b. Announce 3 new blocks via 1 headers message. + Expect: one getdata request for all 3 blocks. + (Send blocks.) +c. Announce 1 header that forks off the last two blocks. + Expect: no response. +d. Announce 1 more header that builds on that fork. + Expect: one getdata request for two blocks. +e. Announce 16 more headers that build on that fork. + Expect: getdata request for 14 more blocks. +f. Announce 1 more header that builds on that fork. + Expect: no response. + +Part 5: Test handling of headers that don't connect. +a. Repeat 10 times: + 1. Announce a header that doesn't connect. + Expect: getheaders message + 2. Send headers chain. + Expect: getdata for the missing blocks, tip update. +b. Then send 9 more headers that don't connect. + Expect: getheaders message each time. +c. Announce a header that does connect. + Expect: no response. +d. Announce 49 headers that don't connect. + Expect: getheaders message each time. +e. Announce one more that doesn't connect. + Expect: disconnect. 
+""" +from test_framework.blocktools import create_block, create_coinbase +from test_framework.messages import CInv +from test_framework.mininode import ( + CBlockHeader, + NODE_WITNESS, + P2PInterface, + mininode_lock, + msg_block, + msg_getblocks, + msg_getdata, + msg_getheaders, + msg_headers, + msg_inv, + msg_sendheaders, +) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + sync_blocks, + wait_until, +) + +DIRECT_FETCH_RESPONSE_TIME = 0.05 + +class BaseNode(P2PInterface): + def __init__(self): + super().__init__() + + self.block_announced = False + self.last_blockhash_announced = None + self.recent_headers_announced = [] + + def send_get_data(self, block_hashes): + """Request data for a list of block hashes.""" + msg = msg_getdata() + for x in block_hashes: + msg.inv.append(CInv(2, x)) + self.send_message(msg) + + def send_get_headers(self, locator, hashstop): + msg = msg_getheaders() + msg.locator.vHave = locator + msg.hashstop = hashstop + self.send_message(msg) + + def send_block_inv(self, blockhash): + msg = msg_inv() + msg.inv = [CInv(2, blockhash)] + self.send_message(msg) + + def send_header_for_blocks(self, new_blocks): + headers_message = msg_headers() + headers_message.headers = [CBlockHeader(b) for b in new_blocks] + self.send_message(headers_message) + + def send_getblocks(self, locator): + getblocks_message = msg_getblocks() + getblocks_message.locator.vHave = locator + self.send_message(getblocks_message) + + def wait_for_getdata(self, hash_list, timeout=60): + if hash_list == []: + return + + test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list + wait_until(test_function, timeout=timeout, lock=mininode_lock) + + def wait_for_block_announcement(self, block_hash, timeout=60): + test_function = lambda: self.last_blockhash_announced == block_hash + wait_until(test_function, timeout=timeout, lock=mininode_lock) + + def on_inv(self, message): + self.block_announced = True + self.last_blockhash_announced = message.inv[-1].hash + + def on_headers(self, message): + if len(message.headers): + self.block_announced = True + for x in message.headers: + x.calc_sha256() + # append because headers may be announced over multiple messages. + self.recent_headers_announced.append(x.sha256) + self.last_blockhash_announced = message.headers[-1].sha256 + + def clear_block_announcements(self): + with mininode_lock: + self.block_announced = False + self.last_message.pop("inv", None) + self.last_message.pop("headers", None) + self.recent_headers_announced = [] + + + def check_last_headers_announcement(self, headers): + """Test whether the last headers announcements received are right. + Headers may be announced across more than one message.""" + test_function = lambda: (len(self.recent_headers_announced) >= len(headers)) + wait_until(test_function, timeout=60, lock=mininode_lock) + with mininode_lock: + assert_equal(self.recent_headers_announced, headers) + self.block_announced = False + self.last_message.pop("headers", None) + self.recent_headers_announced = [] + + def check_last_inv_announcement(self, inv): + """Test whether the last announcement received had the right inv. 
inv should be a list of block hashes."""
+
+        test_function = lambda: self.block_announced
+        wait_until(test_function, timeout=60, lock=mininode_lock)
+
+        with mininode_lock:
+            compare_inv = []
+            if "inv" in self.last_message:
+                compare_inv = [x.hash for x in self.last_message["inv"].inv]
+            assert_equal(compare_inv, inv)
+            self.block_announced = False
+            self.last_message.pop("inv", None)
+
+class SendHeadersTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 2
+
+    def mine_blocks(self, count):
+        """Mine count blocks and return the new tip."""
+
+        # Clear out block announcements from each p2p listener
+        [x.clear_block_announcements() for x in self.nodes[0].p2ps]
+        self.nodes[0].generatetoaddress(count, self.nodes[0].get_deterministic_priv_key().address)
+        return int(self.nodes[0].getbestblockhash(), 16)
+
+    def mine_reorg(self, length):
+        """Mine a reorg that invalidates length blocks (replacing them with length+1 blocks).
+
+        Note: we clear the state of our p2p connections after the
+        to-be-reorged-out blocks are mined, so that we don't break later tests.
+        Return the list of block hashes newly mined."""
+
+        # make sure all invalidated blocks are node0's
+        self.nodes[0].generatetoaddress(length, self.nodes[0].get_deterministic_priv_key().address)
+        sync_blocks(self.nodes, wait=0.1)
+        for x in self.nodes[0].p2ps:
+            x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
+            x.clear_block_announcements()
+
+        tip_height = self.nodes[1].getblockcount()
+        hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1))
+        self.nodes[1].invalidateblock(hash_to_invalidate)
+        all_hashes = self.nodes[1].generatetoaddress(length + 1, self.nodes[1].get_deterministic_priv_key().address)  # Must be longer than the orig chain
+        sync_blocks(self.nodes, wait=0.1)
+        return [int(x, 16) for x in all_hashes]
+
+    def run_test(self):
+        # Setup the p2p connections
+        inv_node = self.nodes[0].add_p2p_connection(BaseNode())
+        # Make sure NODE_NETWORK is not set for test_node, so no block download
+        # will occur outside of direct fetching
+        test_node = self.nodes[0].add_p2p_connection(BaseNode(), services=NODE_WITNESS)
+
+        # Ensure veracks have been processed by our peer
+        inv_node.sync_with_ping()
+        test_node.sync_with_ping()
+
+        self.test_null_locators(test_node, inv_node)
+        self.test_nonnull_locators(test_node, inv_node)
+
+    def test_null_locators(self, test_node, inv_node):
+        tip = self.nodes[0].getblockheader(self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)[0])
+        tip_hash = int(tip["hash"], 16)
+
+        inv_node.check_last_inv_announcement(inv=[tip_hash])
+        test_node.check_last_inv_announcement(inv=[tip_hash])
+
+        self.log.info("Verify getheaders with null locator and valid hashstop returns headers.")
+        test_node.clear_block_announcements()
+        test_node.send_get_headers(locator=[], hashstop=tip_hash)
+        test_node.check_last_headers_announcement(headers=[tip_hash])
+
+        self.log.info("Verify getheaders with null locator and invalid hashstop does not return headers.")
+        block = create_block(int(tip["hash"], 16), create_coinbase(tip["height"] + 1), tip["mediantime"] + 1)
+        block.solve()
+        test_node.send_header_for_blocks([block])
+        test_node.clear_block_announcements()
+        test_node.send_get_headers(locator=[], hashstop=int(block.hash, 16))
+        test_node.sync_with_ping()
+        assert_equal(test_node.block_announced, False)
+        inv_node.clear_block_announcements()
+        test_node.send_message(msg_block(block))
+
inv_node.check_last_inv_announcement(inv=[int(block.hash, 16)]) + + def test_nonnull_locators(self, test_node, inv_node): + tip = int(self.nodes[0].getbestblockhash(), 16) + + # PART 1 + # 1. Mine a block; expect inv announcements each time + self.log.info("Part 1: headers don't start before sendheaders message...") + for i in range(4): + self.log.debug("Part 1.{}: starting...".format(i)) + old_tip = tip + tip = self.mine_blocks(1) + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_inv_announcement(inv=[tip]) + # Try a few different responses; none should affect next announcement + if i == 0: + # first request the block + test_node.send_get_data([tip]) + test_node.wait_for_block(tip) + elif i == 1: + # next try requesting header and block + test_node.send_get_headers(locator=[old_tip], hashstop=tip) + test_node.send_get_data([tip]) + test_node.wait_for_block(tip) + test_node.clear_block_announcements() # since we requested headers... + elif i == 2: + # this time announce own block via headers + inv_node.clear_block_announcements() + height = self.nodes[0].getblockcount() + last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + block_time = last_time + 1 + new_block = create_block(tip, create_coinbase(height + 1), block_time) + new_block.solve() + test_node.send_header_for_blocks([new_block]) + test_node.wait_for_getdata([new_block.sha256]) + test_node.send_message(msg_block(new_block)) + test_node.sync_with_ping() # make sure this block is processed + wait_until(lambda: inv_node.block_announced, timeout=60, lock=mininode_lock) + inv_node.clear_block_announcements() + test_node.clear_block_announcements() + + self.log.info("Part 1: success!") + self.log.info("Part 2: announce blocks with headers after sendheaders message...") + # PART 2 + # 2. Send a sendheaders message and test that headers announcements + # commence and keep working. + test_node.send_message(msg_sendheaders()) + prev_tip = int(self.nodes[0].getbestblockhash(), 16) + test_node.send_get_headers(locator=[prev_tip], hashstop=0) + test_node.sync_with_ping() + + # Now that we've synced headers, headers announcements should work + tip = self.mine_blocks(1) + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_headers_announcement(headers=[tip]) + + height = self.nodes[0].getblockcount() + 1 + block_time += 10 # Advance far enough ahead + for i in range(10): + self.log.debug("Part 2.{}: starting...".format(i)) + # Mine i blocks, and alternate announcing either via + # inv (of tip) or via headers. 
After each, new blocks + # mined by the node should successfully be announced + # with block header, even though the blocks are never requested + for j in range(2): + self.log.debug("Part 2.{}.{}: starting...".format(i, j)) + blocks = [] + for b in range(i + 1): + blocks.append(create_block(tip, create_coinbase(height), block_time)) + blocks[-1].solve() + tip = blocks[-1].sha256 + block_time += 1 + height += 1 + if j == 0: + # Announce via inv + test_node.send_block_inv(tip) + test_node.wait_for_getheaders() + # Should have received a getheaders now + test_node.send_header_for_blocks(blocks) + # Test that duplicate inv's won't result in duplicate + # getdata requests, or duplicate headers announcements + [inv_node.send_block_inv(x.sha256) for x in blocks] + test_node.wait_for_getdata([x.sha256 for x in blocks]) + inv_node.sync_with_ping() + else: + # Announce via headers + test_node.send_header_for_blocks(blocks) + test_node.wait_for_getdata([x.sha256 for x in blocks]) + # Test that duplicate headers won't result in duplicate + # getdata requests (the check is further down) + inv_node.send_header_for_blocks(blocks) + inv_node.sync_with_ping() + [test_node.send_message(msg_block(x)) for x in blocks] + test_node.sync_with_ping() + inv_node.sync_with_ping() + # This block should not be announced to the inv node (since it also + # broadcast it) + assert "inv" not in inv_node.last_message + assert "headers" not in inv_node.last_message + tip = self.mine_blocks(1) + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_headers_announcement(headers=[tip]) + height += 1 + block_time += 1 + + self.log.info("Part 2: success!") + + self.log.info("Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer...") + + # PART 3. Headers announcements can stop after large reorg, and resume after + # getheaders or inv from peer. 
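+        # (Sketch of the arithmetic exercised below, assuming the node caps
+        # unsolicited headers announcements at 8 entries: a reorg of length L
+        # replaces L blocks with L+1 new ones, so length=7 yields 8 headers
+        # (at the cap) while length=8 yields 9 (over it, so the node falls
+        # back to announcing only an inv at the tip).)
+        assumed_announcement_cap = 8  # hypothetical name, for illustration only
+        assert 7 + 1 <= assumed_announcement_cap < 8 + 1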
+ for j in range(2): + self.log.debug("Part 3.{}: starting...".format(j)) + # First try mining a reorg that can propagate with header announcement + new_block_hashes = self.mine_reorg(length=7) + tip = new_block_hashes[-1] + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_headers_announcement(headers=new_block_hashes) + + block_time += 8 + + # Mine a too-large reorg, which should be announced with a single inv + new_block_hashes = self.mine_reorg(length=8) + tip = new_block_hashes[-1] + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_inv_announcement(inv=[tip]) + + block_time += 9 + + fork_point = self.nodes[0].getblock("%064x" % new_block_hashes[0])["previousblockhash"] + fork_point = int(fork_point, 16) + + # Use getblocks/getdata + test_node.send_getblocks(locator=[fork_point]) + test_node.check_last_inv_announcement(inv=new_block_hashes) + test_node.send_get_data(new_block_hashes) + test_node.wait_for_block(new_block_hashes[-1]) + + for i in range(3): + self.log.debug("Part 3.{}.{}: starting...".format(j, i)) + + # Mine another block, still should get only an inv + tip = self.mine_blocks(1) + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_inv_announcement(inv=[tip]) + if i == 0: + # Just get the data -- shouldn't cause headers announcements to resume + test_node.send_get_data([tip]) + test_node.wait_for_block(tip) + elif i == 1: + # Send a getheaders message that shouldn't trigger headers announcements + # to resume (best header sent will be too old) + test_node.send_get_headers(locator=[fork_point], hashstop=new_block_hashes[1]) + test_node.send_get_data([tip]) + test_node.wait_for_block(tip) + elif i == 2: + # This time, try sending either a getheaders to trigger resumption + # of headers announcements, or mine a new block and inv it, also + # triggering resumption of headers announcements. + test_node.send_get_data([tip]) + test_node.wait_for_block(tip) + if j == 0: + test_node.send_get_headers(locator=[tip], hashstop=0) + test_node.sync_with_ping() + else: + test_node.send_block_inv(tip) + test_node.sync_with_ping() + # New blocks should now be announced with header + tip = self.mine_blocks(1) + inv_node.check_last_inv_announcement(inv=[tip]) + test_node.check_last_headers_announcement(headers=[tip]) + + self.log.info("Part 3: success!") + + self.log.info("Part 4: Testing direct fetch behavior...") + tip = self.mine_blocks(1) + height = self.nodes[0].getblockcount() + 1 + last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + block_time = last_time + 1 + + # Create 2 blocks. Send the blocks, then send the headers. 
+ blocks = [] + for b in range(2): + blocks.append(create_block(tip, create_coinbase(height), block_time)) + blocks[-1].solve() + tip = blocks[-1].sha256 + block_time += 1 + height += 1 + inv_node.send_message(msg_block(blocks[-1])) + + inv_node.sync_with_ping() # Make sure blocks are processed + test_node.last_message.pop("getdata", None) + test_node.send_header_for_blocks(blocks) + test_node.sync_with_ping() + # should not have received any getdata messages + with mininode_lock: + assert "getdata" not in test_node.last_message + + # This time, direct fetch should work + blocks = [] + for b in range(3): + blocks.append(create_block(tip, create_coinbase(height), block_time)) + blocks[-1].solve() + tip = blocks[-1].sha256 + block_time += 1 + height += 1 + + test_node.send_header_for_blocks(blocks) + test_node.sync_with_ping() + test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=DIRECT_FETCH_RESPONSE_TIME) + + [test_node.send_message(msg_block(x)) for x in blocks] + + test_node.sync_with_ping() + + # Now announce a header that forks the last two blocks + tip = blocks[0].sha256 + height -= 1 + blocks = [] + + # Create extra blocks for later + for b in range(20): + blocks.append(create_block(tip, create_coinbase(height), block_time)) + blocks[-1].solve() + tip = blocks[-1].sha256 + block_time += 1 + height += 1 + + # Announcing one block on fork should not trigger direct fetch + # (less work than tip) + test_node.last_message.pop("getdata", None) + test_node.send_header_for_blocks(blocks[0:1]) + test_node.sync_with_ping() + with mininode_lock: + assert "getdata" not in test_node.last_message + + # Announcing one more block on fork should trigger direct fetch for + # both blocks (same work as tip) + test_node.send_header_for_blocks(blocks[1:2]) + test_node.sync_with_ping() + test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=DIRECT_FETCH_RESPONSE_TIME) + + # Announcing 16 more headers should trigger direct fetch for 14 more + # blocks + test_node.send_header_for_blocks(blocks[2:18]) + test_node.sync_with_ping() + test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=DIRECT_FETCH_RESPONSE_TIME) + + # Announcing 1 more header should not trigger any response + test_node.last_message.pop("getdata", None) + test_node.send_header_for_blocks(blocks[18:19]) + test_node.sync_with_ping() + with mininode_lock: + assert "getdata" not in test_node.last_message + + self.log.info("Part 4: success!") + + # Now deliver all those blocks we announced. + [test_node.send_message(msg_block(x)) for x in blocks] + + self.log.info("Part 5: Testing handling of unconnecting headers") + # First we test that receipt of an unconnecting header doesn't prevent + # chain sync. + for i in range(10): + self.log.debug("Part 5.{}: starting...".format(i)) + test_node.last_message.pop("getdata", None) + blocks = [] + # Create two more blocks. + for j in range(2): + blocks.append(create_block(tip, create_coinbase(height), block_time)) + blocks[-1].solve() + tip = blocks[-1].sha256 + block_time += 1 + height += 1 + # Send the header of the second block -> this won't connect. 
+            with mininode_lock:
+                test_node.last_message.pop("getheaders", None)
+            test_node.send_header_for_blocks([blocks[1]])
+            test_node.wait_for_getheaders()
+            test_node.send_header_for_blocks(blocks)
+            test_node.wait_for_getdata([x.sha256 for x in blocks])
+            [test_node.send_message(msg_block(x)) for x in blocks]
+            test_node.sync_with_ping()
+            assert_equal(int(self.nodes[0].getbestblockhash(), 16), blocks[1].sha256)
+
+        blocks = []
+        # Now we test that if we repeatedly don't send connecting headers, we
+        # don't go into an infinite loop trying to get them to connect.
+        MAX_UNCONNECTING_HEADERS = 10
+        for j in range(MAX_UNCONNECTING_HEADERS + 1):
+            blocks.append(create_block(tip, create_coinbase(height), block_time))
+            blocks[-1].solve()
+            tip = blocks[-1].sha256
+            block_time += 1
+            height += 1
+
+        for i in range(1, MAX_UNCONNECTING_HEADERS):
+            # Send a header that doesn't connect, check that we get a getheaders.
+            with mininode_lock:
+                test_node.last_message.pop("getheaders", None)
+            test_node.send_header_for_blocks([blocks[i]])
+            test_node.wait_for_getheaders()
+
+        # Next header will connect, should re-set our count:
+        test_node.send_header_for_blocks([blocks[0]])
+
+        # Remove the first two entries (blocks[1] would connect):
+        blocks = blocks[2:]
+
+        # Now try to see how many unconnecting headers we can send
+        # before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
+        for i in range(5 * MAX_UNCONNECTING_HEADERS - 1):
+            # Send a header that doesn't connect, check that we get a getheaders.
+            with mininode_lock:
+                test_node.last_message.pop("getheaders", None)
+            test_node.send_header_for_blocks([blocks[i % len(blocks)]])
+            test_node.wait_for_getheaders()
+
+        # Eventually this stops working.
+        test_node.send_header_for_blocks([blocks[-1]])
+
+        # Should get disconnected
+        test_node.wait_for_disconnect()
+
+        self.log.info("Part 5: success!")
+
+        # Finally, check that the inv node never received a getdata request
+        # throughout the test
+        assert "getdata" not in inv_node.last_message
+
+if __name__ == '__main__':
+    SendHeadersTest().main()
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py
new file mode 100755
index 000000000..2459a9f24
--- /dev/null
+++ b/test/functional/p2p_timeouts.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test various net timeouts.
+
+- Create three P2P connections to a single bitcoind node:
+
+    no_verack_node - we never send a verack in response to their version
+    no_version_node - we never send a version (only a ping)
+    no_send_node - we never send any P2P message.
+
+- Open all three connections
+- Wait 1 second
+- Assert that we're connected
+- Send a ping to no_verack_node and no_version_node
+- Wait 30 seconds
+- Assert that we're still connected
+- Send a ping to no_verack_node and no_version_node
+- Wait 31 seconds
+- Assert that we're no longer connected (timeout to receive version/verack is 60 seconds)
+"""
+
+from time import sleep
+
+from test_framework.messages import msg_ping
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+
+class TestP2PConn(P2PInterface):
+    def on_version(self, message):
+        # Don't send a verack in response
+        pass
+
+class TimeoutsTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+
+    def run_test(self):
+        # Setup the p2p connections
+        no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn())
+        no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
+        no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False)
+
+        sleep(1)
+
+        assert no_verack_node.is_connected
+        assert no_version_node.is_connected
+        assert no_send_node.is_connected
+
+        no_verack_node.send_message(msg_ping())
+        no_version_node.send_message(msg_ping())
+
+        sleep(30)
+
+        assert "version" in no_verack_node.last_message
+
+        assert no_verack_node.is_connected
+        assert no_version_node.is_connected
+        assert no_send_node.is_connected
+
+        no_verack_node.send_message(msg_ping())
+        no_version_node.send_message(msg_ping())
+
+        sleep(31)
+
+        assert not no_verack_node.is_connected
+        assert not no_version_node.is_connected
+        assert not no_send_node.is_connected
+
+if __name__ == '__main__':
+    TimeoutsTest().main()
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
new file mode 100755
index 000000000..11299cbc0
--- /dev/null
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test processing of unrequested blocks.
+
+Setup: two nodes, node0 and node1, not connected to each other. Node1 will have
+nMinimumChainWork set to 0x10, so it won't process low-work unrequested blocks.
+
+We have one P2PInterface connection to node0 called test_node, and one to node1
+called min_work_node.
+
+The test:
+1. Generate one block on each node, to leave IBD.
+
+2. Mine a new block on each tip, and deliver to each node from that node's peer.
+   The tip should advance for node0, but node1 should skip processing due to
+   nMinimumChainWork.
+
+Node1 is unused in tests 3-7:
+
+3. Mine a block that forks from the genesis block, and deliver to test_node.
+   Node0 should not process this block (just accept the header), because it
+   is unrequested and doesn't have more or equal work to the tip.
+
+4a,b. Send another two blocks that build on the forking block.
+   Node0 should process the second block but be stuck on the shorter chain,
+   because it's missing an intermediate block.
+
+4c. Send 288 more blocks on the longer chain (the number of blocks ahead
+   we currently store).
+   Node0 should process all but the last block (too far ahead in height).
+
+5. Send a duplicate of the block in #3 to Node0.
+   Node0 should not process the block because it is unrequested, and stay on
+   the shorter chain.
+
+6. Send Node0 an inv for the height 3 block produced in #4 above.
+   Node0 should figure out that it is still missing the height 1 block on the
+   fork and send a getdata for it.
+
+7. Send Node0 the missing block again.
+   Node0 should process and the tip should advance.
+
+8. Create a fork which is invalid at a height longer than the current chain
+   (ie to which the node will try to reorg) but which has headers built on top
+   of the invalid block. Check that we get disconnected if we send more headers
+   on the chain the node now knows to be invalid.
+
+9. Test Node1 is able to sync when connected to node0 (which should have sufficient
+   work on its chain).
+"""
+
+import time
+
+from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
+from test_framework.messages import CBlockHeader, CInv, msg_block, msg_headers, msg_inv
+from test_framework.mininode import mininode_lock, P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes, sync_blocks
+
+
+class AcceptBlockTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 2
+        self.extra_args = [[], ["-minimumchainwork=0x10"]]
+
+    def setup_network(self):
+        # Node0 will be used to test behavior of processing unrequested blocks
+        # from peers, while Node1 will be used to test the interaction of
+        # unrequested-block processing with nMinimumChainWork.
+        self.setup_nodes()
+
+    def run_test(self):
+        # Setup the p2p connections
+        # test_node connects to node0
+        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
+        # min_work_node connects to node1 (which has -minimumchainwork set)
+        min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())
+
+        # 1. Have nodes mine a block (leave IBD)
+        [n.generatetoaddress(1, n.get_deterministic_priv_key().address) for n in self.nodes]
+        tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]
+
+        # 2. Send one block that builds on each tip.
+        # This should be accepted by node0
+        blocks_h2 = []  # the height 2 blocks on each node's chain
+        block_time = int(time.time()) + 1
+        for i in range(2):
+            blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
+            blocks_h2[i].solve()
+            block_time += 1
+        test_node.send_message(msg_block(blocks_h2[0]))
+        min_work_node.send_message(msg_block(blocks_h2[1]))
+
+        for x in [test_node, min_work_node]:
+            x.sync_with_ping()
+        assert_equal(self.nodes[0].getblockcount(), 2)
+        assert_equal(self.nodes[1].getblockcount(), 1)
+        self.log.info("First height 2 block accepted by node0; correctly rejected by node1")
+
+        # 3. Send another block that builds on genesis.
+        block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
+        block_time += 1
+        block_h1f.solve()
+        test_node.send_message(msg_block(block_h1f))
+
+        test_node.sync_with_ping()
+        tip_entry_found = False
+        for x in self.nodes[0].getchaintips():
+            if x['hash'] == block_h1f.hash:
+                assert_equal(x['status'], "headers-only")
+                tip_entry_found = True
+        assert(tip_entry_found)
+        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)
+
+        # 4. Send another two blocks that build on the fork.
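+        # (Both blocks can only be stored, not connected: the height 1 fork
+        # block was never processed, so even the more-work height 3 block
+        # leaves the fork's getchaintips status at "headers-only" below.)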
+        block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
+        block_time += 1
+        block_h2f.solve()
+        test_node.send_message(msg_block(block_h2f))
+
+        test_node.sync_with_ping()
+        # Since the earlier block was not processed by node, the new block
+        # can't be fully validated.
+        tip_entry_found = False
+        for x in self.nodes[0].getchaintips():
+            if x['hash'] == block_h2f.hash:
+                assert_equal(x['status'], "headers-only")
+                tip_entry_found = True
+        assert(tip_entry_found)
+
+        # But this block should be accepted by node since it has equal work.
+        self.nodes[0].getblock(block_h2f.hash)
+        self.log.info("Second height 2 block accepted, but not reorg'ed to")
+
+        # 4b. Now send another block that builds on the forking chain.
+        block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
+        block_h3.solve()
+        test_node.send_message(msg_block(block_h3))
+
+        test_node.sync_with_ping()
+        # Since the earlier block was not processed by node, the new block
+        # can't be fully validated.
+        tip_entry_found = False
+        for x in self.nodes[0].getchaintips():
+            if x['hash'] == block_h3.hash:
+                assert_equal(x['status'], "headers-only")
+                tip_entry_found = True
+        assert(tip_entry_found)
+
+        # But this block should be accepted by node since it has more work.
+        self.nodes[0].getblock(block_h3.hash)
+        self.log.info("Unrequested more-work block accepted")
+
+        # 4c. Now mine 288 more blocks and deliver; all should be processed but
+        # the last (height-too-high) on node (as long as it is not missing any headers)
+        tip = block_h3
+        all_blocks = []
+        for i in range(288):
+            next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
+            next_block.solve()
+            all_blocks.append(next_block)
+            tip = next_block
+
+        # Now send the block at height 5 and check that it wasn't accepted (missing header)
+        test_node.send_message(msg_block(all_blocks[1]))
+        test_node.sync_with_ping()
+        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
+        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)
+
+        # The block at height 5 should be accepted if we provide the missing header, though
+        headers_message = msg_headers()
+        headers_message.headers.append(CBlockHeader(all_blocks[0]))
+        test_node.send_message(headers_message)
+        test_node.send_message(msg_block(all_blocks[1]))
+        test_node.sync_with_ping()
+        self.nodes[0].getblock(all_blocks[1].hash)
+
+        # Now send the blocks in all_blocks
+        for i in range(288):
+            test_node.send_message(msg_block(all_blocks[i]))
+        test_node.sync_with_ping()
+
+        # Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
+        for x in all_blocks[:-1]:
+            self.nodes[0].getblock(x.hash)
+        assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
+
+        # 5. Test handling of an unrequested block on the node that didn't process it.
+        # The block should still not be processed (even though it has a child that
+        # has more work).
+
+        # The node should have requested the blocks at some point, so
+        # disconnect/reconnect first
+
+        self.nodes[0].disconnect_p2ps()
+        self.nodes[1].disconnect_p2ps()
+
+        test_node = self.nodes[0].add_p2p_connection(P2PInterface())
+
+        test_node.send_message(msg_block(block_h1f))
+
+        test_node.sync_with_ping()
+        assert_equal(self.nodes[0].getblockcount(), 2)
+        self.log.info("Unrequested block that would complete more-work chain was ignored")
+
+        # 6.
Try to get node to request the missing block. + # Poke the node with an inv for block at height 3 and see if that + # triggers a getdata on block 2 (it should if block 2 is missing). + with mininode_lock: + # Clear state so we can check the getdata request + test_node.last_message.pop("getdata", None) + test_node.send_message(msg_inv([CInv(2, block_h3.sha256)])) + + test_node.sync_with_ping() + with mininode_lock: + getdata = test_node.last_message["getdata"] + + # Check that the getdata includes the right block + assert_equal(getdata.inv[0].hash, block_h1f.sha256) + self.log.info("Inv at tip triggered getdata for unprocessed block") + + # 7. Send the missing block for the third time (now it is requested) + test_node.send_message(msg_block(block_h1f)) + + test_node.sync_with_ping() + assert_equal(self.nodes[0].getblockcount(), 290) + self.nodes[0].getblock(all_blocks[286].hash) + assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash) + assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash) + self.log.info("Successfully reorged to longer chain from non-whitelisted peer") + + # 8. Create a chain which is invalid at a height longer than the + # current chain, but which has more blocks on top of that + block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1) + block_289f.solve() + block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1) + block_290f.solve() + block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1) + # block_291 spends a coinbase below maturity! + block_291.vtx.append(create_tx_with_script(block_290f.vtx[0], 0, script_sig=b"42", amount=1)) + block_291.hashMerkleRoot = block_291.calc_merkle_root() + block_291.solve() + block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1) + block_292.solve() + + # Now send all the headers on the chain and enough blocks to trigger reorg + headers_message = msg_headers() + headers_message.headers.append(CBlockHeader(block_289f)) + headers_message.headers.append(CBlockHeader(block_290f)) + headers_message.headers.append(CBlockHeader(block_291)) + headers_message.headers.append(CBlockHeader(block_292)) + test_node.send_message(headers_message) + + test_node.sync_with_ping() + tip_entry_found = False + for x in self.nodes[0].getchaintips(): + if x['hash'] == block_292.hash: + assert_equal(x['status'], "headers-only") + tip_entry_found = True + assert(tip_entry_found) + assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash) + + test_node.send_message(msg_block(block_289f)) + test_node.send_message(msg_block(block_290f)) + + test_node.sync_with_ping() + self.nodes[0].getblock(block_289f.hash) + self.nodes[0].getblock(block_290f.hash) + + test_node.send_message(msg_block(block_291)) + + # At this point we've sent an obviously-bogus block, wait for full processing + # without assuming whether we will be disconnected or not + try: + # Only wait a short while so the test doesn't take forever if we do get + # disconnected + test_node.sync_with_ping(timeout=1) + except AssertionError: + test_node.wait_for_disconnect() + + self.nodes[0].disconnect_p2ps() + test_node = self.nodes[0].add_p2p_connection(P2PInterface()) + + # We should have failed reorg and switched back to 290 (but have block 291) + assert_equal(self.nodes[0].getblockcount(), 290) + assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash) + 
assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1) + + # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected + block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1) + block_293.solve() + headers_message = msg_headers() + headers_message.headers.append(CBlockHeader(block_293)) + test_node.send_message(headers_message) + test_node.wait_for_disconnect() + + # 9. Connect node1 to node0 and ensure it is able to sync + connect_nodes(self.nodes[0], 1) + sync_blocks([self.nodes[0], self.nodes[1]]) + self.log.info("Successfully synced nodes 1 and 0") + +if __name__ == '__main__': + AcceptBlockTest().main() diff --git a/test/functional/rpc_bind.py b/test/functional/rpc_bind.py new file mode 100755 index 000000000..53916d529 --- /dev/null +++ b/test/functional/rpc_bind.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test running bitcoind with the -rpcbind and -rpcallowip options.""" + +import sys + +from test_framework.netutil import all_interfaces, addr_to_hex, get_bind_addrs, test_ipv6_local +from test_framework.test_framework import BitcoinTestFramework, SkipTest +from test_framework.util import assert_equal, assert_raises_rpc_error, get_rpc_proxy, rpc_port, rpc_url + +class RPCBindTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.bind_to_localhost_only = False + self.num_nodes = 1 + + def setup_network(self): + self.add_nodes(self.num_nodes, None) + + def add_options(self, parser): + parser.add_argument("--ipv4", action='store_true', dest="run_ipv4", help="Run ipv4 tests only", default=False) + parser.add_argument("--ipv6", action='store_true', dest="run_ipv6", help="Run ipv6 tests only", default=False) + parser.add_argument("--nonloopback", action='store_true', dest="run_nonloopback", help="Run non-loopback tests only", default=False) + + def run_bind_test(self, allow_ips, connect_to, addresses, expected): + ''' + Start a node with requested rpcallowip and rpcbind parameters, + then try to connect, and check if the set of bound addresses + matches the expected set. + ''' + self.log.info("Bind test for %s" % str(addresses)) + expected = [(addr_to_hex(addr), port) for (addr, port) in expected] + base_args = ['-disablewallet', '-nolisten'] + if allow_ips: + base_args += ['-rpcallowip=' + x for x in allow_ips] + binds = ['-rpcbind='+addr for addr in addresses] + self.nodes[0].rpchost = connect_to + self.start_node(0, base_args + binds) + pid = self.nodes[0].process.pid + assert_equal(set(get_bind_addrs(pid)), set(expected)) + self.stop_nodes() + + def run_allowip_test(self, allow_ips, rpchost, rpcport): + ''' + Start a node with rpcallow IP, and request getnetworkinfo + at a non-localhost IP. 
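+
+        (Used for both the allowed and the denied case; callers expecting
+        denial assert that the server replies with a plain HTTP 403 rather
+        than a JSON-RPC error body.)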
+ ''' + self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport)) + base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips] + self.nodes[0].rpchost = None + self.start_nodes([base_args]) + # connect to node through non-loopback interface + node = get_rpc_proxy(rpc_url(self.nodes[0].datadir, 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir) + node.getnetworkinfo() + self.stop_nodes() + + def run_test(self): + # due to OS-specific network stats queries, this test works only on Linux + if sum([self.options.run_ipv4, self.options.run_ipv6, self.options.run_nonloopback]) > 1: + raise AssertionError("Only one of --ipv4, --ipv6 and --nonloopback can be set") + + self.log.info("Check for linux") + if not sys.platform.startswith('linux'): + raise SkipTest("This test can only be run on linux.") + + self.log.info("Check for ipv6") + have_ipv6 = test_ipv6_local() + if not have_ipv6 and not self.options.run_ipv4: + raise SkipTest("This test requires ipv6 support.") + + self.log.info("Check for non-loopback interface") + self.non_loopback_ip = None + for name,ip in all_interfaces(): + if ip != '127.0.0.1': + self.non_loopback_ip = ip + break + if self.non_loopback_ip is None and self.options.run_nonloopback: + raise SkipTest("This test requires a non-loopback ip address.") + + self.defaultport = rpc_port(0) + + if not self.options.run_nonloopback: + self._run_loopback_tests() + if not self.options.run_ipv4 and not self.options.run_ipv6: + self._run_nonloopback_tests() + + def _run_loopback_tests(self): + if self.options.run_ipv4: + # check only IPv4 localhost (explicit) + self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'], + [('127.0.0.1', self.defaultport)]) + # check only IPv4 localhost (explicit) with alternative port + self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'], + [('127.0.0.1', 32171)]) + # check only IPv4 localhost (explicit) with multiple alternative ports on same host + self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'], + [('127.0.0.1', 32171), ('127.0.0.1', 32172)]) + else: + # check default without rpcallowip (IPv4 and IPv6 localhost) + self.run_bind_test(None, '127.0.0.1', [], + [('127.0.0.1', self.defaultport), ('::1', self.defaultport)]) + # check default with rpcallowip (IPv6 any) + self.run_bind_test(['127.0.0.1'], '127.0.0.1', [], + [('::0', self.defaultport)]) + # check only IPv6 localhost (explicit) + self.run_bind_test(['[::1]'], '[::1]', ['[::1]'], + [('::1', self.defaultport)]) + # check both IPv4 and IPv6 localhost (explicit) + self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'], + [('127.0.0.1', self.defaultport), ('::1', self.defaultport)]) + + def _run_nonloopback_tests(self): + self.log.info("Using interface %s for testing" % self.non_loopback_ip) + + # check only non-loopback interface + self.run_bind_test([self.non_loopback_ip], self.non_loopback_ip, [self.non_loopback_ip], + [(self.non_loopback_ip, self.defaultport)]) + + # Check that with invalid rpcallowip, we are denied + self.run_allowip_test([self.non_loopback_ip], self.non_loopback_ip, self.defaultport) + assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], self.non_loopback_ip, self.defaultport) + +if __name__ == '__main__': + RPCBindTest().main() diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py new file mode 100755 index 000000000..92b690176 --- /dev/null +++ 
b/test/functional/rpc_blockchain.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test RPCs related to blockchainstate. + +Test the following RPCs: + - getblockchaininfo + - gettxoutsetinfo + - getdifficulty + - getbestblockhash + - getblockhash + - getblockheader + - getchaintxstats + - getnetworkhashps + - verifychain + +Tests correspond to code in rpc/blockchain.cpp. +""" + +from decimal import Decimal +import http.client +import subprocess + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than, + assert_greater_than_or_equal, + assert_raises, + assert_raises_rpc_error, + assert_is_hex_string, + assert_is_hash_string, +) +from test_framework.blocktools import ( + create_block, + create_coinbase, +) +from test_framework.messages import ( + msg_block, +) +from test_framework.mininode import ( + P2PInterface, +) + + +class BlockchainTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def run_test(self): + self.restart_node(0, extra_args=['-stopatheight=207', '-prune=1']) # Set extra args with pruning after rescan is complete + + self._test_getblockchaininfo() + self._test_getchaintxstats() + self._test_gettxoutsetinfo() + self._test_getblockheader() + self._test_getdifficulty() + self._test_getnetworkhashps() + self._test_stopatheight() + self._test_waitforblockheight() + assert self.nodes[0].verifychain(4, 0) + + def _test_getblockchaininfo(self): + self.log.info("Test getblockchaininfo") + + keys = [ + 'bestblockhash', + 'bip9_softforks', + 'blocks', + 'chain', + 'chainwork', + 'difficulty', + 'headers', + 'initialblockdownload', + 'mediantime', + 'pruned', + 'size_on_disk', + 'softforks', + 'verificationprogress', + 'warnings', + ] + res = self.nodes[0].getblockchaininfo() + + # result should have these additional pruning keys if manual pruning is enabled + assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys)) + + # size_on_disk should be > 0 + assert_greater_than(res['size_on_disk'], 0) + + # pruneheight should be greater or equal to 0 + assert_greater_than_or_equal(res['pruneheight'], 0) + + # check other pruning fields given that prune=1 + assert res['pruned'] + assert not res['automatic_pruning'] + + self.restart_node(0, ['-stopatheight=207']) + res = self.nodes[0].getblockchaininfo() + # should have exact keys + assert_equal(sorted(res.keys()), keys) + + self.restart_node(0, ['-stopatheight=207', '-prune=550']) + res = self.nodes[0].getblockchaininfo() + # result should have these additional pruning keys if prune=550 + assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys)) + + # check related fields + assert res['pruned'] + assert_equal(res['pruneheight'], 0) + assert res['automatic_pruning'] + assert_equal(res['prune_target_size'], 576716800) + assert_greater_than(res['size_on_disk'], 0) + + def _test_getchaintxstats(self): + self.log.info("Test getchaintxstats") + + # Test `getchaintxstats` invalid extra parameters + assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0) + + # Test `getchaintxstats` invalid `nblocks` + assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '') + assert_raises_rpc_error(-8, "Invalid 
block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
+        assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())
+
+        # Test `getchaintxstats` invalid `blockhash`
+        assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
+        assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0')
+        assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000')
+        assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000')
+        blockhash = self.nodes[0].getblockhash(200)
+        self.nodes[0].invalidateblock(blockhash)
+        assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
+        self.nodes[0].reconsiderblock(blockhash)
+
+        chaintxstats = self.nodes[0].getchaintxstats(1)
+        # 200 txs plus genesis tx
+        assert_equal(chaintxstats['txcount'], 201)
+        # tx rate should be 1 per 10 minutes, or 1/600
+        # we have to round because of binary math
+        assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))
+
+        b1_hash = self.nodes[0].getblockhash(1)
+        b1 = self.nodes[0].getblock(b1_hash)
+        b200_hash = self.nodes[0].getblockhash(200)
+        b200 = self.nodes[0].getblock(b200_hash)
+        time_diff = b200['mediantime'] - b1['mediantime']
+
+        chaintxstats = self.nodes[0].getchaintxstats()
+        assert_equal(chaintxstats['time'], b200['time'])
+        assert_equal(chaintxstats['txcount'], 201)
+        assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
+        assert_equal(chaintxstats['window_block_count'], 199)
+        assert_equal(chaintxstats['window_tx_count'], 199)
+        assert_equal(chaintxstats['window_interval'], time_diff)
+        assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))
+
+        chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
+        assert_equal(chaintxstats['time'], b1['time'])
+        assert_equal(chaintxstats['txcount'], 2)
+        assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
+        assert_equal(chaintxstats['window_block_count'], 0)
+        assert('window_tx_count' not in chaintxstats)
+        assert('window_interval' not in chaintxstats)
+        assert('txrate' not in chaintxstats)
+
+    def _test_gettxoutsetinfo(self):
+        node = self.nodes[0]
+        res = node.gettxoutsetinfo()
+
+        assert_equal(res['total_amount'], Decimal('8725.00000000'))
+        assert_equal(res['transactions'], 200)
+        assert_equal(res['height'], 200)
+        assert_equal(res['txouts'], 200)
+        assert_equal(res['bogosize'], 15000)
+        assert_equal(res['bestblock'], node.getblockhash(200))
+        size = res['disk_size']
+        assert size > 6400
+        assert size < 64000
+        assert_equal(len(res['bestblock']), 64)
+        assert_equal(len(res['hash_serialized_2']), 64)
+
+        self.log.info("Test that gettxoutsetinfo() works for blockchain with just the genesis block")
+        b1hash = node.getblockhash(1)
+        node.invalidateblock(b1hash)
+
+        res2 = node.gettxoutsetinfo()
+        assert_equal(res2['transactions'], 0)
+        assert_equal(res2['total_amount'], Decimal('0'))
+        assert_equal(res2['height'], 0)
+        assert_equal(res2['txouts'], 0)
+        assert_equal(res2['bogosize'], 0)
+        assert_equal(res2['bestblock'],
node.getblockhash(0)) + assert_equal(len(res2['hash_serialized_2']), 64) + + self.log.info("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block") + node.reconsiderblock(b1hash) + + res3 = node.gettxoutsetinfo() + # The field 'disk_size' is non-deterministic and can thus not be + # compared between res and res3. Everything else should be the same. + del res['disk_size'], res3['disk_size'] + assert_equal(res, res3) + + def _test_getblockheader(self): + node = self.nodes[0] + + assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense") + assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") + assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844") + + besthash = node.getbestblockhash() + secondbesthash = node.getblockhash(199) + header = node.getblockheader(besthash) + + assert_equal(header['hash'], besthash) + assert_equal(header['height'], 200) + assert_equal(header['confirmations'], 1) + assert_equal(header['previousblockhash'], secondbesthash) + assert_is_hex_string(header['chainwork']) + assert_equal(header['nTx'], 1) + assert_is_hash_string(header['hash']) + assert_is_hash_string(header['previousblockhash']) + assert_is_hash_string(header['merkleroot']) + assert_is_hash_string(header['bits'], length=None) + assert isinstance(header['time'], int) + assert isinstance(header['mediantime'], int) + assert isinstance(header['nonce'], int) + assert isinstance(header['version'], int) + assert isinstance(int(header['versionHex'], 16), int) + assert isinstance(header['difficulty'], Decimal) + + def _test_getdifficulty(self): + difficulty = self.nodes[0].getdifficulty() + # 1 hash in 2 should be valid, so difficulty should be 1/2**31 + # binary => decimal => binary math is why we do this check + assert abs(difficulty * 2**31 - 1) < 0.0001 + + def _test_getnetworkhashps(self): + hashes_per_second = self.nodes[0].getnetworkhashps() + # This should be 2 hashes every 10 minutes or 1/300 + assert abs(hashes_per_second * 300 - 1) < 0.0001 + + def _test_stopatheight(self): + assert_equal(self.nodes[0].getblockcount(), 200) + self.nodes[0].generatetoaddress(6, self.nodes[0].get_deterministic_priv_key().address) + assert_equal(self.nodes[0].getblockcount(), 206) + self.log.debug('Node should not stop at this height') + assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3)) + try: + self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address) + except (ConnectionError, http.client.BadStatusLine): + pass # The node already shut down before response + self.log.debug('Node should stop at this height...') + self.nodes[0].wait_until_stopped() + self.start_node(0) + assert_equal(self.nodes[0].getblockcount(), 207) + + def _test_waitforblockheight(self): + self.log.info("Test waitforblockheight") + node = self.nodes[0] + node.add_p2p_connection(P2PInterface()) + + current_height = node.getblock(node.getbestblockhash())['height'] + + # Create a fork somewhere below our current height, invalidate the tip + # of that fork, and then ensure that waitforblockheight still + # works as expected. + # + # (Previously this was broken based on setting + # `rpc/blockchain.cpp:latestblock` incorrectly.) 
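+        # (Concretely: two blocks are mined on a side branch off block 20 and
+        # the branch tip is invalidated; the main chain's height never
+        # changes, so every waitforblockheight call below should keep
+        # reporting current_height.)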
+        #
+        b20hash = node.getblockhash(20)
+        b20 = node.getblock(b20hash)
+
+        def solve_and_send_block(prevhash, height, time):
+            b = create_block(prevhash, create_coinbase(height), time)
+            b.solve()
+            node.p2p.send_message(msg_block(b))
+            node.p2p.sync_with_ping()
+            return b
+
+        b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
+        b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)
+
+        node.invalidateblock(b22f.hash)
+
+        def assert_waitforheight(height, timeout=2):
+            assert_equal(
+                node.waitforblockheight(height, timeout)['height'],
+                current_height)
+
+        assert_waitforheight(0)
+        assert_waitforheight(current_height - 1)
+        assert_waitforheight(current_height)
+        assert_waitforheight(current_height + 1)
+
+
+if __name__ == '__main__':
+    BlockchainTest().main()
diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py
new file mode 100755
index 000000000..3cc35a7b9
--- /dev/null
+++ b/test/functional/rpc_createmultisig.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test multisig address creation and spending via the createmultisig and addmultisigaddress RPCs."""
+
+from test_framework.test_framework import BitcoinTestFramework
+import decimal
+
+class RpcCreateMultiSigTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 3
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_wallet()
+
+    def get_keys(self):
+        node0, node1, node2 = self.nodes
+        self.add = [node1.getnewaddress() for _ in range(self.nkeys)]
+        self.pub = [node1.getaddressinfo(a)["pubkey"] for a in self.add]
+        self.priv = [node1.dumpprivkey(a) for a in self.add]
+        self.final = node2.getnewaddress()
+
+    def run_test(self):
+        node0, node1, node2 = self.nodes
+
+        # 50 BTC each, rest will be 25 BTC each
+        node0.generate(149)
+        self.sync_all()
+
+        self.moved = 0
+        for self.nkeys in [3, 5]:
+            for self.nsigs in [2, 3]:
+                for self.output_type in ["bech32", "p2sh-segwit", "legacy"]:
+                    self.get_keys()
+                    self.do_multisig()
+
+        self.checkbalances()
+
+    def checkbalances(self):
+        node0, node1, node2 = self.nodes
+        node0.generate(100)
+        self.sync_all()
+
+        bal0 = node0.getbalance()
+        bal1 = node1.getbalance()
+        bal2 = node2.getbalance()
+
+        height = node0.getblockchaininfo()["blocks"]
+        assert 150 < height < 350
+        total = 149 * 50 + (height - 149 - 100) * 25
+        assert bal1 == 0
+        assert bal2 == self.moved
+        assert bal0 + bal1 + bal2 == total
+
+    def do_multisig(self):
+        node0, node1, node2 = self.nodes
+
+        msig = node2.createmultisig(self.nsigs, self.pub, self.output_type)
+        madd = msig["address"]
+        mredeem = msig["redeemScript"]
+        if self.output_type == 'bech32':
+            assert madd[0:4] == "bcrt"  # actually a bech32 address
+
+        # compare against addmultisigaddress
+        msigw = node1.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
+        maddw = msigw["address"]
+        mredeemw = msigw["redeemScript"]
+        # addmultisigaddress and createmultisig work the same
+        assert maddw == madd
+        assert mredeemw == mredeem
+
+        txid = node0.sendtoaddress(madd, 40)
+
+        tx = node0.getrawtransaction(txid, True)
+        vout = [v["n"] for v in tx["vout"] if madd in v["scriptPubKey"].get("addresses", [])]
+        assert len(vout) == 1
+        vout = vout[0]
+        scriptPubKey = tx["vout"][vout]["scriptPubKey"]["hex"]
+        value = tx["vout"][vout]["value"]
+        prevtxs = [{"txid": txid, "vout": vout, "scriptPubKey": scriptPubKey,
"redeemScript": mredeem, "amount": value}] + + node0.generate(1) + + outval = value - decimal.Decimal("0.00001000") + rawtx = node2.createrawtransaction([{"txid": txid, "vout": vout}], [{self.final: outval}]) + + rawtx2 = node2.signrawtransactionwithkey(rawtx, self.priv[0:self.nsigs-1], prevtxs) + rawtx3 = node2.signrawtransactionwithkey(rawtx2["hex"], [self.priv[-1]], prevtxs) + + self.moved += outval + tx = node0.sendrawtransaction(rawtx3["hex"], True) + blk = node0.generate(1)[0] + assert tx in node0.getblock(blk)["tx"] + + txinfo = node0.getrawtransaction(tx, True, blk) + self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (self.nsigs, self.nkeys, self.output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"])) + +if __name__ == '__main__': + RpcCreateMultiSigTest().main() diff --git a/test/functional/rpc_decodescript.py b/test/functional/rpc_decodescript.py new file mode 100755 index 000000000..940386eee --- /dev/null +++ b/test/functional/rpc_decodescript.py @@ -0,0 +1,234 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test decoding scripts via decodescript RPC command.""" + +from test_framework.messages import CTransaction, sha256 +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, bytes_to_hex_str, hex_str_to_bytes + +from io import BytesIO + +class DecodeScriptTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def decodescript_script_sig(self): + signature = '304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001' + push_signature = '48' + signature + public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2' + push_public_key = '21' + public_key + + # below are test cases for all of the standard transaction types + + # 1) P2PK scriptSig + # the scriptSig of a public key scriptPubKey simply pushes a signature onto the stack + rpc_result = self.nodes[0].decodescript(push_signature) + assert_equal(signature, rpc_result['asm']) + + # 2) P2PKH scriptSig + rpc_result = self.nodes[0].decodescript(push_signature + push_public_key) + assert_equal(signature + ' ' + public_key, rpc_result['asm']) + + # 3) multisig scriptSig + # this also tests the leading portion of a P2SH multisig scriptSig + # OP_0 + rpc_result = self.nodes[0].decodescript('00' + push_signature + push_signature) + assert_equal('0 ' + signature + ' ' + signature, rpc_result['asm']) + + # 4) P2SH scriptSig + # an empty P2SH redeemScript is valid and makes for a very simple test case. + # thus, such a spending scriptSig would just need to pass the outer redeemScript + # hash test and leave true on the top of the stack. + rpc_result = self.nodes[0].decodescript('5100') + assert_equal('1 0', rpc_result['asm']) + + # 5) null data scriptSig - no such thing because null data scripts can not be spent. + # thus, no test case for that standard transaction type is here. 
+
+    def decodescript_script_pub_key(self):
+        public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
+        push_public_key = '21' + public_key
+        public_key_hash = '5dd1d3a048119c27b28293056724d9522f26d945'
+        push_public_key_hash = '14' + public_key_hash
+        uncompressed_public_key = '04b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb25e01fc8fde47c96c98a4f3a8123e33a38a50cf9025cc8c4494a518f991792bb7'
+        push_uncompressed_public_key = '41' + uncompressed_public_key
+        p2wsh_p2pk_script_hash = 'd8590cf8ea0674cf3d49fd7ca249b85ef7485dea62c138468bddeb20cd6519f7'
+
+        # below are test cases for all of the standard transaction types
+
+        # 1) P2PK scriptPubKey
+        # <pubkey> OP_CHECKSIG
+        rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
+        assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])
+        # P2PK is translated to P2WPKH
+        assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])
+
+        # 2) P2PKH scriptPubKey
+        # OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
+        rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
+        assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])
+        # P2PKH is translated to P2WPKH
+        assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])
+
+        # 3) multisig scriptPubKey
+        # <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
+        # just imagine that the pub keys used below are different.
+        # for our purposes here it does not matter that they are the same even though it is unrealistic.
+        multisig_script = '52' + push_public_key + push_public_key + push_public_key + '53ae'
+        rpc_result = self.nodes[0].decodescript(multisig_script)
+        assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key + ' 3 OP_CHECKMULTISIG', rpc_result['asm'])
+        # multisig in P2WSH
+        multisig_script_hash = bytes_to_hex_str(sha256(hex_str_to_bytes(multisig_script)))
+        assert_equal('0 ' + multisig_script_hash, rpc_result['segwit']['asm'])
+
+        # 4) P2SH scriptPubKey
+        # OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
+        # push_public_key_hash here should actually be the hash of a redeem script.
+        # but this works the same for purposes of this test.
+        rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
+        assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])
+        # P2SH does not work in segwit scripts. decodescript should not return a result for it.
+        assert 'segwit' not in rpc_result
+
+        # 5) null data scriptPubKey
+        # use a signature look-alike here to make sure that we do not decode random data as a signature.
+        # this matters if/when signature sighash decoding comes along.
+        # would want to make sure that no such decoding takes place in this case.
+        signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
+        # OP_RETURN <data>
+        rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
+        assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])
+
+        # 6) a CLTV redeem script. redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
+        # OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
+        # just imagine that the pub keys used below are different.
+        # for our purposes here it does not matter that they are the same even though it is unrealistic.
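+        # (Byte-level reading of the cltv_script constant assembled below, as
+        # a worked example: 63=OP_IF, 21 + 33 bytes=push pubkey,
+        # ad=OP_CHECKSIGVERIFY, 67=OP_ELSE, 03 20a107=push 500000 little-endian,
+        # b1=OP_CHECKLOCKTIMEVERIFY, 75=OP_DROP, 68=OP_ENDIF, ac=OP_CHECKSIG.)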
+ # + # OP_IF + # <receiver-pubkey> OP_CHECKSIGVERIFY + # OP_ELSE + # <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP + # OP_ENDIF + # <sender-pubkey> OP_CHECKSIG + # + # lock until block 500,000 + cltv_script = '63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac' + rpc_result = self.nodes[0].decodescript(cltv_script) + assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm']) + # CLTV script in P2WSH + cltv_script_hash = bytes_to_hex_str(sha256(hex_str_to_bytes(cltv_script))) + assert_equal('0 ' + cltv_script_hash, rpc_result['segwit']['asm']) + + # 7) P2PK scriptPubKey + # <pubkey> OP_CHECKSIG + rpc_result = self.nodes[0].decodescript(push_uncompressed_public_key + 'ac') + assert_equal(uncompressed_public_key + ' OP_CHECKSIG', rpc_result['asm']) + # uncompressed pubkeys are invalid for checksigs in segwit scripts. + # decodescript should not return a P2WPKH equivalent. + assert 'segwit' not in rpc_result + + # 8) multisig scriptPubKey with an uncompressed pubkey + # <m> <A pubkey> <B pubkey> <n> OP_CHECKMULTISIG + # just imagine that the pub keys used below are different. + # the purpose of this test is to check that a segwit script is not returned for bare multisig scripts + # with an uncompressed pubkey in them. + rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_uncompressed_public_key +'52ae') + assert_equal('2 ' + public_key + ' ' + uncompressed_public_key + ' 2 OP_CHECKMULTISIG', rpc_result['asm']) + # uncompressed pubkeys are invalid for checksigs in segwit scripts. + # decodescript should not return a P2WPKH equivalent. + assert 'segwit' not in rpc_result + + # 9) P2WPKH scriptpubkey + # 0 <PubKeyHash> + rpc_result = self.nodes[0].decodescript('00' + push_public_key_hash) + assert_equal('0 ' + public_key_hash, rpc_result['asm']) + # segwit scripts do not work nested into each other. + # a nested segwit script should not be returned in the results. + assert 'segwit' not in rpc_result + + # 10) P2WSH scriptpubkey + # 0 <ScriptHash> + # even though this hash is of a P2PK script which is better used as bare P2WPKH, it should not matter + # for the purpose of this test. + rpc_result = self.nodes[0].decodescript('0020' + p2wsh_p2pk_script_hash) + assert_equal('0 ' + p2wsh_p2pk_script_hash, rpc_result['asm']) + # segwit scripts do not work nested into each other. + # a nested segwit script should not be returned in the results. + assert 'segwit' not in rpc_result + + def decoderawtransaction_asm_sighashtype(self): + """Test decoding scripts via RPC command "decoderawtransaction". + + This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
+ """ + + # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output + tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000' + rpc_result = self.nodes[0].decoderawtransaction(tx) + assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm']) + + # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs. + # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc + # verify that we have not altered scriptPubKey decoding. + tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000' + rpc_result = self.nodes[0].decoderawtransaction(tx) + assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid']) + assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm']) + assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm']) + assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm']) + txSave = CTransaction() + txSave.deserialize(BytesIO(hex_str_to_bytes(tx))) + + # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type + tx = 
'01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000' + rpc_result = self.nodes[0].decoderawtransaction(tx) + assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm']) + + # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks + tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000' + rpc_result = self.nodes[0].decoderawtransaction(tx) + assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm']) + assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm']) + + # some more full transaction tests of varying specific scriptSigs. used instead of + # tests in decodescript_script_sig because the decodescript RPC is specifically + # for working on scriptPubKeys (argh!). + push_signature = bytes_to_hex_str(txSave.vin[0].scriptSig)[2:(0x48*2+4)] + signature = push_signature[2:] + der_signature = signature[:-2] + signature_sighash_decoded = der_signature + '[ALL]' + signature_2 = der_signature + '82' + push_signature_2 = '48' + signature_2 + signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]' + + # 1) P2PK scriptSig + txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature) + rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize())) + assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) + + # make sure that the sighash decodes come out correctly for a more complex / lesser used case. + txSave.vin[0].scriptSig = hex_str_to_bytes(push_signature_2) + rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize())) + assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) + + # 2) multisig scriptSig + txSave.vin[0].scriptSig = hex_str_to_bytes('00' + push_signature + push_signature_2) + rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize())) + assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm']) + + # 3) test a scriptSig that contains more than push operations. + # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it. 
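+ # (in the scriptSig hex below, '6a' is OP_RETURN and '14' pushes the next 20 bytes, + # so a correct decoder has to render the payload as plain data instead of + # mistaking the DER-like bytes inside it for a signature)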
+ txSave.vin[0].scriptSig = hex_str_to_bytes('6a143011020701010101010101020601010101010101') + rpc_result = self.nodes[0].decoderawtransaction(bytes_to_hex_str(txSave.serialize())) + assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm']) + + def run_test(self): + self.decodescript_script_sig() + self.decodescript_script_pub_key() + self.decoderawtransaction_asm_sighashtype() + +if __name__ == '__main__': + DecodeScriptTest().main() diff --git a/test/functional/rpc_deprecated.py b/test/functional/rpc_deprecated.py new file mode 100755 index 000000000..58074803c --- /dev/null +++ b/test/functional/rpc_deprecated.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test deprecation of RPC calls.""" +from test_framework.test_framework import BitcoinTestFramework + +class DeprecatedRpcTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.setup_clean_chain = True + self.extra_args = [[], ["-deprecatedrpc=validateaddress"]] + + def run_test(self): + # This test should be used to verify correct behaviour of deprecated + # RPC methods with and without the -deprecatedrpc flags. For example: + # + # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses") + # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()]) + # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()]) + pass + +if __name__ == '__main__': + DeprecatedRpcTest().main() diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py new file mode 100755 index 000000000..0c61e9ab6 --- /dev/null +++ b/test/functional/rpc_fundrawtransaction.py @@ -0,0 +1,754 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the fundrawtransaction RPC.""" + +from decimal import Decimal +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_fee_amount, + assert_greater_than, + assert_greater_than_or_equal, + assert_raises_rpc_error, + connect_nodes_bi, + count_bytes, + find_vout_for_address, +) + + +def get_unspent(listunspent, amount): + for utx in listunspent: + if utx['amount'] == amount: + return utx + raise AssertionError('Could not find unspent with amount={}'.format(amount)) + +class RawTransactionsTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self, split=False): + self.setup_nodes() + + connect_nodes_bi(self.nodes, 0, 1) + connect_nodes_bi(self.nodes, 1, 2) + connect_nodes_bi(self.nodes, 0, 2) + connect_nodes_bi(self.nodes, 0, 3) + + def run_test(self): + min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee'] + # This test is not meant to test fee estimation and we'd like + # to be sure all txs are sent at a consistent desired feerate + for node in self.nodes: + node.settxfee(min_relay_tx_fee) + + # if the fee's positive delta is higher than this value tests will fail, + # a negative delta always fails the tests.
+ # The size of the signature of every input may be at most 2 bytes larger + # than a minimum sized signature. + + # = 2 bytes * minRelayTxFeePerByte + feeTolerance = 2 * min_relay_tx_fee/1000 + + self.nodes[2].generate(1) + self.sync_all() + self.nodes[0].generate(121) + self.sync_all() + + # ensure that setting changePosition in fundraw with an exact match is handled properly + rawmatch = self.nodes[2].createrawtransaction([], {self.nodes[2].getnewaddress():50}) + rawmatch = self.nodes[2].fundrawtransaction(rawmatch, {"changePosition":1, "subtractFeeFromOutputs":[0]}) + assert_equal(rawmatch["changepos"], -1) + + watchonly_address = self.nodes[0].getnewaddress() + watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)["pubkey"] + watchonly_amount = Decimal(200) + self.nodes[3].importpubkey(watchonly_pubkey, "", True) + watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount) + + # Lock UTXO so nodes[0] doesn't accidentally spend it + watchonly_vout = find_vout_for_address(self.nodes[0], watchonly_txid, watchonly_address) + self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}]) + + self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10) + + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) + + self.nodes[0].generate(1) + self.sync_all() + + ############### + # simple test # + ############### + inputs = [ ] + outputs = { self.nodes[0].getnewaddress() : 1.0 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + assert(len(dec_tx['vin']) > 0) #test that we have enough inputs + + ############################## + # simple test with two coins # + ############################## + inputs = [ ] + outputs = { self.nodes[0].getnewaddress() : 2.2 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + assert(len(dec_tx['vin']) > 0) #test if we have enough inputs + + ############################## + # simple test with two coins # + ############################## + inputs = [ ] + outputs = { self.nodes[0].getnewaddress() : 2.6 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + assert(len(dec_tx['vin']) > 0) + assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') + + + ################################ + # simple test with two outputs # + ################################ + inputs = [ ] + outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + for out in dec_tx['vout']: + totalOut += out['value'] + + assert(len(dec_tx['vin']) > 0) +
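+ # fundrawtransaction only selects inputs, it never signs them, so the + # scriptSig of the chosen input must still be empty: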
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') + + + ######################################################################### + # test a fundrawtransaction with a VIN greater than the required amount # + ######################################################################### + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] + outputs = { self.nodes[0].getnewaddress() : 1.0 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + for out in dec_tx['vout']: + totalOut += out['value'] + + assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee + + + ##################################################################### + # test a fundrawtransaction which will not get a change output # + ##################################################################### + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] + outputs = { self.nodes[0].getnewaddress() : Decimal(5.0) - fee - feeTolerance } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + for out in dec_tx['vout']: + totalOut += out['value'] + + assert_equal(rawtxfund['changepos'], -1) + assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee + + + #################################################### + # test a fundrawtransaction with an invalid option # + #################################################### + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] + outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'}) + + # reserveChangeKey was deprecated and is now removed + assert_raises_rpc_error(-3, "Unexpected key reserveChangeKey", lambda: self.nodes[2].fundrawtransaction(hexstring=rawtx, options={'reserveChangeKey': True})) + + ############################################################ + # test a fundrawtransaction with an invalid change address # + ############################################################ + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] + outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + assert_raises_rpc_error(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'}) + + ############################################################ + # test a fundrawtransaction with a provided change address + 
############################################################ + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] + outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + change = self.nodes[2].getnewaddress() + assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2}) + rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0}) + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + out = dec_tx['vout'][0] + assert_equal(change, out['scriptPubKey']['addresses'][0]) + + ######################################################### + # test a fundrawtransaction with a provided change type # + ######################################################### + utx = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']} ] + outputs = { self.nodes[0].getnewaddress() : Decimal(4.0) } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[2].fundrawtransaction, rawtx, {'change_type': None}) + assert_raises_rpc_error(-5, "Unknown change type ''", self.nodes[2].fundrawtransaction, rawtx, {'change_type': ''}) + rawtx = self.nodes[2].fundrawtransaction(rawtx, {'change_type': 'bech32'}) + dec_tx = self.nodes[2].decoderawtransaction(rawtx['hex']) + assert_equal('witness_v0_keyhash', dec_tx['vout'][rawtx['changepos']]['scriptPubKey']['type']) + + ######################################################################### + # test a fundrawtransaction with a VIN smaller than the required amount # + ######################################################################### + utx = get_unspent(self.nodes[2].listunspent(), 1) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}] + outputs = { self.nodes[0].getnewaddress() : 1.0 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + + # 4-byte version + 1-byte vin count + 36-byte prevout then script_len + rawtx = rawtx[:82] + "0100" + rawtx[84:] + + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + matchingOuts = 0 + for i, out in enumerate(dec_tx['vout']): + totalOut += out['value'] + if out['scriptPubKey']['addresses'][0] in outputs: + matchingOuts+=1 + else: + assert_equal(i, rawtxfund['changepos']) + + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) + + assert_equal(matchingOuts, 1) + assert_equal(len(dec_tx['vout']), 2) + + + ########################################### + # test a fundrawtransaction with two VINs # + ########################################### + utx = get_unspent(self.nodes[2].listunspent(), 1) + utx2 = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] + outputs = { self.nodes[0].getnewaddress() : 6.0 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = 
self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + matchingOuts = 0 + for out in dec_tx['vout']: + totalOut += out['value'] + if out['scriptPubKey']['addresses'][0] in outputs: + matchingOuts+=1 + + assert_equal(matchingOuts, 1) + assert_equal(len(dec_tx['vout']), 2) + + matchingIns = 0 + for vinOut in dec_tx['vin']: + for vinIn in inputs: + if vinIn['txid'] == vinOut['txid']: + matchingIns+=1 + + assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params + + ######################################################### + # test a fundrawtransaction with two VINs and two vOUTs # + ######################################################### + utx = get_unspent(self.nodes[2].listunspent(), 1) + utx2 = get_unspent(self.nodes[2].listunspent(), 5) + + inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ] + outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + fee = rawtxfund['fee'] + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + totalOut = 0 + matchingOuts = 0 + for out in dec_tx['vout']: + totalOut += out['value'] + if out['scriptPubKey']['addresses'][0] in outputs: + matchingOuts+=1 + + assert_equal(matchingOuts, 2) + assert_equal(len(dec_tx['vout']), 3) + + ############################################## + # test a fundrawtransaction with invalid vin # + ############################################## + inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin! 
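+ # note: createrawtransaction does not look inputs up in the UTXO set, so the + # bogus outpoint is only rejected below, when fundrawtransaction tries to fund the transaction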
+ outputs = { self.nodes[0].getnewaddress() : 1.0} + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx) + + ############################################################ + #compare fee of a standard pubkeyhash transaction + inputs = [] + outputs = {self.nodes[1].getnewaddress():1.1} + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[0].fundrawtransaction(rawtx) + + #create same transaction over sendtoaddress + txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1) + signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] + + #compare fee + feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) + assert(feeDelta >= 0 and feeDelta <= feeTolerance) + ############################################################ + + ############################################################ + #compare fee of a standard pubkeyhash transaction with multiple outputs + inputs = [] + outputs = {self.nodes[1].getnewaddress():1.1,self.nodes[1].getnewaddress():1.2,self.nodes[1].getnewaddress():0.1,self.nodes[1].getnewaddress():1.3,self.nodes[1].getnewaddress():0.2,self.nodes[1].getnewaddress():0.3} + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[0].fundrawtransaction(rawtx) + #create same transaction over sendtoaddress + txId = self.nodes[0].sendmany("", outputs) + signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] + + #compare fee + feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) + assert(feeDelta >= 0 and feeDelta <= feeTolerance) + ############################################################ + + + ############################################################ + #compare fee of a 2of2 multisig p2sh transaction + + # create 2of2 addr + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[1].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[1].getaddressinfo(addr2) + + mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + + inputs = [] + outputs = {mSigObj:1.1} + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[0].fundrawtransaction(rawtx) + + #create same transaction over sendtoaddress + txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) + signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] + + #compare fee + feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) + assert(feeDelta >= 0 and feeDelta <= feeTolerance) + ############################################################ + + + ############################################################ + #compare fee of a 4of5 multisig p2sh transaction + + # create 4of5 addr + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[1].getnewaddress() + addr3 = self.nodes[1].getnewaddress() + addr4 = self.nodes[1].getnewaddress() + addr5 = self.nodes[1].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[1].getaddressinfo(addr2) + addr3Obj = self.nodes[1].getaddressinfo(addr3) + addr4Obj = self.nodes[1].getaddressinfo(addr4) + addr5Obj = self.nodes[1].getaddressinfo(addr5) + + mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])['address'] + + inputs = [] + outputs = {mSigObj:1.1} + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + 
fundedTx = self.nodes[0].fundrawtransaction(rawtx) + + #create same transaction over sendtoaddress + txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) + signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] + + #compare fee + feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) + assert(feeDelta >= 0 and feeDelta <= feeTolerance) + ############################################################ + + + ############################################################ + # spend a 2of2 multisig transaction over fundraw + + # create 2of2 addr + addr1 = self.nodes[2].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[2].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + + + # send 1.2 BTC to msig addr + txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) + self.sync_all() + self.nodes[1].generate(1) + self.sync_all() + + oldBalance = self.nodes[1].getbalance() + inputs = [] + outputs = {self.nodes[1].getnewaddress():1.1} + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[2].fundrawtransaction(rawtx) + + signedTx = self.nodes[2].signrawtransactionwithwallet(fundedTx['hex']) + txId = self.nodes[2].sendrawtransaction(signedTx['hex']) + self.sync_all() + self.nodes[1].generate(1) + self.sync_all() + + # make sure funds are received at node1 + assert_equal(oldBalance+Decimal('1.10000000'), self.nodes[1].getbalance()) + + ############################################################ + # locked wallet test + self.nodes[1].encryptwallet("test") + self.stop_nodes() + + self.start_nodes() + # This test is not meant to test fee estimation and we'd like + # to be sure all txs are sent at a consistent desired feerate + for node in self.nodes: + node.settxfee(min_relay_tx_fee) + + connect_nodes_bi(self.nodes,0,1) + connect_nodes_bi(self.nodes,1,2) + connect_nodes_bi(self.nodes,0,2) + connect_nodes_bi(self.nodes,0,3) + # Again lock the watchonly UTXO or nodes[0] may spend it, because + # lockunspent is memory-only and thus lost on restart + self.nodes[0].lockunspent(False, [{"txid": watchonly_txid, "vout": watchonly_vout}]) + self.sync_all() + + # drain the keypool + self.nodes[1].getnewaddress() + self.nodes[1].getrawchangeaddress() + inputs = [] + outputs = {self.nodes[0].getnewaddress():1.1} + rawtx = self.nodes[1].createrawtransaction(inputs, outputs) + # fund a transaction that requires a new key for the change output + # creating the key must be impossible because the wallet is locked + assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx) + + #refill the keypool + self.nodes[1].walletpassphrase("test", 100) + self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address + self.nodes[1].walletlock() + + assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2) + + oldBalance = self.nodes[0].getbalance() + + inputs = [] + outputs = {self.nodes[0].getnewaddress():1.1} + rawtx = self.nodes[1].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[1].fundrawtransaction(rawtx) + + #now we need to unlock + self.nodes[1].walletpassphrase("test", 600) + signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) + txId = self.nodes[1].sendrawtransaction(signedTx['hex']) + self.nodes[1].generate(1) + self.sync_all() + + # make sure funds are received at node0 + 
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance()) + + + ############################################### + # multiple (~19) inputs tx test | Compare fee # + ############################################### + + #empty node1, send some small coins from node0 to node1 + self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + for i in range(0,20): + self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) + self.nodes[0].generate(1) + self.sync_all() + + #fund a tx with ~20 small inputs + inputs = [] + outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} + rawtx = self.nodes[1].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[1].fundrawtransaction(rawtx) + + #create same transaction over sendtoaddress + txId = self.nodes[1].sendmany("", outputs) + signedFee = self.nodes[1].getrawmempool(True)[txId]['fee'] + + #compare fee + feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) + assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs + + + ############################################# + # multiple (~19) inputs tx test | sign/send # + ############################################# + + #again, empty node1, send some small coins from node0 to node1 + self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + for i in range(0,20): + self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) + self.nodes[0].generate(1) + self.sync_all() + + #fund a tx with ~20 small inputs + oldBalance = self.nodes[0].getbalance() + + inputs = [] + outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04} + rawtx = self.nodes[1].createrawtransaction(inputs, outputs) + fundedTx = self.nodes[1].fundrawtransaction(rawtx) + fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) + txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(oldBalance+Decimal('50.19000000'), self.nodes[0].getbalance()) #0.19+block reward + + ##################################################### + # test fundrawtransaction with OP_RETURN and no vin # + ##################################################### + + rawtx = "0100000000010000000000000000066a047465737400000000" + dec_tx = self.nodes[2].decoderawtransaction(rawtx) + + assert_equal(len(dec_tx['vin']), 0) + assert_equal(len(dec_tx['vout']), 1) + + rawtxfund = self.nodes[2].fundrawtransaction(rawtx) + dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) + + assert_greater_than(len(dec_tx['vin']), 0) # at least one vin + assert_equal(len(dec_tx['vout']), 2) # one change output added + + + ################################################## + # test a fundrawtransaction using only watchonly # + ################################################## + + inputs = [] + outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2} + rawtx = self.nodes[3].createrawtransaction(inputs, outputs) + + result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True }) + res_dec = self.nodes[0].decoderawtransaction(result["hex"]) + assert_equal(len(res_dec["vin"]), 1) + assert_equal(res_dec["vin"][0]["txid"], watchonly_txid) + + assert("fee" in result.keys()) + assert_greater_than(result["changepos"], -1) + + 
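Before the next section spends the entire watch-only amount, the watch-only round trip it relies on can be condensed into a minimal sketch, assuming a key-owning node owner and a watch-only node watcher (illustrative names, not from the patch):

# the watcher can select the imported UTXO thanks to includeWatching,
# but it holds no private key, so its signing pass stays incomplete
funded = watcher.fundrawtransaction(rawtx, {'includeWatching': True})
partial = watcher.signrawtransactionwithwallet(funded['hex'])
assert not partial['complete']
# the node that actually owns the key completes the signature
final = owner.signrawtransactionwithwallet(partial['hex'])
assert final['complete']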
############################################################### + # test fundrawtransaction using the entirety of watched funds # + ############################################################### + + inputs = [] + outputs = {self.nodes[2].getnewaddress() : watchonly_amount} + rawtx = self.nodes[3].createrawtransaction(inputs, outputs) + + # Backward compatibility test (2nd param is includeWatching) + result = self.nodes[3].fundrawtransaction(rawtx, True) + res_dec = self.nodes[0].decoderawtransaction(result["hex"]) + assert_equal(len(res_dec["vin"]), 2) + assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid) + + assert_greater_than(result["fee"], 0) + assert_greater_than(result["changepos"], -1) + assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10) + + signedtx = self.nodes[3].signrawtransactionwithwallet(result["hex"]) + assert(not signedtx["complete"]) + signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"]) + assert(signedtx["complete"]) + self.nodes[0].sendrawtransaction(signedtx["hex"]) + self.nodes[0].generate(1) + self.sync_all() + + ####################### + # Test feeRate option # + ####################### + + # Make sure there is exactly one input so coin selection can't skew the result + assert_equal(len(self.nodes[3].listunspent(1)), 1) + + inputs = [] + outputs = {self.nodes[3].getnewaddress() : 1} + rawtx = self.nodes[3].createrawtransaction(inputs, outputs) + result = self.nodes[3].fundrawtransaction(rawtx) # uses min_relay_tx_fee (set by settxfee) + result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}) + result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10*min_relay_tx_fee}) + result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex']) + assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate) + assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate) + + ################################ + # Test no address reuse occurs # + ################################ + + result3 = self.nodes[3].fundrawtransaction(rawtx) + res_dec = self.nodes[0].decoderawtransaction(result3["hex"]) + changeaddress = "" + for out in res_dec['vout']: + if out['value'] > 1.0: + changeaddress += out['scriptPubKey']['addresses'][0] + assert(changeaddress != "") + nextaddr = self.nodes[3].getnewaddress() + # Now the change address key should be removed from the keypool + assert(changeaddress != nextaddr) + + ###################################### + # Test subtractFeeFromOutputs option # + ###################################### + + # Make sure there is exactly one input so coin selection can't skew the result + assert_equal(len(self.nodes[3].listunspent(1)), 1) + + inputs = [] + outputs = {self.nodes[2].getnewaddress(): 1} + rawtx = self.nodes[3].createrawtransaction(inputs, outputs) + + result = [self.nodes[3].fundrawtransaction(rawtx), # uses min_relay_tx_fee (set by settxfee) + self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list + self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0]}), # uses min_relay_tx_fee (set by settxfee) + self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee}), + self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2*min_relay_tx_fee, "subtractFeeFromOutputs": [0]})] + + dec_tx = [self.nodes[3].decoderawtransaction(tx_['hex']) for tx_ in result] + output = [d['vout'][1 - r['changepos']]['value'] 
for d, r in zip(dec_tx, result)] + change = [d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result)] + + assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee']) + assert_equal(result[3]['fee'], result[4]['fee']) + assert_equal(change[0], change[1]) + assert_equal(output[0], output[1]) + assert_equal(output[0], output[2] + result[2]['fee']) + assert_equal(change[0] + result[0]['fee'], change[2]) + assert_equal(output[3], output[4] + result[4]['fee']) + assert_equal(change[3] + result[3]['fee'], change[4]) + + inputs = [] + outputs = {self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3)} + rawtx = self.nodes[3].createrawtransaction(inputs, outputs) + + result = [self.nodes[3].fundrawtransaction(rawtx), + # split the fee between outputs 0, 2, and 3, but not output 1 + self.nodes[3].fundrawtransaction(rawtx, {"subtractFeeFromOutputs": [0, 2, 3]})] + + dec_tx = [self.nodes[3].decoderawtransaction(result[0]['hex']), + self.nodes[3].decoderawtransaction(result[1]['hex'])] + + # Nested list of non-change output amounts for each transaction + output = [[out['value'] for i, out in enumerate(d['vout']) if i != r['changepos']] + for d, r in zip(dec_tx, result)] + + # List of differences in output amounts between normal and subtractFee transactions + share = [o0 - o1 for o0, o1 in zip(output[0], output[1])] + + # output 1 is the same in both transactions + assert_equal(share[1], 0) + + # the other 3 outputs are smaller as a result of subtractFeeFromOutputs + assert_greater_than(share[0], 0) + assert_greater_than(share[2], 0) + assert_greater_than(share[3], 0) + + # outputs 2 and 3 take the same share of the fee + assert_equal(share[2], share[3]) + + # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3 + assert_greater_than_or_equal(share[0], share[2]) + assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0]) + + # the fee is the same in both transactions + assert_equal(result[0]['fee'], result[1]['fee']) + + # the total subtracted from the outputs is equal to the fee + assert_equal(share[0] + share[2] + share[3], result[0]['fee']) + +if __name__ == '__main__': + RawTransactionsTest().main() diff --git a/test/functional/rpc_getblockstats.py b/test/functional/rpc_getblockstats.py new file mode 100755 index 000000000..b24bed6ad --- /dev/null +++ b/test/functional/rpc_getblockstats.py @@ -0,0 +1,180 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+ +# +# Test getblockstats rpc call +# +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, +) +import json +import os +import time + +TESTSDIR = os.path.dirname(os.path.realpath(__file__)) + +class GetblockstatsTest(BitcoinTestFramework): + + start_height = 101 + max_stat_pos = 2 + STATS_NEED_TXINDEX = [ + 'avgfee', + 'avgfeerate', + 'maxfee', + 'maxfeerate', + 'medianfee', + 'feerate_percentiles', + 'minfee', + 'minfeerate', + 'totalfee', + 'utxo_size_inc', + ] + + def add_options(self, parser): + parser.add_argument('--gen-test-data', dest='gen_test_data', + default=False, action='store_true', + help='Generate test data') + parser.add_argument('--test-data', dest='test_data', + default='data/rpc_getblockstats.json', + action='store', metavar='FILE', + help='Test data file') + + def set_test_params(self): + self.num_nodes = 2 + self.extra_args = [['-txindex'], ['-paytxfee=0.003']] + self.setup_clean_chain = True + + def get_stats(self): + return [self.nodes[0].getblockstats(hash_or_height=self.start_height + i) for i in range(self.max_stat_pos+1)] + + def generate_test_data(self, filename): + mocktime = time.time() + self.nodes[0].generate(101) + + self.nodes[0].sendtoaddress(address=self.nodes[1].getnewaddress(), amount=10, subtractfeefromamount=True) + self.nodes[0].generate(1) + self.sync_all() + + self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=10, subtractfeefromamount=True) + self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=10, subtractfeefromamount=False) + self.nodes[1].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1, subtractfeefromamount=True) + self.sync_all() + self.nodes[0].generate(1) + + self.expected_stats = self.get_stats() + + blocks = [] + tip = self.nodes[0].getbestblockhash() + blockhash = None + height = 0 + while tip != blockhash: + blockhash = self.nodes[0].getblockhash(height) + blocks.append(self.nodes[0].getblock(blockhash, 0)) + height += 1 + + to_dump = { + 'blocks': blocks, + 'mocktime': int(mocktime), + 'stats': self.expected_stats, + } + with open(filename, 'w', encoding="utf8") as f: + json.dump(to_dump, f, sort_keys=True, indent=2) + + def load_test_data(self, filename): + with open(filename, 'r', encoding="utf8") as f: + d = json.load(f) + blocks = d['blocks'] + mocktime = d['mocktime'] + self.expected_stats = d['stats'] + + # Set the timestamps from the file so that the nodes can get out of Initial Block Download + self.nodes[0].setmocktime(mocktime) + self.nodes[1].setmocktime(mocktime) + + for b in blocks: + self.nodes[0].submitblock(b) + + def run_test(self): + test_data = os.path.join(TESTSDIR, self.options.test_data) + if self.options.gen_test_data: + self.generate_test_data(test_data) + else: + self.load_test_data(test_data) + + self.sync_all() + stats = self.get_stats() + expected_stats_noindex = [] + for stat_row in stats: + expected_stats_noindex.append({k: v for k, v in stat_row.items() if k not in self.STATS_NEED_TXINDEX}) + + # Make sure all valid statistics are included but nothing else is + expected_keys = self.expected_stats[0].keys() + assert_equal(set(stats[0].keys()), set(expected_keys)) + + assert_equal(stats[0]['height'], self.start_height) + assert_equal(stats[self.max_stat_pos]['height'], self.start_height + self.max_stat_pos) + + for i in range(self.max_stat_pos+1): + self.log.info('Checking block %d\n' % (i)) + assert_equal(stats[i], self.expected_stats[i]) + + # Check 
selecting block by hash too + blockhash = self.expected_stats[i]['blockhash'] + stats_by_hash = self.nodes[0].getblockstats(hash_or_height=blockhash) + assert_equal(stats_by_hash, self.expected_stats[i]) + + # Check with the node that has no txindex + stats_no_txindex = self.nodes[1].getblockstats(hash_or_height=blockhash, stats=list(expected_stats_noindex[i].keys())) + assert_equal(stats_no_txindex, expected_stats_noindex[i]) + + # Make sure each stat can be queried on its own + for stat in expected_keys: + for i in range(self.max_stat_pos+1): + result = self.nodes[0].getblockstats(hash_or_height=self.start_height + i, stats=[stat]) + assert_equal(list(result.keys()), [stat]) + if result[stat] != self.expected_stats[i][stat]: + self.log.info('result[%s] (%d) failed, %r != %r' % ( + stat, i, result[stat], self.expected_stats[i][stat])) + assert_equal(result[stat], self.expected_stats[i][stat]) + + # Make sure only the selected statistics are included (more than one) + some_stats = {'minfee', 'maxfee'} + stats = self.nodes[0].getblockstats(hash_or_height=1, stats=list(some_stats)) + assert_equal(set(stats.keys()), some_stats) + + # Test invalid parameters raise the proper json exceptions + tip = self.start_height + self.max_stat_pos + assert_raises_rpc_error(-8, 'Target block height %d after current tip %d' % (tip+1, tip), + self.nodes[0].getblockstats, hash_or_height=tip+1) + assert_raises_rpc_error(-8, 'Target block height %d is negative' % (-1), + self.nodes[0].getblockstats, hash_or_height=-1) + + # Make sure invalid stats are not allowed + inv_sel_stat = 'asdfghjkl' + inv_stats = [ + [inv_sel_stat], + ['minfee' , inv_sel_stat], + [inv_sel_stat, 'minfee'], + ['minfee', inv_sel_stat, 'maxfee'], + ] + for inv_stat in inv_stats: + assert_raises_rpc_error(-8, 'Invalid selected statistic %s' % inv_sel_stat, + self.nodes[0].getblockstats, hash_or_height=1, stats=inv_stat) + + # Make sure we aren't always returning inv_sel_stat as the culprit stat + assert_raises_rpc_error(-8, 'Invalid selected statistic aaa%s' % inv_sel_stat, + self.nodes[0].getblockstats, hash_or_height=1, stats=['minfee' , 'aaa%s' % inv_sel_stat]) + + assert_raises_rpc_error(-8, 'One or more of the selected stats requires -txindex enabled', + self.nodes[1].getblockstats, hash_or_height=self.start_height + self.max_stat_pos) + + # Mainchain's genesis block shouldn't be found on regtest + assert_raises_rpc_error(-5, 'Block not found', self.nodes[0].getblockstats, + hash_or_height='000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f') + +if __name__ == '__main__': + GetblockstatsTest().main() diff --git a/test/functional/rpc_getchaintips.py b/test/functional/rpc_getchaintips.py new file mode 100755 index 000000000..c869c7262 --- /dev/null +++ b/test/functional/rpc_getchaintips.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the getchaintips RPC. + +- introduce a network split +- work on chains of different lengths +- join the network together again +- verify that getchaintips now returns two chain tips.
+""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class GetChainTipsTest (BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + + def run_test(self): + tips = self.nodes[0].getchaintips() + assert_equal(len(tips), 1) + assert_equal(tips[0]['branchlen'], 0) + assert_equal(tips[0]['height'], 200) + assert_equal(tips[0]['status'], 'active') + + # Split the network and build two chains of different lengths. + self.split_network() + self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address) + self.nodes[2].generatetoaddress(20, self.nodes[2].get_deterministic_priv_key().address) + self.sync_all([self.nodes[:2], self.nodes[2:]]) + + tips = self.nodes[1].getchaintips () + assert_equal (len (tips), 1) + shortTip = tips[0] + assert_equal (shortTip['branchlen'], 0) + assert_equal (shortTip['height'], 210) + assert_equal (tips[0]['status'], 'active') + + tips = self.nodes[3].getchaintips () + assert_equal (len (tips), 1) + longTip = tips[0] + assert_equal (longTip['branchlen'], 0) + assert_equal (longTip['height'], 220) + assert_equal (tips[0]['status'], 'active') + + # Join the network halves and check that we now have two tips + # (at least at the nodes that previously had the short chain). + self.join_network () + + tips = self.nodes[0].getchaintips () + assert_equal (len (tips), 2) + assert_equal (tips[0], longTip) + + assert_equal (tips[1]['branchlen'], 10) + assert_equal (tips[1]['status'], 'valid-fork') + tips[1]['branchlen'] = 0 + tips[1]['status'] = 'active' + assert_equal (tips[1], shortTip) + +if __name__ == '__main__': + GetChainTipsTest ().main () diff --git a/test/functional/rpc_help.py b/test/functional/rpc_help.py new file mode 100755 index 000000000..be096af89 --- /dev/null +++ b/test/functional/rpc_help.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test RPC help output.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error + + +class HelpRpcTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def run_test(self): + node = self.nodes[0] + + # wrong argument count + assert_raises_rpc_error(-1, 'help', node.help, 'foo', 'bar') + + # invalid argument + assert_raises_rpc_error(-1, 'JSON value is not a string as expected', node.help, 0) + + # help of unknown command + assert_equal(node.help('foo'), 'help: unknown command: foo') + + # command titles + titles = [line[3:-3] for line in node.help().splitlines() if line.startswith('==')] + + components = ['Blockchain', 'Control', 'Generating', 'Mining', 'Network', 'Rawtransactions', 'Util'] + + if self.is_wallet_compiled(): + components.append('Wallet') + + if self.is_zmq_compiled(): + components.append('Zmq') + + assert_equal(titles, components) + + +if __name__ == '__main__': + HelpRpcTest().main() diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py new file mode 100755 index 000000000..d8ecdd573 --- /dev/null +++ b/test/functional/rpc_invalidateblock.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the invalidateblock RPC.""" + +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, connect_nodes_bi, sync_blocks + +class InvalidateTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + + def setup_network(self): + self.setup_nodes() + + def run_test(self): + self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:") + self.log.info("Mine 4 blocks on Node 0") + self.nodes[0].generatetoaddress(4, self.nodes[0].get_deterministic_priv_key().address) + assert(self.nodes[0].getblockcount() == 4) + besthash = self.nodes[0].getbestblockhash() + + self.log.info("Mine competing 6 blocks on Node 1") + self.nodes[1].generatetoaddress(6, self.nodes[1].get_deterministic_priv_key().address) + assert(self.nodes[1].getblockcount() == 6) + + self.log.info("Connect nodes to force a reorg") + connect_nodes_bi(self.nodes,0,1) + sync_blocks(self.nodes[0:2]) + assert(self.nodes[0].getblockcount() == 6) + badhash = self.nodes[1].getblockhash(2) + + self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain") + self.nodes[0].invalidateblock(badhash) + newheight = self.nodes[0].getblockcount() + newhash = self.nodes[0].getbestblockhash() + if (newheight != 4 or newhash != besthash): + raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight)) + + self.log.info("Make sure we won't reorg to a lower work chain:") + connect_nodes_bi(self.nodes,1,2) + self.log.info("Sync node 2 to node 1 so both have 6 blocks") + sync_blocks(self.nodes[1:3]) + assert(self.nodes[2].getblockcount() == 6) + self.log.info("Invalidate block 5 on node 1 so its tip is now at 4") + self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5)) + assert(self.nodes[1].getblockcount() == 4) + self.log.info("Invalidate block 3 on node 2, so its tip is now 2") + self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3)) + assert(self.nodes[2].getblockcount() == 2) + self.log.info("..and then mine a block") + self.nodes[2].generatetoaddress(1, self.nodes[2].get_deterministic_priv_key().address) + self.log.info("Verify all nodes are at the right height") + time.sleep(5) + assert_equal(self.nodes[2].getblockcount(), 3) + assert_equal(self.nodes[0].getblockcount(), 4) + node1height = self.nodes[1].getblockcount() + if node1height < 4: + raise AssertionError("Node 1 reorged to a lower height: %d"%node1height) + +if __name__ == '__main__': + InvalidateTest().main() diff --git a/test/functional/rpc_named_arguments.py b/test/functional/rpc_named_arguments.py new file mode 100755 index 000000000..6c5c29dfd --- /dev/null +++ b/test/functional/rpc_named_arguments.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test using named arguments for RPCs.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, +) + +class NamedArgumentTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def run_test(self): + node = self.nodes[0] + h = node.help(command='getblockchaininfo') + assert(h.startswith('getblockchaininfo\n')) + + assert_raises_rpc_error(-8, 'Unknown named parameter', node.help, random='getblockchaininfo') + + h = node.getblockhash(height=0) + node.getblock(blockhash=h) + + assert_equal(node.echo(), []) + assert_equal(node.echo(arg0=0,arg9=9), [0] + [None]*8 + [9]) + assert_equal(node.echo(arg1=1), [None, 1]) + assert_equal(node.echo(arg9=None), [None]*10) + assert_equal(node.echo(arg0=0,arg3=3,arg9=9), [0] + [None]*2 + [3] + [None]*5 + [9]) + +if __name__ == '__main__': + NamedArgumentTest().main() diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py new file mode 100755 index 000000000..1e525214f --- /dev/null +++ b/test/functional/rpc_net.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test RPC calls related to net. + +Tests correspond to code in rpc/net.cpp. +""" + +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than_or_equal, + assert_greater_than, + assert_raises_rpc_error, + connect_nodes_bi, + p2p_port, + wait_until, +) +from test_framework.mininode import P2PInterface +from test_framework.messages import CAddress, msg_addr, NODE_NETWORK, NODE_WITNESS + +class NetTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.extra_args = [["-minrelaytxfee=0.00001000"],["-minrelaytxfee=0.00000500"]] + + def run_test(self): + self._test_connection_count() + self._test_getnettotals() + self._test_getnetworkinginfo() + self._test_getaddednodeinfo() + self._test_getpeerinfo() + self._test_getnodeaddresses() + + def _test_connection_count(self): + # connect_nodes_bi connects each node to the other + assert_equal(self.nodes[0].getconnectioncount(), 2) + + def _test_getnettotals(self): + # getnettotals totalbytesrecv and totalbytessent should be + # consistent with getpeerinfo. Since the RPC calls are not atomic, + # and messages might have been recvd or sent between RPC calls, call + # getnettotals before and after and verify that the returned values + # from getpeerinfo are bounded by those values. 
+ net_totals_before = self.nodes[0].getnettotals() + peer_info = self.nodes[0].getpeerinfo() + net_totals_after = self.nodes[0].getnettotals() + assert_equal(len(peer_info), 2) + peers_recv = sum([peer['bytesrecv'] for peer in peer_info]) + peers_sent = sum([peer['bytessent'] for peer in peer_info]) + + assert_greater_than_or_equal(peers_recv, net_totals_before['totalbytesrecv']) + assert_greater_than_or_equal(net_totals_after['totalbytesrecv'], peers_recv) + assert_greater_than_or_equal(peers_sent, net_totals_before['totalbytessent']) + assert_greater_than_or_equal(net_totals_after['totalbytessent'], peers_sent) + + # test getnettotals and getpeerinfo by doing a ping + # the bytes sent/received should change + # note ping and pong are 32 bytes each + self.nodes[0].ping() + wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1) + wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1) + + peer_info_after_ping = self.nodes[0].getpeerinfo() + for before, after in zip(peer_info, peer_info_after_ping): + assert_greater_than_or_equal(after['bytesrecv_per_msg']['pong'], before['bytesrecv_per_msg']['pong'] + 32) + assert_greater_than_or_equal(after['bytessent_per_msg']['ping'], before['bytessent_per_msg']['ping'] + 32) + + def _test_getnetworkinfo(self): + assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True) + assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2) + + self.nodes[0].setnetworkactive(False) + assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False) + # Wait a bit for all sockets to close + wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3) + + self.nodes[0].setnetworkactive(True) + connect_nodes_bi(self.nodes, 0, 1) + assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True) + assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2) + + def _test_getaddednodeinfo(self): + assert_equal(self.nodes[0].getaddednodeinfo(), []) + # add a node (node2) to node0 + ip_port = "127.0.0.1:{}".format(p2p_port(2)) + self.nodes[0].addnode(ip_port, 'add') + # check that the node has indeed been added + added_nodes = self.nodes[0].getaddednodeinfo(ip_port) + assert_equal(len(added_nodes), 1) + assert_equal(added_nodes[0]['addednode'], ip_port) + # check that a non-existent node returns an error + assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1') + + def _test_getpeerinfo(self): + peer_info = [x.getpeerinfo() for x in self.nodes] + # check both sides of bidirectional connection between nodes + # the address bound to on one side will be the source address for the other node + assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr']) + assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr']) + assert_equal(peer_info[0][0]['minfeefilter'], Decimal("0.00000500")) + assert_equal(peer_info[1][0]['minfeefilter'], Decimal("0.00001000")) + + def _test_getnodeaddresses(self): + self.nodes[0].add_p2p_connection(P2PInterface()) + + # send some addresses to the node via the p2p message addr + msg = msg_addr() + imported_addrs = [] + for i in range(256): + a = "123.123.123.{}".format(i) + imported_addrs.append(a) + addr = CAddress() + addr.time = 100000000 + addr.nServices = NODE_NETWORK | NODE_WITNESS + addr.ip = a + addr.port = 8333 + msg.addrs.append(addr) + self.nodes[0].p2p.send_and_ping(msg) + + # obtain addresses via
rpc call and check they were ones sent in before + REQUEST_COUNT = 10 + node_addresses = self.nodes[0].getnodeaddresses(REQUEST_COUNT) + assert_equal(len(node_addresses), REQUEST_COUNT) + for a in node_addresses: + assert_greater_than(a["time"], 1527811200) # 1st June 2018 + assert_equal(a["services"], NODE_NETWORK | NODE_WITNESS) + assert a["address"] in imported_addrs + assert_equal(a["port"], 8333) + + assert_raises_rpc_error(-8, "Address count out of range", self.nodes[0].getnodeaddresses, -1) + + # addrman's size cannot be known reliably after insertion, as hash collisions may occur + # so only test that requesting a large number of addresses returns less than that + LARGE_REQUEST_COUNT = 10000 + node_addresses = self.nodes[0].getnodeaddresses(LARGE_REQUEST_COUNT) + assert_greater_than(LARGE_REQUEST_COUNT, len(node_addresses)) + +if __name__ == '__main__': + NetTest().main() diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py new file mode 100755 index 000000000..72e6e6329 --- /dev/null +++ b/test/functional/rpc_preciousblock.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the preciousblock RPC.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + connect_nodes_bi, + sync_blocks, +) + +def unidirectional_node_sync_via_rpc(node_src, node_dest): + blocks_to_copy = [] + blockhash = node_src.getbestblockhash() + while True: + try: + assert(len(node_dest.getblock(blockhash, False)) > 0) + break + except: + blocks_to_copy.append(blockhash) + blockhash = node_src.getblockheader(blockhash, True)['previousblockhash'] + blocks_to_copy.reverse() + for blockhash in blocks_to_copy: + blockdata = node_src.getblock(blockhash, False) + assert(node_dest.submitblock(blockdata) in (None, 'inconclusive')) + +def node_sync_via_rpc(nodes): + for node_src in nodes: + for node_dest in nodes: + if node_src is node_dest: + continue + unidirectional_node_sync_via_rpc(node_src, node_dest) + +class PreciousTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + + def setup_network(self): + self.setup_nodes() + + def run_test(self): + self.log.info("Ensure submitblock can in principle reorg to a competing chain") + gen_address = lambda i: self.nodes[i].get_deterministic_priv_key().address # A non-wallet address to mine to + self.nodes[0].generatetoaddress(1, gen_address(0)) + assert_equal(self.nodes[0].getblockcount(), 1) + hashZ = self.nodes[1].generatetoaddress(2, gen_address(1))[-1] + assert_equal(self.nodes[1].getblockcount(), 2) + node_sync_via_rpc(self.nodes[0:3]) + assert_equal(self.nodes[0].getbestblockhash(), hashZ) + + self.log.info("Mine blocks A-B-C on Node 0") + hashC = self.nodes[0].generatetoaddress(3, gen_address(0))[-1] + assert_equal(self.nodes[0].getblockcount(), 5) + self.log.info("Mine competing blocks E-F-G on Node 1") + hashG = self.nodes[1].generatetoaddress(3, gen_address(1))[-1] + assert_equal(self.nodes[1].getblockcount(), 5) + assert(hashC != hashG) + self.log.info("Connect nodes and check no reorg occurs") + # Submit competing blocks via RPC so any reorg should occur before we proceed (no way to wait on inaction for p2p sync) + node_sync_via_rpc(self.nodes[0:2]) + connect_nodes_bi(self.nodes,0,1) + 
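# Neither tip should change: the competing chains have equal work, and a node keeps the chain it saw first +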
assert_equal(self.nodes[0].getbestblockhash(), hashC) + assert_equal(self.nodes[1].getbestblockhash(), hashG) + self.log.info("Make Node0 prefer block G") + self.nodes[0].preciousblock(hashG) + assert_equal(self.nodes[0].getbestblockhash(), hashG) + self.log.info("Make Node0 prefer block C again") + self.nodes[0].preciousblock(hashC) + assert_equal(self.nodes[0].getbestblockhash(), hashC) + self.log.info("Make Node1 prefer block C") + self.nodes[1].preciousblock(hashC) + sync_blocks(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC + assert_equal(self.nodes[1].getbestblockhash(), hashC) + self.log.info("Make Node1 prefer block G again") + self.nodes[1].preciousblock(hashG) + assert_equal(self.nodes[1].getbestblockhash(), hashG) + self.log.info("Make Node0 prefer block G again") + self.nodes[0].preciousblock(hashG) + assert_equal(self.nodes[0].getbestblockhash(), hashG) + self.log.info("Make Node1 prefer block C again") + self.nodes[1].preciousblock(hashC) + assert_equal(self.nodes[1].getbestblockhash(), hashC) + self.log.info("Mine another block (E-F-G-)H on Node 0 and reorg Node 1") + self.nodes[0].generatetoaddress(1, gen_address(0)) + assert_equal(self.nodes[0].getblockcount(), 6) + sync_blocks(self.nodes[0:2]) + hashH = self.nodes[0].getbestblockhash() + assert_equal(self.nodes[1].getbestblockhash(), hashH) + self.log.info("Node1 should not be able to prefer block C anymore") + self.nodes[1].preciousblock(hashC) + assert_equal(self.nodes[1].getbestblockhash(), hashH) + self.log.info("Mine competing blocks I-J-K-L on Node 2") + self.nodes[2].generatetoaddress(4, gen_address(2)) + assert_equal(self.nodes[2].getblockcount(), 6) + hashL = self.nodes[2].getbestblockhash() + self.log.info("Connect nodes and check no reorg occurs") + node_sync_via_rpc(self.nodes[1:3]) + connect_nodes_bi(self.nodes,1,2) + connect_nodes_bi(self.nodes,0,2) + assert_equal(self.nodes[0].getbestblockhash(), hashH) + assert_equal(self.nodes[1].getbestblockhash(), hashH) + assert_equal(self.nodes[2].getbestblockhash(), hashL) + self.log.info("Make Node1 prefer block L") + self.nodes[1].preciousblock(hashL) + assert_equal(self.nodes[1].getbestblockhash(), hashL) + self.log.info("Make Node2 prefer block H") + self.nodes[2].preciousblock(hashH) + assert_equal(self.nodes[2].getbestblockhash(), hashH) + +if __name__ == '__main__': + PreciousTest().main() diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py new file mode 100755 index 000000000..54dc87144 --- /dev/null +++ b/test/functional/rpc_psbt.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the Partially Signed Transaction RPCs. 
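+ +Covers creating, updating, signing, combining, finalizing and extracting PSBTs, +including the BIP 174 test vectors.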
+""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, find_output + +import json +import os + +MAX_BIP125_RBF_SEQUENCE = 0xfffffffd + +# Create one-input, one-output, no-fee transaction: +class PSBTTest(BitcoinTestFramework): + + def set_test_params(self): + self.setup_clean_chain = False + self.num_nodes = 3 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Create and fund a raw tx for sending 10 BTC + psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt'] + + # Node 1 should not be able to add anything to it but still return the psbtx same as before + psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt'] + assert_equal(psbtx1, psbtx) + + # Sign the transaction and send + signed_tx = self.nodes[0].walletprocesspsbt(psbtx)['psbt'] + final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex'] + self.nodes[0].sendrawtransaction(final_tx) + + # Create p2sh, p2wpkh, and p2wsh addresses + pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey'] + pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey'] + pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey'] + p2sh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address'] + p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address'] + p2sh_p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address'] + p2wpkh = self.nodes[1].getnewaddress("", "bech32") + p2pkh = self.nodes[1].getnewaddress("", "legacy") + p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit") + + # fund those addresses + rawtx = self.nodes[0].createrawtransaction([], {p2sh:10, p2wsh:10, p2wpkh:10, p2sh_p2wsh:10, p2sh_p2wpkh:10, p2pkh:10}) + rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition":3}) + signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])['hex'] + txid = self.nodes[0].sendrawtransaction(signed_tx) + self.nodes[0].generate(6) + self.sync_all() + + # Find the output pos + p2sh_pos = -1 + p2wsh_pos = -1 + p2wpkh_pos = -1 + p2pkh_pos = -1 + p2sh_p2wsh_pos = -1 + p2sh_p2wpkh_pos = -1 + decoded = self.nodes[0].decoderawtransaction(signed_tx) + for out in decoded['vout']: + if out['scriptPubKey']['addresses'][0] == p2sh: + p2sh_pos = out['n'] + elif out['scriptPubKey']['addresses'][0] == p2wsh: + p2wsh_pos = out['n'] + elif out['scriptPubKey']['addresses'][0] == p2wpkh: + p2wpkh_pos = out['n'] + elif out['scriptPubKey']['addresses'][0] == p2sh_p2wsh: + p2sh_p2wsh_pos = out['n'] + elif out['scriptPubKey']['addresses'][0] == p2sh_p2wpkh: + p2sh_p2wpkh_pos = out['n'] + elif out['scriptPubKey']['addresses'][0] == p2pkh: + p2pkh_pos = out['n'] + + # spend single key from node 1 + rawtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99})['psbt'] + walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(rawtx) + assert_equal(walletprocesspsbt_out['complete'], True) + self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex']) + + # partially sign multisig things with node 1 + psbtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], 
{self.nodes[1].getnewaddress():29.99})['psbt'] + walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx) + psbtx = walletprocesspsbt_out['psbt'] + assert_equal(walletprocesspsbt_out['complete'], False) + + # partially sign with node 2. This should be complete and sendable + walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx) + assert_equal(walletprocesspsbt_out['complete'], True) + self.nodes[2].sendrawtransaction(self.nodes[2].finalizepsbt(walletprocesspsbt_out['psbt'])['hex']) + + # check that walletprocesspsbt fails to decode a non-psbt + rawtx = self.nodes[1].createrawtransaction([{"txid":txid,"vout":p2wpkh_pos}], {self.nodes[1].getnewaddress():9.99}) + assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].walletprocesspsbt, rawtx) + + # Convert a non-psbt to psbt and make sure we can decode it + rawtx = self.nodes[0].createrawtransaction([], {self.nodes[1].getnewaddress():10}) + rawtx = self.nodes[0].fundrawtransaction(rawtx) + new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) + self.nodes[0].decodepsbt(new_psbt) + + # Make sure that a psbt with signatures cannot be converted + signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex']) + assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].converttopsbt, signedtx['hex']) + + # Explicitly allow converting non-empty txs + new_psbt = self.nodes[0].converttopsbt(rawtx['hex']) + self.nodes[0].decodepsbt(new_psbt) + + # Create outputs to nodes 1 and 2 + node1_addr = self.nodes[1].getnewaddress() + node2_addr = self.nodes[2].getnewaddress() + txid1 = self.nodes[0].sendtoaddress(node1_addr, 13) + txid2 =self.nodes[0].sendtoaddress(node2_addr, 13) + self.nodes[0].generate(6) + self.sync_all() + vout1 = find_output(self.nodes[1], txid1, 13) + vout2 = find_output(self.nodes[2], txid2, 13) + + # Create a psbt spending outputs from nodes 1 and 2 + psbt_orig = self.nodes[0].createpsbt([{"txid":txid1, "vout":vout1}, {"txid":txid2, "vout":vout2}], {self.nodes[0].getnewaddress():25.999}) + + # Update psbts, should only have data for one input and not the other + psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig)['psbt'] + psbt1_decoded = self.nodes[0].decodepsbt(psbt1) + assert psbt1_decoded['inputs'][0] and not psbt1_decoded['inputs'][1] + psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig)['psbt'] + psbt2_decoded = self.nodes[0].decodepsbt(psbt2) + assert not psbt2_decoded['inputs'][0] and psbt2_decoded['inputs'][1] + + # Combine, finalize, and send the psbts + combined = self.nodes[0].combinepsbt([psbt1, psbt2]) + finalized = self.nodes[0].finalizepsbt(combined)['hex'] + self.nodes[0].sendrawtransaction(finalized) + self.nodes[0].generate(6) + self.sync_all() + + # Test additional args in walletcreatepsbt + # Make sure both pre-included and funded inputs + # have the correct sequence numbers based on + # replaceable arg + block_height = self.nodes[0].getblockcount() + unspent = self.nodes[0].listunspent()[0] + psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable":True}, False) + decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) + for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): + assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE) + assert "bip32_derivs" not in psbt_in + assert_equal(decoded_psbt["tx"]["locktime"], block_height+2) + + # Same construction with only locktime set + psbtx_info = 
self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {}, True) + decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) + for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]): + assert tx_in["sequence"] > MAX_BIP125_RBF_SEQUENCE + assert "bip32_derivs" in psbt_in + assert_equal(decoded_psbt["tx"]["locktime"], block_height) + + # Same construction without optional arguments + psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}]) + decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"]) + for tx_in in decoded_psbt["tx"]["vin"]: + assert tx_in["sequence"] > MAX_BIP125_RBF_SEQUENCE + assert_equal(decoded_psbt["tx"]["locktime"], 0) + + + # BIP 174 Test Vectors + + # Check that unknown values are just passed through + unknown_psbt = "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAA=" + unknown_out = self.nodes[0].walletprocesspsbt(unknown_psbt)['psbt'] + assert_equal(unknown_psbt, unknown_out) + + # Open the data file + with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_psbt.json'), encoding='utf-8') as f: + d = json.load(f) + invalids = d['invalid'] + valids = d['valid'] + creators = d['creator'] + signers = d['signer'] + combiners = d['combiner'] + finalizers = d['finalizer'] + extractors = d['extractor'] + + # Invalid PSBTs + for invalid in invalids: + assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decodepsbt, invalid) + + # Valid PSBTs + for valid in valids: + self.nodes[0].decodepsbt(valid) + + # Creator Tests + for creator in creators: + created_tx = self.nodes[0].createpsbt(creator['inputs'], creator['outputs']) + assert_equal(created_tx, creator['result']) + + # Signer tests + for i, signer in enumerate(signers): + self.nodes[2].createwallet("wallet{}".format(i)) + wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i)) + for key in signer['privkeys']: + wrpc.importprivkey(key) + signed_tx = wrpc.walletprocesspsbt(signer['psbt'])['psbt'] + assert_equal(signed_tx, signer['result']) + + # Combiner test + for combiner in combiners: + combined = self.nodes[2].combinepsbt(combiner['combine']) + assert_equal(combined, combiner['result']) + + # Finalizer test + for finalizer in finalizers: + finalized = self.nodes[2].finalizepsbt(finalizer['finalize'], False)['psbt'] + assert_equal(finalized, finalizer['result']) + + # Extractor test + for extractor in extractors: + extracted = self.nodes[2].finalizepsbt(extractor['extract'], True)['hex'] + assert_equal(extracted, extractor['result']) + + +if __name__ == '__main__': + PSBTTest().main() diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py new file mode 100755 index 000000000..8ed490f55 --- /dev/null +++ b/test/functional/rpc_rawtransaction.py @@ -0,0 +1,443 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the rawtransaction RPCs. 
+ +Test the following RPCs: + - createrawtransaction + - signrawtransactionwithwallet + - sendrawtransaction + - decoderawtransaction + - getrawtransaction +""" + +from collections import OrderedDict +from decimal import Decimal +from io import BytesIO +from test_framework.messages import CTransaction, ToHex +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes_bi, hex_str_to_bytes + +class multidict(dict): + """Dictionary that allows duplicate keys. + + Constructed with a list of (key, value) tuples. When dumped by the json module, + will output invalid JSON with repeated keys, e.g.: + >>> json.dumps(multidict([(1, 2), (1, 2)])) + '{"1": 2, "1": 2}' + + Used to test calls to RPC methods with repeated keys in the json object.""" + + def __init__(self, x): + dict.__init__(self, x) + self.x = x + + def items(self): + return self.x + + +# Create one-input, one-output, no-fee transaction: +class RawTransactionsTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"], ["-addresstype=legacy"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self, split=False): + super().setup_network() + connect_nodes_bi(self.nodes, 0, 2) + + def run_test(self): + self.log.info('prepare some coins for multiple *rawtransaction commands') + self.nodes[2].generate(1) + self.sync_all() + self.nodes[0].generate(101) + self.sync_all() + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) + self.sync_all() + self.nodes[0].generate(5) + self.sync_all() + + self.log.info('Test getrawtransaction on genesis block coinbase returns an error') + block = self.nodes[0].getblock(self.nodes[0].getblockhash(0)) + assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot']) + + self.log.info('Check parameter types and required parameters of createrawtransaction') + # Test `createrawtransaction` required parameters + assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction) + assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, []) + + # Test `createrawtransaction` invalid extra parameters + assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo') + + # Test `createrawtransaction` invalid `inputs` + txid = '1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000' + assert_raises_rpc_error(-3, "Expected type array", self.nodes[0].createrawtransaction, 'foo', {}) + assert_raises_rpc_error(-1, "JSON value is not an object as expected", self.nodes[0].createrawtransaction, ['foo'], {}) + assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].createrawtransaction, [{}], {}) + assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {}) + assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {}) +
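# Note: a syntactically valid txid is accepted here even when no such transaction exists; spending a missing input only fails later, at sendrawtransaction +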
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {}) + assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {}) + assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {}) + assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {}) + + # Test `createrawtransaction` invalid `outputs` + address = self.nodes[0].getnewaddress() + address2 = self.nodes[0].getnewaddress() + assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo') + self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility + self.nodes[0].createrawtransaction(inputs=[], outputs=[]) + assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'}) + assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0}) + assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'}) + assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1}) + assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)])) + assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}]) + assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}]) + assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']]) + + # Test `createrawtransaction` invalid `locktime` + assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo') + assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1) + assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296) + + # Test `createrawtransaction` invalid `replaceable` + assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo') + + self.log.info('Check that createrawtransaction accepts an array and object as outputs') + tx = CTransaction() + # One output + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99})))) + assert_equal(len(tx.vout), 1) + assert_equal( + bytes_to_hex_str(tx.serialize()), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]), + ) + # Two outputs + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)]))))) + assert_equal(len(tx.vout), 2) + assert_equal( + bytes_to_hex_str(tx.serialize()), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]), + ) + # Two data outputs + 
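# 'data' outputs become OP_RETURN outputs carrying the given hex payload, and duplicates are allowed +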
tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([('data', '99'), ('data', '99')]))))) + assert_equal(len(tx.vout), 2) + assert_equal( + bytes_to_hex_str(tx.serialize()), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{'data': '99'}, {'data': '99'}]), + ) + # Multiple mixed outputs + tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), ('data', '99'), ('data', '99')]))))) + assert_equal(len(tx.vout), 3) + assert_equal( + bytes_to_hex_str(tx.serialize()), + self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {'data': '99'}, {'data': '99'}]), + ) + + for type in ["bech32", "p2sh-segwit", "legacy"]: + addr = self.nodes[0].getnewaddress("", type) + addrinfo = self.nodes[0].getaddressinfo(addr) + pubkey = addrinfo["scriptPubKey"] + + self.log.info('sendrawtransaction with missing prevtx info (%s)' % type) + + # Test `signrawtransactionwithwallet` invalid `prevtxs` + inputs = [ {'txid' : txid, 'vout' : 3, 'sequence' : 1000}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + + prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1) + succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) + assert succ["complete"] + if type == "legacy": + del prevtx["amount"] + succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx]) + assert succ["complete"] + + if type != "legacy": + assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + "txid": txid, + "scriptPubKey": pubkey, + "vout": 3, + } + ]) + + assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + "txid": txid, + "scriptPubKey": pubkey, + "amount": 1, + } + ]) + assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + "scriptPubKey": pubkey, + "vout": 3, + "amount": 1, + } + ]) + assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [ + { + "txid": txid, + "vout": 3, + "amount": 1 + } + ]) + + ######################################### + # sendrawtransaction with missing input # + ######################################### + + self.log.info('sendrawtransaction with missing input') + inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] # won't exist + outputs = { self.nodes[0].getnewaddress() : 4.998 } + rawtx = self.nodes[2].createrawtransaction(inputs, outputs) + rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx) + + # This will raise an exception since there are missing inputs + assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex']) + + ##################################### + # getrawtransaction with block hash # + ##################################### + + # make a tx by sending, then generate 2 blocks; block1 has the tx in it + tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1) + block1, block2 = self.nodes[2].generate(2) + self.sync_all() + # We should be able to get the raw transaction by providing the correct block + gottx = self.nodes[0].getrawtransaction(tx, True, block1) + assert_equal(gottx['txid'], tx) + assert_equal(gottx['in_active_chain'], True) + # We should not have the 'in_active_chain' flag when we
don't provide a block + gottx = self.nodes[0].getrawtransaction(tx, True) + assert_equal(gottx['txid'], tx) + assert 'in_active_chain' not in gottx + # We should not get the tx if we provide an unrelated block + assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2) + # An invalid block hash should raise the correct errors + assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getrawtransaction, tx, True, True) + assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[0].getrawtransaction, tx, True, "foobar") + assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[0].getrawtransaction, tx, True, "abcd1234") + assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getrawtransaction, tx, True, "ZZZ0000000000000000000000000000000000000000000000000000000000000") + assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000") + # Undo the blocks and check in_active_chain + self.nodes[0].invalidateblock(block1) + gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1) + assert_equal(gottx['in_active_chain'], False) + self.nodes[0].reconsiderblock(block1) + assert_equal(self.nodes[0].getbestblockhash(), block2) + + ######################### + # RAW TX MULTISIG TESTS # + ######################### + # 2of2 test + addr1 = self.nodes[2].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[2].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + + # Tests for createmultisig and addmultisigaddress + assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"]) + self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # createmultisig can only take public keys + assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1]) # addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here. 
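+ # createmultisig (above) only accepts public keys, while addmultisigaddress below can look addr1's pubkey up in node2's wallet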
+ + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address'] + + # use balance deltas instead of absolute values + bal = self.nodes[2].getbalance() + + # send 1.2 BTC to the multisig address + txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000')) # node2 has both keys of the 2of2 multisig address, so the tx should affect its balance + + + # 2of3 test from different nodes + bal = self.nodes[2].getbalance() + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + addr3 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + addr3Obj = self.nodes[2].getaddressinfo(addr3) + + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address'] + + txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) + decTx = self.nodes[0].gettransaction(txId) + rawTx = self.nodes[0].decoderawtransaction(decTx['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + # THIS IS AN INCOMPLETE FEATURE + # NODE2 HAS TWO OF THREE KEYS AND THE FUNDS SHOULD BE SPENDABLE AND COUNT TOWARD THE BALANCE + assert_equal(self.nodes[2].getbalance(), bal) # for now, assume the funds of a 2of3 multisig tx are not marked as spendable + + txDetails = self.nodes[0].gettransaction(txId, True) + rawTx = self.nodes[0].decoderawtransaction(txDetails['hex']) + vout = False + for outpoint in rawTx['vout']: + if outpoint['value'] == Decimal('2.20000000'): + vout = outpoint + break + + bal = self.nodes[0].getbalance() + inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}] + outputs = { self.nodes[0].getnewaddress() : 2.19 } + rawTx = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs) + assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, can't comp.
sign the tx + + rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs) + assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys + self.nodes[2].sendrawtransaction(rawTxSigned['hex']) + rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx + + # 2of2 test for combining transactions + bal = self.nodes[2].getbalance() + addr1 = self.nodes[1].getnewaddress() + addr2 = self.nodes[2].getnewaddress() + + addr1Obj = self.nodes[1].getaddressinfo(addr1) + addr2Obj = self.nodes[2].getaddressinfo(addr2) + + self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address'] + mSigObjValid = self.nodes[2].getaddressinfo(mSigObj) + + txId = self.nodes[0].sendtoaddress(mSigObj, 2.2) + decTx = self.nodes[0].gettransaction(txId) + rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex']) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + + assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable + + txDetails = self.nodes[0].gettransaction(txId, True) + rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex']) + vout = False + for outpoint in rawTx2['vout']: + if outpoint['value'] == Decimal('2.20000000'): + vout = outpoint + break + + bal = self.nodes[0].getbalance() + inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}] + outputs = { self.nodes[0].getnewaddress() : 2.19 } + rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs) + rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs) + self.log.debug(rawTxPartialSigned1) + assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx + + rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs) + self.log.debug(rawTxPartialSigned2) + assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. 
sign the tx + rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']]) + self.log.debug(rawTxComb) + self.nodes[2].sendrawtransaction(rawTxComb) + rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb) + self.sync_all() + self.nodes[0].generate(1) + self.sync_all() + assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx + + # decoderawtransaction tests + # witness transaction + encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000" + decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction + assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) + assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction + # non-witness transaction + encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000" + decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction + assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000')) + + # getrawtransaction tests + # 1. valid parameters - only supply txid + txHash = rawTx["hash"] + assert_equal(self.nodes[0].getrawtransaction(txHash), rawTxSigned['hex']) + + # 2. valid parameters - supply txid and 0 for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txHash, 0), rawTxSigned['hex']) + + # 3. valid parameters - supply txid and False for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txHash, False), rawTxSigned['hex']) + + # 4. valid parameters - supply txid and 1 for verbose. + # We only check the "hex" field of the output so we don't need to update this test every time the output format changes. + assert_equal(self.nodes[0].getrawtransaction(txHash, 1)["hex"], rawTxSigned['hex']) + + # 5. valid parameters - supply txid and True for non-verbose + assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex']) + + # 6. invalid parameters - supply txid and string "Flase" + assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase") + + # 7. invalid parameters - supply txid and empty array + assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, []) + + # 8. invalid parameters - supply txid and empty dict + assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {}) + + inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + decrawtx= self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['vin'][0]['sequence'], 1000) + + # 9. invalid parameters - sequence number out of range + inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}] + outputs = { self.nodes[0].getnewaddress() : 1 } + assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) + + # 10. 
invalid parameters - sequence number out of range + inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}] + outputs = { self.nodes[0].getnewaddress() : 1 } + assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs) + + inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}] + outputs = { self.nodes[0].getnewaddress() : 1 } + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + decrawtx= self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['vin'][0]['sequence'], 4294967294) + + #################################### + # TRANSACTION VERSION NUMBER TESTS # + #################################### + + # Test the minimum transaction version number that fits in a signed 32-bit integer. + tx = CTransaction() + tx.nVersion = -0x80000000 + rawtx = ToHex(tx) + decrawtx = self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['version'], -0x80000000) + + # Test the maximum transaction version number that fits in a signed 32-bit integer. + tx = CTransaction() + tx.nVersion = 0x7fffffff + rawtx = ToHex(tx) + decrawtx = self.nodes[0].decoderawtransaction(rawtx) + assert_equal(decrawtx['version'], 0x7fffffff) + +if __name__ == '__main__': + RawTransactionsTest().main() diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py new file mode 100755 index 000000000..96f9ccdbd --- /dev/null +++ b/test/functional/rpc_scantxoutset.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the scantxoutset rpc call.""" +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +from decimal import Decimal +import shutil +import os + +class ScantxoutsetTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.log.info("Mining blocks...") + self.nodes[0].generate(110) + + addr_P2SH_SEGWIT = self.nodes[0].getnewaddress("", "p2sh-segwit") + pubk1 = self.nodes[0].getaddressinfo(addr_P2SH_SEGWIT)['pubkey'] + addr_LEGACY = self.nodes[0].getnewaddress("", "legacy") + pubk2 = self.nodes[0].getaddressinfo(addr_LEGACY)['pubkey'] + addr_BECH32 = self.nodes[0].getnewaddress("", "bech32") + pubk3 = self.nodes[0].getaddressinfo(addr_BECH32)['pubkey'] + self.nodes[0].sendtoaddress(addr_P2SH_SEGWIT, 0.001) + self.nodes[0].sendtoaddress(addr_LEGACY, 0.002) + self.nodes[0].sendtoaddress(addr_BECH32, 0.004) + + #send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK + self.nodes[0].sendtoaddress("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008) # (m/0'/0'/0') + self.nodes[0].sendtoaddress("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016) # (m/0'/0'/1') + self.nodes[0].sendtoaddress("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032) # (m/0'/0'/1500') + self.nodes[0].sendtoaddress("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064) # (m/0'/0'/0) + self.nodes[0].sendtoaddress("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128) # (m/0'/0'/1) + self.nodes[0].sendtoaddress("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256) # (m/0'/0'/1500) + self.nodes[0].sendtoaddress("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512) # (m/1/1/0') + self.nodes[0].sendtoaddress("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024) # (m/1/1/1') + self.nodes[0].sendtoaddress("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048) # (m/1/1/1500') + self.nodes[0].sendtoaddress("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096) # (m/1/1/0) + self.nodes[0].sendtoaddress("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192) # (m/1/1/1) + self.nodes[0].sendtoaddress("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384) # (m/1/1/1500) + + + self.nodes[0].generate(1) + + self.log.info("Stop node, remove wallet, mine again some blocks...") + self.stop_node(0) + shutil.rmtree(os.path.join(self.nodes[0].datadir, "regtest", 'wallets')) + self.start_node(0) + self.nodes[0].generate(110) + + self.restart_node(0, ['-nowallet']) + self.log.info("Test if we have found the non HD unspent outputs.") + assert_equal(self.nodes[0].scantxoutset("start", [ "pkh(" + pubk1 + ")", "pkh(" + pubk2 + ")", "pkh(" + pubk3 + ")"])['total_amount'], Decimal("0.002")) + assert_equal(self.nodes[0].scantxoutset("start", [ "wpkh(" + pubk1 + ")", "wpkh(" + pubk2 + ")", "wpkh(" + pubk3 + ")"])['total_amount'], Decimal("0.004")) + assert_equal(self.nodes[0].scantxoutset("start", [ "sh(wpkh(" + pubk1 + "))", "sh(wpkh(" + pubk2 + "))", "sh(wpkh(" + pubk3 + "))"])['total_amount'], Decimal("0.001")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(" + pubk1 + ")", "combo(" + pubk2 + ")", "combo(" + pubk3 + ")"])['total_amount'], Decimal("0.007")) + assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "addr(" + addr_BECH32 + ")"])['total_amount'], Decimal("0.007")) + assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "combo(" + 
pubk3 + ")"])['total_amount'], Decimal("0.007")) + + self.log.info("Test extended key derivation.") + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("0.008")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("0.016")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("0.032")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("0.064")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("0.128")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("0.256")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("0.024")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("0.056")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("0.192")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("0.448")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("0.512")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'], Decimal("1.024")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'], Decimal("2.048")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'], Decimal("4.096")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'], Decimal("8.192")) + assert_equal(self.nodes[0].scantxoutset("start", [ 
"combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'], Decimal("16.384")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'], Decimal("4.096")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'], Decimal("8.192")) + assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'], Decimal("16.384")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'], Decimal("1.536")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'], Decimal("3.584")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288")) + assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672")) + +if __name__ == '__main__': + ScantxoutsetTest().main() diff --git a/test/functional/rpc_signmessage.py b/test/functional/rpc_signmessage.py new file mode 100755 index 000000000..ad0e29b45 --- /dev/null +++ b/test/functional/rpc_signmessage.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test RPC commands for signing and verifying messages.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + +class SignMessagesTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + self.extra_args = [["-addresstype=legacy"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + message = 'This is just a test message' + + self.log.info('test signing with priv_key') + priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N' + address = 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB' + expected_signature = 'INbVnW4e6PeRmsv2Qgu8NuopvrVjkcxob+sX8OcZG0SALhWybUjzMLPdAsXI46YZGb0KQTRii+wWIQzRpG/U+S0=' + signature = self.nodes[0].signmessagewithprivkey(priv_key, message) + assert_equal(expected_signature, signature) + assert(self.nodes[0].verifymessage(address, signature, message)) + + self.log.info('test signing with an address with wallet') + address = self.nodes[0].getnewaddress() + signature = self.nodes[0].signmessage(address, message) + assert(self.nodes[0].verifymessage(address, signature, message)) + + self.log.info('test verifying with another address should not work') + other_address = self.nodes[0].getnewaddress() + other_signature = self.nodes[0].signmessage(other_address, message) + assert(not self.nodes[0].verifymessage(other_address, signature, message)) + assert(not self.nodes[0].verifymessage(address, other_signature, message)) + +if __name__ == '__main__': + SignMessagesTest().main() diff --git a/test/functional/rpc_signrawtransaction.py b/test/functional/rpc_signrawtransaction.py new file mode 100755 index 000000000..291538df6 --- /dev/null +++ b/test/functional/rpc_signrawtransaction.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test transaction signing using the signrawtransaction* RPCs.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error + + +class SignRawTransactionsTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + self.extra_args = [["-deprecatedrpc=signrawtransaction"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def successful_signing_test(self): + """Create and sign a valid raw transaction with one input. 
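+ + The scriptPubKey of each input is supplied explicitly along with the private + keys, so signrawtransactionwithkey can sign without any wallet or chain context.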
+ + Expected results: + + 1) The transaction has a complete set of signatures + 2) No script verification error occurred""" + privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA'] + + inputs = [ + # Valid pay-to-pubkey scripts + {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0, + 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'}, + {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0, + 'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'}, + ] + + outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1} + + rawTx = self.nodes[0].createrawtransaction(inputs, outputs) + rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs) + + # 1) The transaction has a complete set of signatures + assert rawTxSigned['complete'] + + # 2) No script verification error occurred + assert 'errors' not in rawTxSigned + + def test_with_lock_outputs(self): + """Test correct error reporting when trying to sign a locked output""" + self.nodes[0].encryptwallet("password") + + rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000' + + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx) + + def script_verification_error_test(self): + """Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script. + + Expected results: + + 3) The transaction has no complete set of signatures + 4) Two script verification errors occurred + 5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error") + 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)""" + privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'] + + inputs = [ + # Valid pay-to-pubkey script + {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0}, + # Invalid script + {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7}, + # Missing scriptPubKey + {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1}, + ] + + scripts = [ + # Valid pay-to-pubkey script + {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0, + 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'}, + # Invalid script + {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7, + 'scriptPubKey': 'badbadbadbad'} + ] + + outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1} + + rawTx = self.nodes[0].createrawtransaction(inputs, outputs) + + # Make sure decoderawtransaction is at least marginally sane + decodedRawTx = self.nodes[0].decoderawtransaction(rawTx) + for i, inp in enumerate(inputs): + assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"]) + assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"]) + + # Make sure decoderawtransaction throws if there is extra data + assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00") + + rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts) + + # 3) The transaction has no complete set of signatures + assert not rawTxSigned['complete'] + + # 4) Two script verification errors 
occurred + assert 'errors' in rawTxSigned + assert_equal(len(rawTxSigned['errors']), 2) + + # 5) Script verification errors have certain properties + assert 'txid' in rawTxSigned['errors'][0] + assert 'vout' in rawTxSigned['errors'][0] + assert 'witness' in rawTxSigned['errors'][0] + assert 'scriptSig' in rawTxSigned['errors'][0] + assert 'sequence' in rawTxSigned['errors'][0] + assert 'error' in rawTxSigned['errors'][0] + + # 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2) + assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid']) + assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout']) + assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid']) + assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout']) + assert not rawTxSigned['errors'][0]['witness'] + + # Now test signing failure for transaction with input witnesses + p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000" + + rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx) + + # 7) The transaction has no complete set of signatures + assert not rawTxSigned['complete'] + + # 8) Two script verification errors occurred + assert 'errors' in rawTxSigned + assert_equal(len(rawTxSigned['errors']), 2) + + # 9) Script verification errors have certain properties + assert 'txid' in rawTxSigned['errors'][0] + assert 'vout' in rawTxSigned['errors'][0] + assert 'witness' in rawTxSigned['errors'][0] + assert 'scriptSig' in rawTxSigned['errors'][0] + assert 'sequence' in rawTxSigned['errors'][0] + assert 'error' in rawTxSigned['errors'][0] + + # Non-empty witness checked here + assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"]) + assert not rawTxSigned['errors'][0]['witness'] + + def run_test(self): + self.successful_signing_test() + self.script_verification_error_test() + self.test_with_lock_outputs() + + +if __name__ == '__main__': + SignRawTransactionsTest().main() diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py new file mode 100755 index 000000000..8913b8698 --- /dev/null +++ b/test/functional/rpc_txoutproof.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
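The error checks in the signing test above lean heavily on assert_raises_rpc_error. A simplified sketch of what that helper does; the shipped version in test_framework/util.py adds more bookkeeping, so treat this as illustrative only:

    from test_framework.authproxy import JSONRPCException

    def assert_raises_rpc_error(code, message, fun, *args, **kwds):
        try:
            fun(*args, **kwds)
        except JSONRPCException as e:
            # The error code must match exactly; the message may be a substring.
            assert e.error["code"] == code
            assert message in e.error["message"]
        else:
            raise AssertionError("No exception raised")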
+"""Test gettxoutproof and verifytxoutproof RPCs.""" + +from test_framework.messages import CMerkleBlock, FromHex, ToHex +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes + +class MerkleBlockTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = True + # Nodes 0/1 are "wallet" nodes, Nodes 2/3 are used for testing + self.extra_args = [[], [], [], ["-txindex"]] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.setup_nodes() + connect_nodes(self.nodes[0], 1) + connect_nodes(self.nodes[0], 2) + connect_nodes(self.nodes[0], 3) + + self.sync_all() + + def run_test(self): + self.log.info("Mining blocks...") + self.nodes[0].generate(105) + self.sync_all() + + chain_height = self.nodes[1].getblockcount() + assert_equal(chain_height, 105) + assert_equal(self.nodes[1].getbalance(), 0) + assert_equal(self.nodes[2].getbalance(), 0) + + node0utxos = self.nodes[0].listunspent(1) + tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99}) + txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx1)["hex"]) + tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99}) + txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx2)["hex"]) + # This will raise an exception because the transaction is not yet in a block + assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1]) + + self.nodes[0].generate(1) + blockhash = self.nodes[0].getblockhash(chain_height + 1) + self.sync_all() + + txlist = [] + blocktxn = self.nodes[0].getblock(blockhash, True)["tx"] + txlist.append(blocktxn[1]) + txlist.append(blocktxn[2]) + + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1])), [txid1]) + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist) + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist) + + txin_spent = self.nodes[1].listunspent(1).pop() + tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 49.98}) + txid3 = self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransactionwithwallet(tx3)["hex"]) + self.nodes[0].generate(1) + self.sync_all() + + txid_spent = txin_spent["txid"] + txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2 + + # Invalid txids + assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[2].gettxoutproof, ["00000000000000000000000000000000"], blockhash) + assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash) + # Invalid blockhashes + assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[2].gettxoutproof, [txid_spent], "00000000000000000000000000000000") + assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000") + # We can't find the 
block from a fully-spent tx + assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent]) + # We can get the proof if we specify the block + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent]) + # We can't get the proof if we specify a non-existent block + assert_raises_rpc_error(-5, "Block not found", self.nodes[2].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000") + # We can get the proof if the transaction is unspent + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent]) + # We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter. + assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2]))), sorted(txlist)) + assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid2, txid1]))), sorted(txlist)) + # We can always get a proof if we have a -txindex + assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent]) + # We can't get a proof if we specify transactions from different blocks + assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3]) + + # Now we'll try tweaking a proof. + proof = self.nodes[3].gettxoutproof([txid1, txid2]) + assert txid1 in self.nodes[0].verifytxoutproof(proof) + assert txid2 in self.nodes[1].verifytxoutproof(proof) + + tweaked_proof = FromHex(CMerkleBlock(), proof) + + # Make sure that our serialization/deserialization is working + assert txid1 in self.nodes[2].verifytxoutproof(ToHex(tweaked_proof)) + + # Check to see if we can go up the merkle tree and pass this off as a + # single-transaction block + tweaked_proof.txn.nTransactions = 1 + tweaked_proof.txn.vHash = [tweaked_proof.header.hashMerkleRoot] + tweaked_proof.txn.vBits = [True] + [False]*7 + + for n in self.nodes: + assert not n.verifytxoutproof(ToHex(tweaked_proof)) + + # TODO: try more variants, eg transactions at different depths, and + # verify that the proofs are invalid + +if __name__ == '__main__': + MerkleBlockTest().main() diff --git a/test/functional/rpc_uptime.py b/test/functional/rpc_uptime.py new file mode 100755 index 000000000..20b634155 --- /dev/null +++ b/test/functional/rpc_uptime.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the RPC call related to the uptime command. + +Test corresponds to code in rpc/server.cpp. 
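For background on why the tweaked proof in MerkleBlockTest fails: gettxoutproof commits to the block's merkle root, a binary tree of double-SHA256 hashes over the txids in which an odd level duplicates its last entry. A self-contained sketch of that root computation, mirroring CBlock.get_merkle_root in the framework:

    import hashlib

    def hash256(s):
        return hashlib.sha256(hashlib.sha256(s).digest()).digest()

    def merkle_root(hashes):
        hashes = list(hashes)  # 32-byte serialized txids
        while len(hashes) > 1:
            if len(hashes) % 2:
                hashes.append(hashes[-1])  # duplicate the odd one out
            hashes = [hash256(a + b) for a, b in zip(hashes[::2], hashes[1::2])]
        return hashes[0]

The tweak in the test exploits the fact that an inner node and a txid are both 32-byte hashes, so verifytxoutproof must sanity-check the proof's shape rather than only recompute the root.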
+""" + +import time + +from test_framework.test_framework import BitcoinTestFramework + + +class UptimeTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def run_test(self): + self._test_uptime() + + def _test_uptime(self): + wait_time = 10 + self.nodes[0].setmocktime(int(time.time() + wait_time)) + assert(self.nodes[0].uptime() >= wait_time) + + +if __name__ == '__main__': + UptimeTest().main() diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py new file mode 100755 index 000000000..102dd2259 --- /dev/null +++ b/test/functional/rpc_users.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test multiple RPC users.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + get_datadir_path, + str_to_b64str, +) + +import os +import http.client +import urllib.parse +import subprocess +from random import SystemRandom +import string +import configparser +import sys + + +class HTTPBasicsTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def setup_chain(self): + super().setup_chain() + #Append rpcauth to bitcoin.conf before initialization + rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144" + rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e" + rpcuser = "rpcuser=rpcuser💻" + rpcpassword = "rpcpassword=rpcpassword🔑" + + self.user = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10)) + config = configparser.ConfigParser() + config.read_file(open(self.options.configfile)) + gen_rpcauth = config['environment']['RPCAUTH'] + p = subprocess.Popen([sys.executable, gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True) + lines = p.stdout.read().splitlines() + rpcauth3 = lines[1] + self.password = lines[3] + + with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f: + f.write(rpcauth+"\n") + f.write(rpcauth2+"\n") + f.write(rpcauth3+"\n") + with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f: + f.write(rpcuser+"\n") + f.write(rpcpassword+"\n") + + def run_test(self): + + ################################################## + # Check correctness of the rpcauth config option # + ################################################## + url = urllib.parse.urlparse(self.nodes[0].url) + + #Old authpair + authpair = url.username + ':' + url.password + + #New authpair generated via share/rpcauth tool + password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM=" + + #Second authpair with different username + password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI=" + authpairnew = "rt:"+password + + self.log.info('Correct...') + headers = {"Authorization": "Basic " + str_to_b64str(authpair)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 200) + conn.close() + + #Use new authpair to confirm both work + self.log.info('Correct...') + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + 
conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 200) + conn.close() + + #Wrong login name with rt's password + self.log.info('Wrong...') + authpairnew = "rtwrong:"+password + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + #Wrong password for rt + self.log.info('Wrong...') + authpairnew = "rt:"+password+"wrong" + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + #Correct for rt2 + self.log.info('Correct...') + authpairnew = "rt2:"+password2 + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 200) + conn.close() + + #Wrong password for rt2 + self.log.info('Wrong...') + authpairnew = "rt2:"+password2+"wrong" + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + #Correct for randomly generated user + self.log.info('Correct...') + authpairnew = self.user+":"+self.password + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 200) + conn.close() + + #Wrong password for randomly generated user + self.log.info('Wrong...') + authpairnew = self.user+":"+self.password+"Wrong" + headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + ############################################################### + # Check correctness of the rpcuser/rpcpassword config options # + ############################################################### + url = urllib.parse.urlparse(self.nodes[1].url) + + # rpcuser and rpcpassword authpair + self.log.info('Correct...') + rpcuserauthpair = "rpcuser💻:rpcpassword🔑" + + headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 200) + conn.close() + + #Wrong login name with rpcuser's password + rpcuserauthpair = "rpcuserwrong:rpcpassword" + headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', 
'{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + #Wrong password for rpcuser + self.log.info('Wrong...') + rpcuserauthpair = "rpcuser:rpcpasswordwrong" + headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)} + + conn = http.client.HTTPConnection(url.hostname, url.port) + conn.connect() + conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) + resp = conn.getresponse() + assert_equal(resp.status, 401) + conn.close() + + +if __name__ == '__main__': + HTTPBasicsTest ().main () diff --git a/test/functional/rpc_zmq.py b/test/functional/rpc_zmq.py new file mode 100755 index 000000000..bfa6b06f6 --- /dev/null +++ b/test/functional/rpc_zmq.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test for the ZMQ RPC methods.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal + + +class RPCZMQTest(BitcoinTestFramework): + + address = "tcp://127.0.0.1:28332" + + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_py3_zmq() + self.skip_if_no_bitcoind_zmq() + + def run_test(self): + self._test_getzmqnotifications() + + def _test_getzmqnotifications(self): + self.restart_node(0, extra_args=[]) + assert_equal(self.nodes[0].getzmqnotifications(), []) + + self.restart_node(0, extra_args=["-zmqpubhashtx=%s" % self.address]) + assert_equal(self.nodes[0].getzmqnotifications(), [ + {"type": "pubhashtx", "address": self.address}, + ]) + + +if __name__ == '__main__': + RPCZMQTest().main() diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py index 96bebe1ea..d1fb97b02 100644 --- a/test/functional/test_framework/address.py +++ b/test/functional/test_framework/address.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2016 The Bitcoin Core developers +# Copyright (c) 2016-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Encode and decode BASE58, P2PKH and P2SH addresses.""" @@ -7,6 +7,8 @@ from .script import hash256, hash160, sha256, CScript, OP_0 from .util import bytes_to_hex_str, hex_str_to_bytes +from . 
import segwit_addr + chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' def byte_to_base58(b, version): @@ -49,6 +51,22 @@ def key_to_p2sh_p2wpkh(key, main = False): p2shscript = CScript([OP_0, hash160(key)]) return script_to_p2sh(p2shscript, main) +def program_to_witness(version, program, main = False): + if (type(program) is str): + program = hex_str_to_bytes(program) + assert 0 <= version <= 16 + assert 2 <= len(program) <= 40 + assert version > 0 or len(program) in [20, 32] + return segwit_addr.encode("bc" if main else "bcrt", version, program) + +def script_to_p2wsh(script, main = False): + script = check_script(script) + return program_to_witness(0, sha256(script), main) + +def key_to_p2wpkh(key, main = False): + key = check_key(key) + return program_to_witness(0, hash160(key), main) + def script_to_p2sh_p2wsh(script, main = False): script = check_script(script) p2shscript = CScript([OP_0, sha256(script)]) diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py index b3671cbdc..1140fe9b3 100644 --- a/test/functional/test_framework/authproxy.py +++ b/test/functional/test_framework/authproxy.py @@ -33,24 +33,18 @@ - uses standard Python json lib """ -try: - import http.client as httplib -except ImportError: - import httplib import base64 import decimal +import http.client import json import logging +import os import socket import time -try: - import urllib.parse as urlparse -except ImportError: - import urlparse - -USER_AGENT = "AuthServiceProxy/0.1" +import urllib.parse HTTP_TIMEOUT = 30 +USER_AGENT = "AuthServiceProxy/0.1" log = logging.getLogger("BitcoinRPC") @@ -60,7 +54,7 @@ def __init__(self, rpc_error): errmsg = '%(message)s (%(code)i)' % rpc_error except (KeyError, TypeError): errmsg = '' - Exception.__init__(self, errmsg) + super().__init__(errmsg) self.error = rpc_error @@ -69,40 +63,21 @@ def EncodeDecimal(o): return str(o) raise TypeError(repr(o) + " is not JSON serializable") -class AuthServiceProxy(object): +class AuthServiceProxy(): __id_count = 0 # ensure_ascii: escape unicode as \uXXXX, passed to json.dumps def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True): self.__service_url = service_url self._service_name = service_name - self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests - self.__url = urlparse.urlparse(service_url) - if self.__url.port is None: - port = 80 - else: - port = self.__url.port - (user, passwd) = (self.__url.username, self.__url.password) - try: - user = user.encode('utf8') - except AttributeError: - pass - try: - passwd = passwd.encode('utf8') - except AttributeError: - pass + self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests + self.__url = urllib.parse.urlparse(service_url) + user = None if self.__url.username is None else self.__url.username.encode('utf8') + passwd = None if self.__url.password is None else self.__url.password.encode('utf8') authpair = user + b':' + passwd self.__auth_header = b'Basic ' + base64.b64encode(authpair) - - if connection: - # Callables re-use the connection of the original proxy - self.__conn = connection - elif self.__url.scheme == 'https': - self.__conn = httplib.HTTPSConnection(self.__url.hostname, port, - timeout=timeout) - else: - self.__conn = httplib.HTTPConnection(self.__url.hostname, port, - timeout=timeout) + self.timeout = timeout + self._set_conn(connection) def __getattr__(self, name): if name.startswith('__') and name.endswith('__'): @@ -121,34 
+96,41 @@ def _request(self, method, path, postdata): 'User-Agent': USER_AGENT, 'Authorization': self.__auth_header, 'Content-type': 'application/json'} + if os.name == 'nt': + # Windows somehow does not like to re-use connections + # TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows + self._set_conn() try: self.__conn.request(method, path, postdata, headers) return self._get_response() - except httplib.BadStatusLine as e: - if e.line == "''": # if connection was closed, try again + except http.client.BadStatusLine as e: + if e.line == "''": # if connection was closed, try again self.__conn.close() self.__conn.request(method, path, postdata, headers) return self._get_response() else: raise - except (BrokenPipeError,ConnectionResetError): + except (BrokenPipeError, ConnectionResetError): # Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset # ConnectionResetError happens on FreeBSD with Python 3.4 self.__conn.close() self.__conn.request(method, path, postdata, headers) return self._get_response() - def __call__(self, *args, **argsn): + def get_request(self, *args, **argsn): AuthServiceProxy.__id_count += 1 - log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name, - json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) + log.debug("-%s-> %s %s" % (AuthServiceProxy.__id_count, self._service_name, + json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) if args and argsn: raise ValueError('Cannot handle both named and positional arguments') - postdata = json.dumps({'version': '1.1', - 'method': self._service_name, - 'params': args or argsn, - 'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii) + return {'version': '1.1', + 'method': self._service_name, + 'params': args or argsn, + 'id': AuthServiceProxy.__id_count} + + def __call__(self, *args, **argsn): + postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii) response = self._request('POST', self.__url.path, postdata.encode('utf-8')) if response['error'] is not None: raise JSONRPCException(response['error']) @@ -158,16 +140,16 @@ def __call__(self, *args, **argsn): else: return response['result'] - def _batch(self, rpc_call_list): + def batch(self, rpc_call_list): postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii) - log.debug("--> "+postdata) + log.debug("--> " + postdata) return self._request('POST', self.__url.path, postdata.encode('utf-8')) def _get_response(self): req_start_time = time.time() try: http_response = self.__conn.getresponse() - except socket.timeout as e: + except socket.timeout: raise JSONRPCException({ 'code': -344, 'message': '%r RPC took longer than %f seconds. 
Consider ' @@ -187,10 +169,20 @@ def _get_response(self): response = json.loads(responsedata, parse_float=decimal.Decimal) elapsed = time.time() - req_start_time if "error" in response and response["error"] is None: - log.debug("<-%s- [%.6f] %s"%(response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) + log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) else: - log.debug("<-- [%.6f] %s"%(elapsed,responsedata)) + log.debug("<-- [%.6f] %s" % (elapsed, responsedata)) return response def __truediv__(self, relative_uri): return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn) + + def _set_conn(self, connection=None): + port = 80 if self.__url.port is None else self.__url.port + if connection: + self.__conn = connection + self.timeout = connection.timeout + elif self.__url.scheme == 'https': + self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=self.timeout) + else: + self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=self.timeout) diff --git a/test/functional/test_framework/bignum.py b/test/functional/test_framework/bignum.py index 024611da6..db5ccd62c 100644 --- a/test/functional/test_framework/bignum.py +++ b/test/functional/test_framework/bignum.py @@ -26,12 +26,6 @@ def bn2bin(v): i -= 1 return s -def bin2bn(s): - l = 0 - for ch in s: - l = (l << 8) | ch - return l - def bn2mpi(v): have_ext = False if v.bit_length() > 0: @@ -54,30 +48,6 @@ def bn2mpi(v): v_bin[0] |= 0x80 return s + ext + v_bin -def mpi2bn(s): - if len(s) < 4: - return None - s_size = bytes(s[:4]) - v_len = struct.unpack(b">I", s_size)[0] - if len(s) != (v_len + 4): - return None - if v_len == 0: - return 0 - - v_str = bytearray(s[4:]) - neg = False - i = v_str[0] - if i & 0x80: - neg = True - i &= ~0x80 - v_str[0] = i - - v = bin2bn(v_str) - - if neg: - return -v - return v - # bitcoin-specific little endian format, with implicit size def mpi2vch(s): r = s[4:] # strip size @@ -86,12 +56,3 @@ def mpi2vch(s): def bn2vch(v): return bytes(mpi2vch(bn2mpi(v))) - -def vch2mpi(s): - r = struct.pack(b">I", len(s)) # size - r += s[::-1] # reverse string, converting LE->BE - return r - -def vch2bn(s): - return mpi2bn(vch2mpi(s)) - diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py index 5dcf516dc..35004fb58 100644 --- a/test/functional/test_framework/blocktools.py +++ b/test/functional/test_framework/blocktools.py @@ -1,40 +1,74 @@ #!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers +# Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
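The authproxy refactor above promotes _batch to a public batch() and splits request construction out into get_request(). A usage sketch, assuming a placeholder URL for a local regtest node:

    from test_framework.authproxy import AuthServiceProxy

    node = AuthServiceProxy("http://user:pass@127.0.0.1:18443")
    # Build the JSON-RPC request objects without sending them...
    reqs = [node.getblockcount.get_request(),
            node.getbestblockhash.get_request()]
    # ...then submit the whole list in a single HTTP round trip.
    replies = node.batch(reqs)
    results = [r["result"] for r in replies]

Each reply carries its own "error" member, so a caller can inspect individual failures without aborting the whole batch.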
"""Utilities for manipulating blocks and transactions.""" -from .mininode import * -from .script import CScript, OP_TRUE, OP_CHECKSIG, OP_RETURN +from .address import ( + key_to_p2sh_p2wpkh, + key_to_p2wpkh, + script_to_p2sh_p2wsh, + script_to_p2wsh, +) +from .messages import ( + CBlock, + COIN, + COutPoint, + CTransaction, + CTxIn, + CTxInWitness, + CTxOut, + FromHex, + ToHex, + bytes_to_hex_str, + hash256, + hex_str_to_bytes, + ser_string, + ser_uint256, + sha256, + uint256_from_str, +) +from .script import ( + CScript, + OP_0, + OP_1, + OP_CHECKMULTISIG, + OP_CHECKSIG, + OP_RETURN, + OP_TRUE, + hash160, +) +from .util import assert_equal +from io import BytesIO -# Create a block (with regtest difficulty) -def create_block(hashprev, coinbase, nTime=None): +# From BIP141 +WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed" + +def create_block(hashprev, coinbase, ntime=None): + """Create a block (with regtest difficulty).""" block = CBlock() - if nTime is None: + if ntime is None: import time - block.nTime = int(time.time()+600) + block.nTime = int(time.time() + 600) else: - block.nTime = nTime + block.nTime = ntime block.hashPrevBlock = hashprev - block.nBits = 0x207fffff # Will break after a difficulty adjustment... + block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams block.vtx.append(coinbase) block.hashMerkleRoot = block.calc_merkle_root() block.calc_sha256() return block -# From BIP141 -WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed" - - def get_witness_script(witness_root, witness_nonce): - witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce))) + witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce))) output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment) return CScript([OP_RETURN, output_data]) - -# According to BIP141, blocks with witness rules active must commit to the -# hash of all in-block transactions including witness. def add_witness_commitment(block, nonce=0): + """Add a witness commitment to the block's coinbase transaction. + + According to BIP141, blocks with witness rules active must commit to the + hash of all in-block transactions including witness.""" # First calculate the merkle root of the block's # transactions, with witnesses. witness_nonce = nonce @@ -49,7 +83,6 @@ def add_witness_commitment(block, nonce=0): block.hashMerkleRoot = block.calc_merkle_root() block.rehash() - def serialize_script_num(value): r = bytearray(0) if value == 0: @@ -65,46 +98,122 @@ def serialize_script_num(value): r[-1] |= 0x80 return r -# Create a coinbase transaction, assuming no miner fees. -# If pubkey is passed in, the coinbase output will be a P2PK output; -# otherwise an anyone-can-spend output. -def create_coinbase(height, pubkey = None): +def create_coinbase(height, pubkey=None): + """Create a coinbase transaction, assuming no miner fees. 
+
+    If pubkey is passed in, the coinbase output will be a P2PK output;
+    otherwise an anyone-can-spend output."""
     coinbase = CTransaction()
-    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
-                ser_string(serialize_script_num(height)), 0xffffffff))
+    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
+                              ser_string(serialize_script_num(height)), 0xffffffff))
     coinbaseoutput = CTxOut()
     coinbaseoutput.nValue = 50 * COIN
-    halvings = int(height/150) # regtest
+    halvings = int(height / 150)  # regtest
     coinbaseoutput.nValue >>= halvings
-    if (pubkey != None):
+    if (pubkey is not None):
         coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
     else:
         coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
-    coinbase.vout = [ coinbaseoutput ]
+    coinbase.vout = [coinbaseoutput]
     coinbase.calc_sha256()
     return coinbase
 
-# Create a transaction.
-# If the scriptPubKey is not specified, make it anyone-can-spend.
-def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
+def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=CScript()):
+    """Return one-input, one-output transaction object
+       spending the prevtx's n-th output with the given amount.
+
+       Can optionally pass script_pub_key and script_sig, default is anyone-can-spend output.
+    """
     tx = CTransaction()
     assert(n < len(prevtx.vout))
-    tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
-    tx.vout.append(CTxOut(value, scriptPubKey))
+    tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), script_sig, 0xffffffff))
+    tx.vout.append(CTxOut(amount, script_pub_key))
     tx.calc_sha256()
     return tx
 
-def get_legacy_sigopcount_block(block, fAccurate=True):
+def create_transaction(node, txid, to_address, *, amount):
+    """ Return signed transaction spending the first output of the
+        input txid. Note that the node must be able to sign for the
+        output that is being spent, and the node must not be running
+        multiple wallets.
+    """
+    raw_tx = create_raw_transaction(node, txid, to_address, amount=amount)
+    tx = CTransaction()
+    tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx)))
+    return tx
+
+def create_raw_transaction(node, txid, to_address, *, amount):
+    """ Return raw signed transaction spending the first output of the
+        input txid. Note that the node must be able to sign for the
+        output that is being spent, and the node must not be running
+        multiple wallets.
+    """
+    rawtx = node.createrawtransaction(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
+    signresult = node.signrawtransactionwithwallet(rawtx)
+    assert_equal(signresult["complete"], True)
+    return signresult['hex']
+
+def get_legacy_sigopcount_block(block, accurate=True):
     count = 0
     for tx in block.vtx:
-        count += get_legacy_sigopcount_tx(tx, fAccurate)
+        count += get_legacy_sigopcount_tx(tx, accurate)
     return count
 
-def get_legacy_sigopcount_tx(tx, fAccurate=True):
+def get_legacy_sigopcount_tx(tx, accurate=True):
     count = 0
     for i in tx.vout:
-        count += i.scriptPubKey.GetSigOpCount(fAccurate)
+        count += i.scriptPubKey.GetSigOpCount(accurate)
     for j in tx.vin:
         # scriptSig might be of type bytes, so convert to CScript for the moment
-        count += CScript(j.scriptSig).GetSigOpCount(fAccurate)
+        count += CScript(j.scriptSig).GetSigOpCount(accurate)
     return count
+
+def witness_script(use_p2wsh, pubkey):
+    """Create a scriptPubKey for a pay-to-witness TxOut.
+
+    This is either a P2WPKH output for the given pubkey, or a P2WSH output of a
+    1-of-1 multisig for the given pubkey. Returns the hex encoding of the
+    scriptPubKey."""
+    if not use_p2wsh:
+        # P2WPKH instead
+        pubkeyhash = hash160(hex_str_to_bytes(pubkey))
+        pkscript = CScript([OP_0, pubkeyhash])
+    else:
+        # 1-of-1 multisig
+        witness_program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
+        scripthash = sha256(witness_program)
+        pkscript = CScript([OP_0, scripthash])
+    return bytes_to_hex_str(pkscript)
+
+def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
+    """Return a transaction (in hex) that spends the given utxo to a segwit output.
+
+    Optionally wrap the segwit output using P2SH."""
+    if use_p2wsh:
+        program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
+        addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
+    else:
+        addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
+    if not encode_p2sh:
+        assert_equal(node.getaddressinfo(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
+    return node.createrawtransaction([utxo], {addr: amount})
+
+def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
+    """Create a transaction spending a given utxo to a segwit output.
+
+    The output corresponds to the given pubkey: use_p2wsh determines whether to
+    use P2WPKH or P2WSH; encode_p2sh determines whether to wrap in P2SH.
+    sign=True will have the given node sign the transaction.
+    insert_redeem_script will be added to the scriptSig, if given."""
+    tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
+    if (sign):
+        signed = node.signrawtransactionwithwallet(tx_to_witness)
+        # The wallet must sign cleanly: no script verification errors allowed.
+        assert("errors" not in signed or len(signed["errors"]) == 0)
+        return node.sendrawtransaction(signed["hex"])
+    else:
+        if (insert_redeem_script):
+            tx = FromHex(CTransaction(), tx_to_witness)
+            tx.vin[0].scriptSig += CScript([hex_str_to_bytes(insert_redeem_script)])
+            tx_to_witness = ToHex(tx)
+
+        return node.sendrawtransaction(tx_to_witness)
diff --git a/test/functional/test_framework/coverage.py b/test/functional/test_framework/coverage.py
index 227b1a17a..7705dd3e4 100644
--- a/test/functional/test_framework/coverage.py
+++ b/test/functional/test_framework/coverage.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-# Copyright (c) 2015-2016 The Bitcoin Core developers
+# Copyright (c) 2015-2018 The Bitcoin Core developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """Utilities for doing coverage analysis on the RPC interface.
@@ -14,7 +14,7 @@
 REFERENCE_FILENAME = 'rpc_interface.txt'
 
-class AuthServiceProxyWrapper(object):
+class AuthServiceProxyWrapper():
     """
     An object that wraps AuthServiceProxy to record specific RPC calls.
 
@@ -31,10 +31,11 @@ def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
         self.auth_service_proxy_instance = auth_service_proxy_instance
         self.coverage_logfile = coverage_logfile
 
-    def __getattr__(self, *args, **kwargs):
-        return_val = self.auth_service_proxy_instance.__getattr__(
-            *args, **kwargs)
-
+    def __getattr__(self, name):
+        return_val = getattr(self.auth_service_proxy_instance, name)
+        if not isinstance(return_val, type(self.auth_service_proxy_instance)):
+            # If proxy getattr returned an unwrapped value, do the same here.
+ return return_val return AuthServiceProxyWrapper(return_val, self.coverage_logfile) def __call__(self, *args, **kwargs): @@ -44,20 +45,23 @@ def __call__(self, *args, **kwargs): """ return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs) + self._log_call() + return return_val + + def _log_call(self): rpc_method = self.auth_service_proxy_instance._service_name if self.coverage_logfile: with open(self.coverage_logfile, 'a+', encoding='utf8') as f: f.write("%s\n" % rpc_method) - return return_val - - @property - def url(self): - return self.auth_service_proxy_instance.url - def __truediv__(self, relative_uri): - return AuthServiceProxyWrapper(self.auth_service_proxy_instance / relative_uri) + return AuthServiceProxyWrapper(self.auth_service_proxy_instance / relative_uri, + self.coverage_logfile) + + def get_request(self, *args, **kwargs): + self._log_call() + return self.auth_service_proxy_instance.get_request(*args, **kwargs) def get_filename(dirname, n_node): """ diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py index 85a6158a2..1b3e510dc 100644 --- a/test/functional/test_framework/key.py +++ b/test/functional/test_framework/key.py @@ -10,7 +10,6 @@ import ctypes import ctypes.util import hashlib -import sys ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32') @@ -84,7 +83,7 @@ def _check_result(val, func, args): ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p ssl.EC_KEY_new_by_curve_name.errcheck = _check_result -class CECKey(object): +class CECKey(): """Wrapper around OpenSSL's EC_KEY""" POINT_CONVERSION_COMPRESSED = 2 @@ -223,10 +222,5 @@ def __str__(self): return repr(self) def __repr__(self): - # Always have represent as b'' so test cases don't have to - # change for py2/3 - if sys.version > '3': - return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__()) - else: - return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__()) + return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__()) diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py new file mode 100755 index 000000000..8e9372767 --- /dev/null +++ b/test/functional/test_framework/messages.py @@ -0,0 +1,1442 @@ +#!/usr/bin/env python3 +# Copyright (c) 2010 ArtForz -- public domain half-a-node +# Copyright (c) 2012 Jeff Garzik +# Copyright (c) 2010-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Bitcoin test framework primitive and message structures + +CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: + data structures that should map to corresponding structures in + bitcoin/primitives + +msg_block, msg_tx, msg_headers, etc.: + data structures that represent network messages + +ser_*, deser_*: functions that handle serialization/deserialization. + +Classes use __slots__ to ensure extraneous attributes aren't accidentally added +by tests, compromising their intended effect. 
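The __slots__ remark in the docstring above has a concrete payoff: without slots, a misspelled attribute write on a message object silently creates a new attribute and the test keeps running with stale state. A tiny self-contained illustration:

    class WithSlots:
        __slots__ = ("nonce",)

    m = WithSlots()
    m.nonce = 1
    try:
        m.nonse = 2  # typo; raises AttributeError because of __slots__
    except AttributeError:
        print("caught the typo immediately")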
+""" +from codecs import encode +import copy +import hashlib +from io import BytesIO +import random +import socket +import struct +import time + +from test_framework.siphash import siphash256 +from test_framework.util import hex_str_to_bytes, bytes_to_hex_str + +MIN_VERSION_SUPPORTED = 60001 +MY_VERSION = 70014 # past bip-31 for ping/pong +MY_SUBVERSION = b"/python-mininode-tester:0.0.3/" +MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37) + +MAX_INV_SZ = 50000 +MAX_LOCATOR_SZ = 101 +MAX_BLOCK_BASE_SIZE = 1000000 + +COIN = 100000000 # 1 btc in satoshis + +BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out + +NODE_NETWORK = (1 << 0) +# NODE_GETUTXO = (1 << 1) +NODE_BLOOM = (1 << 2) +NODE_WITNESS = (1 << 3) +NODE_NETWORK_LIMITED = (1 << 10) + +MSG_TX = 1 +MSG_BLOCK = 2 +MSG_WITNESS_FLAG = 1 << 30 +MSG_TYPE_MASK = 0xffffffff >> 2 + +# Serialization/deserialization tools +def sha256(s): + return hashlib.new('sha256', s).digest() + +def ripemd160(s): + return hashlib.new('ripemd160', s).digest() + +def hash256(s): + return sha256(sha256(s)) + +def ser_compact_size(l): + r = b"" + if l < 253: + r = struct.pack("B", l) + elif l < 0x10000: + r = struct.pack(">= 32 + return rs + + +def uint256_from_str(s): + r = 0 + t = struct.unpack("> 24) & 0xFF + v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) + return v + + +def deser_vector(f, c): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = c() + t.deserialize(f) + r.append(t) + return r + + +# ser_function_name: Allow for an alternate serialization function on the +# entries in the vector (we use this for serializing the vector of transactions +# for a witness block). +def ser_vector(l, ser_function_name=None): + r = ser_compact_size(len(l)) + for i in l: + if ser_function_name: + r += getattr(i, ser_function_name)() + else: + r += i.serialize() + return r + + +def deser_uint256_vector(f): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = deser_uint256(f) + r.append(t) + return r + + +def ser_uint256_vector(l): + r = ser_compact_size(len(l)) + for i in l: + r += ser_uint256(i) + return r + + +def deser_string_vector(f): + nit = deser_compact_size(f) + r = [] + for i in range(nit): + t = deser_string(f) + r.append(t) + return r + + +def ser_string_vector(l): + r = ser_compact_size(len(l)) + for sv in l: + r += ser_string(sv) + return r + + +# Deserialize from a hex string representation (eg from RPC) +def FromHex(obj, hex_string): + obj.deserialize(BytesIO(hex_str_to_bytes(hex_string))) + return obj + +# Convert a binary-serializable object to hex (eg for submission via RPC) +def ToHex(obj): + return bytes_to_hex_str(obj.serialize()) + +# Objects that map to bitcoind objects, which can be serialized/deserialized + + +class CAddress: + __slots__ = ("ip", "nServices", "pchReserved", "port", "time") + + def __init__(self): + self.time = 0 + self.nServices = 1 + self.pchReserved = b"\x00" * 10 + b"\xff" * 2 + self.ip = "0.0.0.0" + self.port = 0 + + def deserialize(self, f, with_time=True): + if with_time: + self.time = struct.unpack("H", f.read(2))[0] + + def serialize(self, with_time=True): + r = b"" + if with_time: + r += struct.pack("H", self.port) + return r + + def __repr__(self): + return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, + self.ip, self.port) + + +class CInv: + __slots__ = ("hash", "type") + + typemap = { + 0: "Error", + 1: "TX", + 2: "Block", + 1|MSG_WITNESS_FLAG: "WitnessTx", + 2|MSG_WITNESS_FLAG : 
"WitnessBlock", + 4: "CompactBlock" + } + + def __init__(self, t=0, h=0): + self.type = t + self.hash = h + + def deserialize(self, f): + self.type = struct.unpack(" 21000000 * COIN: + return False + return True + + def __repr__(self): + return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ + % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) + + +class CBlockHeader: + __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "nBits", "nNonce", + "nTime", "nVersion", "sha256") + + def __init__(self, header=None): + if header is None: + self.set_null() + else: + self.nVersion = header.nVersion + self.hashPrevBlock = header.hashPrevBlock + self.hashMerkleRoot = header.hashMerkleRoot + self.nTime = header.nTime + self.nBits = header.nBits + self.nNonce = header.nNonce + self.sha256 = header.sha256 + self.hash = header.hash + self.calc_sha256() + + def set_null(self): + self.nVersion = 1 + self.hashPrevBlock = 0 + self.hashMerkleRoot = 0 + self.nTime = 0 + self.nBits = 0 + self.nNonce = 0 + self.sha256 = None + self.hash = None + + def deserialize(self, f): + self.nVersion = struct.unpack(" 1: + newhashes = [] + for i in range(0, len(hashes), 2): + i2 = min(i+1, len(hashes)-1) + newhashes.append(hash256(hashes[i] + hashes[i2])) + hashes = newhashes + return uint256_from_str(hashes[0]) + + def calc_merkle_root(self): + hashes = [] + for tx in self.vtx: + tx.calc_sha256() + hashes.append(ser_uint256(tx.sha256)) + return self.get_merkle_root(hashes) + + def calc_witness_merkle_root(self): + # For witness root purposes, the hash of the + # coinbase, with witness, is defined to be 0...0 + hashes = [ser_uint256(0)] + + for tx in self.vtx[1:]: + # Calculate the hashes with witness data + hashes.append(ser_uint256(tx.calc_sha256(True))) + + return self.get_merkle_root(hashes) + + def is_valid(self): + self.calc_sha256() + target = uint256_from_compact(self.nBits) + if self.sha256 > target: + return False + for tx in self.vtx: + if not tx.is_valid(): + return False + if self.calc_merkle_root() != self.hashMerkleRoot: + return False + return True + + def solve(self): + self.rehash() + target = uint256_from_compact(self.nBits) + while self.sha256 > target: + self.nNonce += 1 + self.rehash() + + def __repr__(self): + return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ + % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, + time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) + + +class PrefilledTransaction: + __slots__ = ("index", "tx") + + def __init__(self, index=0, tx = None): + self.index = index + self.tx = tx + + def deserialize(self, f): + self.index = deser_compact_size(f) + self.tx = CTransaction() + self.tx.deserialize(f) + + def serialize(self, with_witness=True): + r = b"" + r += ser_compact_size(self.index) + if with_witness: + r += self.tx.serialize_with_witness() + else: + r += self.tx.serialize_without_witness() + return r + + def serialize_without_witness(self): + return self.serialize(with_witness=False) + + def serialize_with_witness(self): + return self.serialize(with_witness=True) + + def __repr__(self): + return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx)) + + +# This is what we send on the wire, in a cmpctblock message. 
+class P2PHeaderAndShortIDs: + __slots__ = ("header", "nonce", "prefilled_txn", "prefilled_txn_length", + "shortids", "shortids_length") + + def __init__(self): + self.header = CBlockHeader() + self.nonce = 0 + self.shortids_length = 0 + self.shortids = [] + self.prefilled_txn_length = 0 + self.prefilled_txn = [] + + def deserialize(self, f): + self.header.deserialize(f) + self.nonce = struct.unpack("= 70001: + # Relay field is optional for version 70001 onwards + try: + self.nRelay = struct.unpack(" +class msg_headers: + __slots__ = ("headers",) + command = b"headers" + + def __init__(self, headers=None): + self.headers = headers if headers is not None else [] + + def deserialize(self, f): + # comment in bitcoind indicates these should be deserialized as blocks + blocks = deser_vector(f, CBlock) + for x in blocks: + self.headers.append(CBlockHeader(x)) + + def serialize(self): + blocks = [CBlock(x) for x in self.headers] + return ser_vector(blocks) + + def __repr__(self): + return "msg_headers(headers=%s)" % repr(self.headers) + + +class msg_reject: + __slots__ = ("code", "data", "message", "reason") + command = b"reject" + REJECT_MALFORMED = 1 + + def __init__(self): + self.message = b"" + self.code = 0 + self.reason = b"" + self.data = 0 + + def deserialize(self, f): + self.message = deser_string(f) + self.code = struct.unpack(">= 32 - return rs - - -def uint256_from_str(s): - r = 0 - t = struct.unpack("> 24) & 0xFF - v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) - return v - - -def deser_vector(f, c): - nit = deser_compact_size(f) - r = [] - for i in range(nit): - t = c() - t.deserialize(f) - r.append(t) - return r - - -# ser_function_name: Allow for an alternate serialization function on the -# entries in the vector (we use this for serializing the vector of transactions -# for a witness block). 
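Both the old and new vector serializers lead with Bitcoin's CompactSize length prefix. A self-contained worked example of that encoding with a few spot checks:

    import struct

    def ser_compact_size(l):
        # One raw byte below 253; otherwise a marker byte plus a fixed-width int.
        if l < 253:
            return struct.pack("B", l)
        elif l < 0x10000:
            return struct.pack("<BH", 253, l)
        elif l < 0x100000000:
            return struct.pack("<BI", 254, l)
        return struct.pack("<BQ", 255, l)

    assert ser_compact_size(252) == b"\xfc"
    assert ser_compact_size(253) == b"\xfd\xfd\x00"
    assert ser_compact_size(2**16) == b"\xfe\x00\x00\x01\x00"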
-def ser_vector(l, ser_function_name=None): - r = ser_compact_size(len(l)) - for i in l: - if ser_function_name: - r += getattr(i, ser_function_name)() - else: - r += i.serialize() - return r - - -def deser_uint256_vector(f): - nit = deser_compact_size(f) - r = [] - for i in range(nit): - t = deser_uint256(f) - r.append(t) - return r - - -def ser_uint256_vector(l): - r = ser_compact_size(len(l)) - for i in l: - r += ser_uint256(i) - return r - - -def deser_string_vector(f): - nit = deser_compact_size(f) - r = [] - for i in range(nit): - t = deser_string(f) - r.append(t) - return r - - -def ser_string_vector(l): - r = ser_compact_size(len(l)) - for sv in l: - r += ser_string(sv) - return r - - -def deser_int_vector(f): - nit = deser_compact_size(f) - r = [] - for i in range(nit): - t = struct.unpack("H", f.read(2))[0] - - def serialize(self): - r = b"" - r += struct.pack("H", self.port) - return r - - def __repr__(self): - return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, - self.ip, self.port) - -MSG_WITNESS_FLAG = 1<<30 - -class CInv(object): - typemap = { - 0: "Error", - 1: "TX", - 2: "Block", - 1|MSG_WITNESS_FLAG: "WitnessTx", - 2|MSG_WITNESS_FLAG : "WitnessBlock", - 4: "CompactBlock" - } - - def __init__(self, t=0, h=0): - self.type = t - self.hash = h - - def deserialize(self, f): - self.type = struct.unpack(" 21000000 * COIN: - return False - return True - - def __repr__(self): - return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ - % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) - - -class CBlockHeader(object): - def __init__(self, header=None): - if header is None: - self.set_null() - else: - self.nVersion = header.nVersion - self.hashPrevBlock = header.hashPrevBlock - self.hashMerkleRoot = header.hashMerkleRoot - self.nTime = header.nTime - self.nBits = header.nBits - self.nNonce = header.nNonce - self.sha256 = header.sha256 - self.hash = header.hash - self.calc_sha256() - - def set_null(self): - self.nVersion = 1 - self.hashPrevBlock = 0 - self.hashMerkleRoot = 0 - self.nTime = 0 - self.nBits = 0 - self.nNonce = 0 - self.sha256 = None - self.hash = None - - def deserialize(self, f): - self.nVersion = struct.unpack(" 1: - newhashes = [] - for i in range(0, len(hashes), 2): - i2 = min(i+1, len(hashes)-1) - newhashes.append(hash256(hashes[i] + hashes[i2])) - hashes = newhashes - return uint256_from_str(hashes[0]) - - def calc_merkle_root(self): - hashes = [] - for tx in self.vtx: - tx.calc_sha256() - hashes.append(ser_uint256(tx.sha256)) - return self.get_merkle_root(hashes) - - def calc_witness_merkle_root(self): - # For witness root purposes, the hash of the - # coinbase, with witness, is defined to be 0...0 - hashes = [ser_uint256(0)] - - for tx in self.vtx[1:]: - # Calculate the hashes with witness data - hashes.append(ser_uint256(tx.calc_sha256(True))) - - return self.get_merkle_root(hashes) - - def is_valid(self): - self.calc_sha256() - target = uint256_from_compact(self.nBits) - if self.sha256 > target: - return False - for tx in self.vtx: - if not tx.is_valid(): - return False - if self.calc_merkle_root() != self.hashMerkleRoot: - return False - return True - - def solve(self): - self.rehash() - target = uint256_from_compact(self.nBits) - while self.sha256 > target: - self.nNonce += 1 - self.rehash() - - def __repr__(self): - return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ - % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, - 
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) - - -class CUnsignedAlert(object): - def __init__(self): - self.nVersion = 1 - self.nRelayUntil = 0 - self.nExpiration = 0 - self.nID = 0 - self.nCancel = 0 - self.setCancel = [] - self.nMinVer = 0 - self.nMaxVer = 0 - self.setSubVer = [] - self.nPriority = 0 - self.strComment = b"" - self.strStatusBar = b"" - self.strReserved = b"" - - def deserialize(self, f): - self.nVersion = struct.unpack("= 106: - self.addrFrom = CAddress() - self.addrFrom.deserialize(f) - self.nNonce = struct.unpack("= 209: - self.nStartingHeight = struct.unpack("= 70001: - # Relay field is optional for version 70001 onwards - try: - self.nRelay = struct.unpack(" -class msg_headers(object): - command = b"headers" - - def __init__(self): - self.headers = [] - - def deserialize(self, f): - # comment in bitcoind indicates these should be deserialized as blocks - blocks = deser_vector(f, CBlock) - for x in blocks: - self.headers.append(CBlockHeader(x)) - - def serialize(self): - blocks = [CBlock(x) for x in self.headers] - return ser_vector(blocks) - - def __repr__(self): - return "msg_headers(headers=%s)" % repr(self.headers) - - -class msg_reject(object): - command = b"reject" - REJECT_MALFORMED = 1 - - def __init__(self): - self.message = b"" - self.code = 0 - self.reason = b"" - self.data = 0 - - def deserialize(self, f): - self.message = deser_string(f) - self.code = struct.unpack(" 0: + self.recvbuf += t + self._on_data() - def __repr__(self): - return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids) + def _on_data(self): + """Try to read P2P messages from the recv buffer. -class msg_getblocktxn(object): - command = b"getblocktxn" + This method reads data from the buffer in a loop. It deserializes, + parses and verifies the P2P header, then passes the P2P payload to + the on_message callback for processing.""" + try: + while True: + if len(self.recvbuf) < 4: + return + if self.recvbuf[:4] != MAGIC_BYTES[self.network]: + raise ValueError("got garbage %s" % repr(self.recvbuf)) + if len(self.recvbuf) < 4 + 12 + 4 + 4: + return + command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] + msglen = struct.unpack(" 500: + log_message += "... (msg truncated)" + logger.debug(log_message) - def __repr__(self): - return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions)) -class msg_witness_blocktxn(msg_blocktxn): - def serialize(self): - r = b"" - r += self.block_transactions.serialize(with_witness=True) - return r +class P2PInterface(P2PConnection): + """A high-level P2P interface class for communicating with a Bitcoin node. -class NodeConnCB(object): - """Callback and helper functions for P2P connection to a bitcoind node. + This class provides high-level callbacks for processing P2P message + payloads, as well as convenience methods for interacting with the + node over P2P. Individual testcases should subclass this and override the on_* methods - if they want to alter message handling behaviour. 
- """ - + if they want to alter message handling behaviour.""" def __init__(self): - # Track whether we have a P2P connection open to the node - self.connected = False - self.connection = None + super().__init__() # Track number of messages of each type received and the most recent # message of each type @@ -1474,107 +239,91 @@ def __init__(self): # A count of the number of ping messages we've sent to the node self.ping_counter = 1 - # deliver_sleep_time is helpful for debugging race conditions in p2p - # tests; it causes message delivery to sleep for the specified time - # before acquiring the global lock and delivering the next message. - self.deliver_sleep_time = None + # The network services received from the peer + self.nServices = 0 + + def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs): + create_conn = super().peer_connect(*args, **kwargs) + + if send_version: + # Send a version msg + vt = msg_version() + vt.nServices = services + vt.addrTo.ip = self.dstaddr + vt.addrTo.port = self.dstport + vt.addrFrom.ip = "0.0.0.0" + vt.addrFrom.port = 0 + self.on_connection_send_msg = vt # Will be sent soon after connection_made - # Remember the services our peer has advertised - self.peer_services = None + return create_conn # Message receiving methods - def deliver(self, conn, message): + def on_message(self, message): """Receive message and dispatch message to appropriate callback. We keep a count of how many of each message type has been received - and the most recent message of each type. - - Optionally waits for deliver_sleep_time before dispatching message. - """ - - deliver_sleep = self.get_deliver_sleep_time() - if deliver_sleep is not None: - time.sleep(deliver_sleep) + and the most recent message of each type.""" with mininode_lock: try: command = message.command.decode('ascii') self.message_count[command] += 1 self.last_message[command] = message - getattr(self, 'on_' + command)(conn, message) + getattr(self, 'on_' + command)(message) except: - print("ERROR delivering %s (%s)" % (repr(message), - sys.exc_info()[0])) + print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0])) raise - def set_deliver_sleep_time(self, value): - with mininode_lock: - self.deliver_sleep_time = value - - def get_deliver_sleep_time(self): - with mininode_lock: - return self.deliver_sleep_time - # Callback methods. Can be overridden by subclasses in individual test # cases to provide custom message handling behaviour. 
- def on_open(self, conn): - self.connected = True - - def on_close(self, conn): - self.connected = False - self.connection = None - - def on_addr(self, conn, message): pass - def on_alert(self, conn, message): pass - def on_block(self, conn, message): pass - def on_blocktxn(self, conn, message): pass - def on_cmpctblock(self, conn, message): pass - def on_feefilter(self, conn, message): pass - def on_getaddr(self, conn, message): pass - def on_getblocks(self, conn, message): pass - def on_getblocktxn(self, conn, message): pass - def on_getdata(self, conn, message): pass - def on_getheaders(self, conn, message): pass - def on_headers(self, conn, message): pass - def on_mempool(self, conn): pass - def on_pong(self, conn, message): pass - def on_reject(self, conn, message): pass - def on_sendcmpct(self, conn, message): pass - def on_sendheaders(self, conn, message): pass - def on_tx(self, conn, message): pass - - def on_inv(self, conn, message): + def on_open(self): + pass + + def on_close(self): + pass + + def on_addr(self, message): pass + def on_block(self, message): pass + def on_blocktxn(self, message): pass + def on_cmpctblock(self, message): pass + def on_feefilter(self, message): pass + def on_getaddr(self, message): pass + def on_getblocks(self, message): pass + def on_getblocktxn(self, message): pass + def on_getdata(self, message): pass + def on_getheaders(self, message): pass + def on_headers(self, message): pass + def on_mempool(self, message): pass + def on_pong(self, message): pass + def on_reject(self, message): pass + def on_sendcmpct(self, message): pass + def on_sendheaders(self, message): pass + def on_tx(self, message): pass + + def on_inv(self, message): want = msg_getdata() for i in message.inv: if i.type != 0: want.inv.append(i) if len(want.inv): - conn.send_message(want) + self.send_message(want) - def on_ping(self, conn, message): - if conn.ver_send > BIP0031_VERSION: - conn.send_message(msg_pong(message.nonce)) + def on_ping(self, message): + self.send_message(msg_pong(message.nonce)) - def on_verack(self, conn, message): - conn.ver_recv = conn.ver_send + def on_verack(self, message): self.verack_received = True - def on_version(self, conn, message): - if message.nVersion >= 209: - conn.send_message(msg_verack()) - conn.ver_send = min(MY_VERSION, message.nVersion) - if message.nVersion < 209: - conn.ver_recv = conn.ver_send - conn.nServices = message.nServices + def on_version(self, message): + assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. 
Test framework only supports versions greater than {}".format(message.nVersion, MIN_VERSION_SUPPORTED) + self.send_message(msg_verack()) + self.nServices = message.nServices # Connection helper methods - def add_connection(self, conn): - self.connection = conn - def wait_for_disconnect(self, timeout=60): - test_function = lambda: not self.connected + test_function = lambda: not self.is_connected wait_until(test_function, timeout=timeout, lock=mininode_lock) # Message receiving helper methods @@ -1583,11 +332,32 @@ def wait_for_block(self, blockhash, timeout=60): test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash wait_until(test_function, timeout=timeout, lock=mininode_lock) + def wait_for_header(self, blockhash, timeout=60): + def test_function(): + last_headers = self.last_message.get('headers') + if not last_headers: + return False + return last_headers.headers[0].rehash() == blockhash + + wait_until(test_function, timeout=timeout, lock=mininode_lock) + def wait_for_getdata(self, timeout=60): + """Waits for a getdata message. + + Receiving any getdata message will satisfy the predicate. The last_message["getdata"] + value must be explicitly cleared before calling this method, or this will return + immediately with success. TODO: change this method to take a hash value and only + return true if the correct block/tx has been requested.""" test_function = lambda: self.last_message.get("getdata") wait_until(test_function, timeout=timeout, lock=mininode_lock) def wait_for_getheaders(self, timeout=60): + """Waits for a getheaders message. + + Receiving any getheaders message will satisfy the predicate. The last_message["getheaders"] + value must be explicitly cleared before calling this method, or this will return + immediately with success. TODO: change this method to take a hash value and only + return true if the correct block header has been requested.""" test_function = lambda: self.last_message.get("getheaders") wait_until(test_function, timeout=timeout, lock=mininode_lock) @@ -1606,12 +376,6 @@ def wait_for_verack(self, timeout=60): # Message sending helper functions - def send_message(self, message): - if self.connection: - self.connection.send_message(message) - else: - logger.error("Cannot send message.
No connection to node!") - def send_and_ping(self, message): self.send_message(message) self.sync_with_ping() @@ -1623,227 +387,157 @@ def sync_with_ping(self, timeout=60): wait_until(test_function, timeout=timeout, lock=mininode_lock) self.ping_counter += 1 -# The actual NodeConn class -# This class provides an interface for a p2p connection to a specified node -class NodeConn(asyncore.dispatcher): - messagemap = { - b"version": msg_version, - b"verack": msg_verack, - b"addr": msg_addr, - b"alert": msg_alert, - b"inv": msg_inv, - b"getdata": msg_getdata, - b"getblocks": msg_getblocks, - b"tx": msg_tx, - b"block": msg_block, - b"getaddr": msg_getaddr, - b"ping": msg_ping, - b"pong": msg_pong, - b"headers": msg_headers, - b"getheaders": msg_getheaders, - b"reject": msg_reject, - b"mempool": msg_mempool, - b"feefilter": msg_feefilter, - b"sendheaders": msg_sendheaders, - b"sendcmpct": msg_sendcmpct, - b"cmpctblock": msg_cmpctblock, - b"getblocktxn": msg_getblocktxn, - b"blocktxn": msg_blocktxn - } - MAGIC_BYTES = { - "mainnet": b"\xf9\xbe\xb4\xd9", # mainnet - "testnet3": b"\x0b\x11\x09\x07", # testnet3 - "regtest": b"\xfa\xbf\xb5\xda", # regtest - } - - def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK, send_version=True): - asyncore.dispatcher.__init__(self, map=mininode_socket_map) - self.dstaddr = dstaddr - self.dstport = dstport - self.create_socket(socket.AF_INET, socket.SOCK_STREAM) - self.sendbuf = b"" - self.recvbuf = b"" - self.ver_send = 209 - self.ver_recv = 209 - self.last_sent = 0 - self.state = "connecting" - self.network = net - self.cb = callback - self.disconnect = False - self.nServices = 0 - if send_version: - # stuff version msg into sendbuf - vt = msg_version() - vt.nServices = services - vt.addrTo.ip = self.dstaddr - vt.addrTo.port = self.dstport - vt.addrFrom.ip = "0.0.0.0" - vt.addrFrom.port = 0 - self.send_message(vt, True) +# One lock for synchronizing all data access between the network event loop (see +# NetworkThread below) and the thread running the test logic. For simplicity, +# P2PConnection acquires this lock whenever delivering a message to a P2PInterface. +# This lock should be acquired in the thread running the test logic to synchronize +# access to any data shared with the P2PInterface or P2PConnection. 
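The comment above describes the locking discipline: a single re-entrant lock guards all data shared between the network event loop and the thread running the test logic. A self-contained sketch of that discipline follows; the names and the "message" are illustrative, not the framework's.

import threading

lock = threading.RLock()
last_message = {}

def deliver():
    # Writer side: the network thread takes the lock to record a message.
    with lock:
        last_message["pong"] = {"nonce": 7}

t = threading.Thread(target=deliver, name="NetworkThread")
t.start()
t.join()

# Reader side: the test logic takes the same lock before touching shared state.
with lock:
    assert last_message["pong"]["nonce"] == 7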
+mininode_lock = threading.RLock() - logger.info('Connecting to Bitcoin Node: %s:%d' % (self.dstaddr, self.dstport)) - try: - self.connect((dstaddr, dstport)) - except: - self.handle_close() - self.rpc = rpc - - def handle_connect(self): - if self.state != "connected": - logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport)) - self.state = "connected" - self.cb.on_open(self) - - def handle_close(self): - logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport)) - self.state = "closed" - self.recvbuf = b"" - self.sendbuf = b"" - try: - self.close() - except: - pass - self.cb.on_close(self) +class NetworkThread(threading.Thread): + network_event_loop = None - def handle_read(self): - t = self.recv(8192) - if len(t) > 0: - self.recvbuf += t - self.got_data() + def __init__(self): + super().__init__(name="NetworkThread") + # There is only one event loop and no more than one thread must be created + assert not self.network_event_loop - def readable(self): - return True + NetworkThread.network_event_loop = asyncio.new_event_loop() - def writable(self): - with mininode_lock: - pre_connection = self.state == "connecting" - length = len(self.sendbuf) - return (length > 0 or pre_connection) + def run(self): + """Start the network thread.""" + self.network_event_loop.run_forever() - def handle_write(self): - with mininode_lock: - # asyncore does not expose socket connection, only the first read/write - # event, thus we must check connection manually here to know when we - # actually connect - if self.state == "connecting": - self.handle_connect() - if not self.writable(): - return + def close(self, timeout=10): + """Close the connections and network event loop.""" + self.network_event_loop.call_soon_threadsafe(self.network_event_loop.stop) + wait_until(lambda: not self.network_event_loop.is_running(), timeout=timeout) + self.network_event_loop.close() + self.join(timeout) - try: - sent = self.send(self.sendbuf) - except: - self.handle_close() - return - self.sendbuf = self.sendbuf[sent:] - def got_data(self): - try: - while True: - if len(self.recvbuf) < 4: - return - if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]: - raise ValueError("got garbage %s" % repr(self.recvbuf)) - if self.ver_recv < 209: - if len(self.recvbuf) < 4 + 12 + 4: - return - command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] - msglen = struct.unpack("= 209: - th = sha256(data) - h = sha256(th) - tmsg += h[:4] - tmsg += data with mininode_lock: - self.sendbuf += tmsg - self.last_sent = time.time() - - def got_message(self, message): - if message.command == b"version": - if message.nVersion <= BIP0031_VERSION: - self.messagemap[b'ping'] = msg_ping_prebip31 - if self.last_sent + 30 * 60 < time.time(): - self.send_message(self.messagemap[b'ping']()) - self._log_message("receive", message) - self.cb.deliver(self, message) + for block in blocks: + self.block_store[block.sha256] = block + self.last_block_hash = block.sha256 - def _log_message(self, direction, msg): - if direction == "send": - log_message = "Send message to " - elif direction == "receive": - log_message = "Received message from " - log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500]) - if len(log_message) > 500: - log_message += "... 
(msg truncated)" - logger.debug(log_message) + reject_reason = [reject_reason] if reject_reason else [] + with node.assert_debug_log(expected_msgs=reject_reason): + self.send_message(msg_headers([CBlockHeader(blocks[-1])])) - def disconnect_node(self): - self.disconnect = True + if request_block: + wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock) + if expect_disconnect: + self.wait_for_disconnect() + else: + self.sync_with_ping() -class NetworkThread(Thread): - def run(self): - while mininode_socket_map: - # We check for whether to disconnect outside of the asyncore - # loop to workaround the behavior of asyncore when using - # select - disconnected = [] - for fd, obj in mininode_socket_map.items(): - if obj.disconnect: - disconnected.append(obj) - [ obj.handle_close() for obj in disconnected ] - asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1) - logger.debug("Network thread closing") - - -# An exception we can raise if we detect a potential disconnect -# (p2p or rpc) before the test is complete -class EarlyDisconnectError(Exception): - def __init__(self, value): - self.value = value - - def __str__(self): - return repr(self.value) + if success: + wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout) + else: + assert node.getbestblockhash() != blocks[-1].hash + + def send_txs_and_test(self, txs, node, *, success=True, expect_disconnect=False, reject_reason=None): + """Send txs to test node and test whether they're accepted to the mempool. + + - add all txs to our tx_store + - send tx messages for all txs + - if success is True/False: assert that the txs are/are not accepted to the mempool + - if expect_disconnect is True: Skip the sync with ping + - if reject_reason is set: assert that the correct reject message is logged.""" + + with mininode_lock: + for tx in txs: + self.tx_store[tx.sha256] = tx + + reject_reason = [reject_reason] if reject_reason else [] + with node.assert_debug_log(expected_msgs=reject_reason): + for tx in txs: + self.send_message(msg_tx(tx)) + + if expect_disconnect: + self.wait_for_disconnect() + else: + self.sync_with_ping() + + raw_mempool = node.getrawmempool() + if success: + # Check that all txs are now in the mempool + for tx in txs: + assert tx.hash in raw_mempool, "{} not found in mempool".format(tx.hash) + else: + # Check that none of the txs are now in the mempool + for tx in txs: + assert tx.hash not in raw_mempool, "{} tx found in mempool".format(tx.hash) diff --git a/test/functional/test_framework/netutil.py b/test/functional/test_framework/netutil.py index 45d8e22d2..45c182f63 100644 --- a/test/functional/test_framework/netutil.py +++ b/test/functional/test_framework/netutil.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Linux network utilities. 
@@ -9,23 +9,22 @@ import sys import socket -import fcntl import struct import array import os from binascii import unhexlify, hexlify -STATE_ESTABLISHED = '01' -STATE_SYN_SENT = '02' -STATE_SYN_RECV = '03' -STATE_FIN_WAIT1 = '04' -STATE_FIN_WAIT2 = '05' -STATE_TIME_WAIT = '06' -STATE_CLOSE = '07' -STATE_CLOSE_WAIT = '08' -STATE_LAST_ACK = '09' +# STATE_ESTABLISHED = '01' +# STATE_SYN_SENT = '02' +# STATE_SYN_RECV = '03' +# STATE_FIN_WAIT1 = '04' +# STATE_FIN_WAIT2 = '05' +# STATE_TIME_WAIT = '06' +# STATE_CLOSE = '07' +# STATE_CLOSE_WAIT = '08' +# STATE_LAST_ACK = '09' STATE_LISTEN = '0A' -STATE_CLOSING = '0B' +# STATE_CLOSING = '0B' def get_socket_inodes(pid): ''' @@ -90,6 +89,8 @@ def all_interfaces(): ''' Return all interfaces that are up ''' + import fcntl # Linux only, so only import when required + is_64bits = sys.maxsize > 2**32 struct_size = 40 if is_64bits else 32 s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -106,7 +107,7 @@ def all_interfaces(): max_possible *= 2 else: break - namestr = names.tostring() + namestr = names.tobytes() return [(namestr[i:i+16].split(b'\0', 1)[0], socket.inet_ntoa(namestr[i+20:i+24])) for i in range(0, outbytes, struct_size)] diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py index 3d9572788..2fe44010b 100644 --- a/test/functional/test_framework/script.py +++ b/test/functional/test_framework/script.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers +# Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Functionality to build scripts, as well as SignatureHash(). @@ -7,25 +7,15 @@ This file is modified from python-bitcoinlib. 
""" -from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string +from .messages import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string + from binascii import hexlify import hashlib - -import sys -bchr = chr -bord = ord -if sys.version > '3': - long = int - bchr = lambda x: bytes([x]) - bord = lambda x: x - import struct from .bignum import bn2vch -MAX_SCRIPT_SIZE = 10000 MAX_SCRIPT_ELEMENT_SIZE = 520 -MAX_SCRIPT_OPCODES = 201 OPCODE_NAMES = {} @@ -36,15 +26,15 @@ def hash160(s): _opcode_instances = [] class CScriptOp(int): """A single script opcode""" - __slots__ = [] + __slots__ = () @staticmethod def encode_op_pushdata(d): """Encode a PUSHDATA op, returning bytes""" if len(d) < 0x4c: - return b'' + bchr(len(d)) + d # OP_PUSHDATA + return b'' + bytes([len(d)]) + d # OP_PUSHDATA elif len(d) <= 0xff: - return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1 + return b'\x4c' + bytes([len(d)]) + d # OP_PUSHDATA1 elif len(d) <= 0xffff: return b'\x4d' + struct.pack(b' OP_PUSHDATA4: @@ -719,21 +471,21 @@ def raw_iter(self): pushdata_type = 'PUSHDATA1' if i >= len(self): raise CScriptInvalidError('PUSHDATA1: missing data length') - datasize = bord(self[i]) + datasize = self[i] i += 1 elif opcode == OP_PUSHDATA2: pushdata_type = 'PUSHDATA2' if i + 1 >= len(self): raise CScriptInvalidError('PUSHDATA2: missing data length') - datasize = bord(self[i]) + (bord(self[i+1]) << 8) + datasize = self[i] + (self[i+1] << 8) i += 2 elif opcode == OP_PUSHDATA4: pushdata_type = 'PUSHDATA4' if i + 3 >= len(self): raise CScriptInvalidError('PUSHDATA4: missing data length') - datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24) + datasize = self[i] + (self[i+1] << 8) + (self[i+2] << 16) + (self[i+3] << 24) i += 4 else: @@ -771,11 +523,9 @@ def __iter__(self): yield CScriptOp(opcode) def __repr__(self): - # For Python3 compatibility add b before strings so testcases don't - # need to change def _repr(o): if isinstance(o, bytes): - return b"x('%s')" % hexlify(o).decode('ascii') + return "x('%s')" % hexlify(o).decode('ascii') else: return repr(o) @@ -886,7 +636,7 @@ def SignatureHash(script, txTo, inIdx, hashtype): txtmp.vin = [] txtmp.vin.append(tmp) - s = txtmp.serialize() + s = txtmp.serialize_without_witness() s += struct.pack(b"> 25 + chk = (chk & 0x1ffffff) << 5 ^ value + for i in range(5): + chk ^= generator[i] if ((top >> i) & 1) else 0 + return chk + + +def bech32_hrp_expand(hrp): + """Expand the HRP into values for checksum computation.""" + return [ord(x) >> 5 for x in hrp] + [0] + [ord(x) & 31 for x in hrp] + + +def bech32_verify_checksum(hrp, data): + """Verify a checksum given HRP and converted data characters.""" + return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1 + + +def bech32_create_checksum(hrp, data): + """Compute the checksum values given HRP and data.""" + values = bech32_hrp_expand(hrp) + data + polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1 + return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] + + +def bech32_encode(hrp, data): + """Compute a Bech32 string given HRP and data values.""" + combined = data + bech32_create_checksum(hrp, data) + return hrp + '1' + ''.join([CHARSET[d] for d in combined]) + + +def bech32_decode(bech): + """Validate a Bech32 string, and determine HRP and data.""" + if ((any(ord(x) < 33 or ord(x) > 126 for x in bech)) or + (bech.lower() != bech and bech.upper() != bech)): + return (None, None) + bech = bech.lower() + pos = 
bech.rfind('1') + if pos < 1 or pos + 7 > len(bech) or len(bech) > 90: + return (None, None) + if not all(x in CHARSET for x in bech[pos+1:]): + return (None, None) + hrp = bech[:pos] + data = [CHARSET.find(x) for x in bech[pos+1:]] + if not bech32_verify_checksum(hrp, data): + return (None, None) + return (hrp, data[:-6]) + + +def convertbits(data, frombits, tobits, pad=True): + """General power-of-2 base conversion.""" + acc = 0 + bits = 0 + ret = [] + maxv = (1 << tobits) - 1 + max_acc = (1 << (frombits + tobits - 1)) - 1 + for value in data: + if value < 0 or (value >> frombits): + return None + acc = ((acc << frombits) | value) & max_acc + bits += frombits + while bits >= tobits: + bits -= tobits + ret.append((acc >> bits) & maxv) + if pad: + if bits: + ret.append((acc << (tobits - bits)) & maxv) + elif bits >= frombits or ((acc << (tobits - bits)) & maxv): + return None + return ret + + +def decode(hrp, addr): + """Decode a segwit address.""" + hrpgot, data = bech32_decode(addr) + if hrpgot != hrp: + return (None, None) + decoded = convertbits(data[1:], 5, 8, False) + if decoded is None or len(decoded) < 2 or len(decoded) > 40: + return (None, None) + if data[0] > 16: + return (None, None) + if data[0] == 0 and len(decoded) != 20 and len(decoded) != 32: + return (None, None) + return (data[0], decoded) + + +def encode(hrp, witver, witprog): + """Encode a segwit address.""" + ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5)) + if decode(hrp, ret) == (None, None): + return None + return ret diff --git a/test/functional/test_framework/siphash.py b/test/functional/test_framework/siphash.py index f68ecad36..85836845d 100644 --- a/test/functional/test_framework/siphash.py +++ b/test/functional/test_framework/siphash.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2016 The Bitcoin Core developers +# Copyright (c) 2016-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Specialized SipHash-2-4 implementations. diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py index a08b03ed2..dd0f20926 100644 --- a/test/functional/test_framework/socks5.py +++ b/test/functional/test_framework/socks5.py @@ -1,15 +1,17 @@ #!/usr/bin/env python3 -# Copyright (c) 2015-2016 The Bitcoin Core developers +# Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
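The convertbits routine above is a general power-of-two rebasing of a bit stream; segwit addresses use it to regroup 8-bit bytes into the 5-bit symbols that bech32 encodes. A stripped-down demo of the same regrouping follows; the validity checks of the original (negative values, leftover-bit rules) are omitted here for brevity, so this copy is for illustration only.

def regroup(data, frombits, tobits, pad=True):
    # Accumulate bits from the input base, emit them in the output base.
    acc, bits, ret = 0, 0, []
    maxv = (1 << tobits) - 1
    for value in data:
        acc = (acc << frombits) | value
        bits += frombits
        while bits >= tobits:
            bits -= tobits
            ret.append((acc >> bits) & maxv)
    if pad and bits:
        ret.append((acc << (tobits - bits)) & maxv)
    return ret

groups = regroup(b"\xff\x00", 8, 5)               # [31, 28, 0, 0]
print(groups)
print(bytes(regroup(groups, 5, 8, pad=False)))    # b'\xff\x00'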
"""Dummy Socks5 server for testing.""" -import socket, threading, queue +import socket +import threading +import queue import logging logger = logging.getLogger("TestFramework.socks5") -### Protocol constants +# Protocol constants class Command: CONNECT = 0x01 @@ -18,7 +20,7 @@ class AddressType: DOMAINNAME = 0x03 IPV6 = 0x04 -### Utility functions +# Utility functions def recvall(s, n): """Receive n bytes from a socket, or fail.""" rv = bytearray() @@ -30,8 +32,8 @@ def recvall(s, n): n -= len(d) return rv -### Implementation classes -class Socks5Configuration(object): +# Implementation classes +class Socks5Configuration(): """Proxy configuration.""" def __init__(self): self.addr = None # Bind address (must be set) @@ -39,7 +41,7 @@ def __init__(self): self.unauth = False # Support unauthenticated self.auth = False # Support authentication -class Socks5Command(object): +class Socks5Command(): """Information about an incoming socks5 command.""" def __init__(self, cmd, atyp, addr, port, username, password): self.cmd = cmd # Command (one of Command.*) @@ -51,7 +53,7 @@ def __init__(self, cmd, atyp, addr, port, username, password): def __repr__(self): return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password) -class Socks5Connection(object): +class Socks5Connection(): def __init__(self, serv, conn, peer): self.serv = serv self.conn = conn @@ -91,7 +93,7 @@ def handle(self): self.conn.sendall(bytearray([0x01, 0x00])) # Read connect request - (ver,cmd,rsv,atyp) = recvall(self.conn, 4) + ver, cmd, _, atyp = recvall(self.conn, 4) if ver != 0x05: raise IOError('Invalid socks version %i in connect request' % ver) if cmd != Command.CONNECT: @@ -122,7 +124,7 @@ def handle(self): finally: self.conn.close() -class Socks5Server(object): +class Socks5Server(): def __init__(self, conf): self.conf = conf self.s = socket.socket(conf.af) @@ -141,7 +143,7 @@ def run(self): thread = threading.Thread(None, conn.handle) thread.daemon = True thread.start() - + def start(self): assert(not self.running) self.running = True diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index 7aabdd9c0..97710bd6c 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -1,24 +1,24 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Base class for RPC testing.""" -from collections import deque +import configparser from enum import Enum import logging -import optparse +import argparse import os import pdb import shutil import sys import tempfile import time -import traceback from .authproxy import JSONRPCException from . 
import coverage from .test_node import TestNode +from .mininode import NetworkThread from .util import ( MAX_NODES, PortSeed, @@ -26,8 +26,8 @@ check_json_precision, connect_nodes_bi, disconnect_nodes, + get_datadir_path, initialize_datadir, - log_filename, p2p_port, set_node_times, sync_blocks, @@ -43,7 +43,35 @@ class TestStatus(Enum): TEST_EXIT_FAILED = 1 TEST_EXIT_SKIPPED = 77 -class BitcoinTestFramework(object): + +class SkipTest(Exception): + """This exception is raised to skip a test""" + + def __init__(self, message): + self.message = message + + +class BitcoinTestMetaClass(type): + """Metaclass for BitcoinTestFramework. + + Ensures that any attempt to register a subclass of `BitcoinTestFramework` + adheres to a standard whereby the subclass overrides `set_test_params` and + `run_test` but DOES NOT override either `__init__` or `main`. If any of + those standards are violated, a ``TypeError`` is raised.""" + + def __new__(cls, clsname, bases, dct): + if not clsname == 'BitcoinTestFramework': + if not ('run_test' in dct and 'set_test_params' in dct): + raise TypeError("BitcoinTestFramework subclasses must override " + "'run_test' and 'set_test_params'") + if '__init__' in dct or 'main' in dct: + raise TypeError("BitcoinTestFramework subclasses may not override " + "'__init__' or 'main'") + + return super().__new__(cls, clsname, bases, dct) + + +class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): """Base class for a bitcoin test script. Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods. @@ -63,7 +91,11 @@ def __init__(self): """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method""" self.setup_clean_chain = False self.nodes = [] + self.network_thread = None self.mocktime = 0 + self.rpc_timewait = 60 # Wait for up to 60 seconds for the RPC server to respond + self.supports_cli = False + self.bind_to_localhost_only = True self.set_test_params() assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()" @@ -71,39 +103,49 @@ def __init__(self): def main(self): """Main function. This should not be overridden by the subclass test scripts.""" - parser = optparse.OptionParser(usage="%prog [options]") - parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true", - help="Leave bitcoinds and test.* datadir on exit or error") - parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true", - help="Don't stop bitcoinds after the test execution") - parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"), - help="Source directory containing bitcoind/bitcoin-cli (default: %default)") - parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), - help="Directory for caching pregenerated datadirs") - parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs") - parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO", - help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. 
Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.") - parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true", - help="Print out all RPC calls as they are made") - parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int', - help="The seed to use for assigning port numbers (default: current process id)") - parser.add_option("--coveragedir", dest="coveragedir", - help="Write tested RPC commands into this directory") - parser.add_option("--configfile", dest="configfile", - help="Location of the test framework config file") - parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true", - help="Attach a python debugger if test fails") + parser = argparse.ArgumentParser(usage="%(prog)s [options]") + parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true", + help="Leave bitcoinds and test.* datadir on exit or error") + parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true", + help="Don't stop bitcoinds after the test execution") + parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), + help="Directory for caching pregenerated datadirs (default: %(default)s)") + parser.add_argument("--tmpdir", dest="tmpdir", help="Root directory for datadirs") + parser.add_argument("-l", "--loglevel", dest="loglevel", default="INFO", + help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.") + parser.add_argument("--tracerpc", dest="trace_rpc", default=False, action="store_true", + help="Print out all RPC calls as they are made") + parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int, + help="The seed to use for assigning port numbers (default: current process id)") + parser.add_argument("--coveragedir", dest="coveragedir", + help="Write tested RPC commands into this directory") + parser.add_argument("--configfile", dest="configfile", + default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../config.ini"), + help="Location of the test framework config file (default: %(default)s)") + parser.add_argument("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true", + help="Attach a python debugger if test fails") + parser.add_argument("--usecli", dest="usecli", default=False, action="store_true", + help="use bitcoin-cli instead of RPC for all commands") self.add_options(parser) - (self.options, self.args) = parser.parse_args() + self.options = parser.parse_args() PortSeed.n = self.options.port_seed - os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH'] - check_json_precision() self.options.cachedir = os.path.abspath(self.options.cachedir) + config = configparser.ConfigParser() + config.read_file(open(self.options.configfile)) + self.options.bitcoind = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"]) + self.options.bitcoincli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"]) + + os.environ['PATH'] = os.pathsep.join([ + 
os.path.join(config['environment']['BUILDDIR'], 'src'), + os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'), + os.environ['PATH'] + ]) + # Set up temp directory and start logging if self.options.tmpdir: self.options.tmpdir = os.path.abspath(self.options.tmpdir) @@ -112,11 +154,19 @@ def main(self): self.options.tmpdir = tempfile.mkdtemp(prefix="test") self._start_logging() + self.log.debug('Setting up network thread') + self.network_thread = NetworkThread() + self.network_thread.start() + success = TestStatus.FAILED try: + if self.options.usecli and not self.supports_cli: + raise SkipTest("--usecli specified but test does not support using CLI") + self.skip_test_if_missing_module() self.setup_chain() self.setup_network() + self.import_deterministic_coinbase_privkeys() self.run_test() success = TestStatus.PASSED except JSONRPCException as e: @@ -137,44 +187,38 @@ def main(self): print("Testcase failed. Attaching python debugger. Enter ? for help") pdb.set_trace() + self.log.debug('Closing down network thread') + self.network_thread.close() if not self.options.noshutdown: self.log.info("Stopping nodes") if self.nodes: self.stop_nodes() else: + for node in self.nodes: + node.cleanup_on_exit = False self.log.info("Note: bitcoinds were not stopped and may still be running") if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED: - self.log.info("Cleaning up") - shutil.rmtree(self.options.tmpdir) + self.log.info("Cleaning up {} on exit".format(self.options.tmpdir)) + cleanup_tree_on_exit = True else: self.log.warning("Not cleaning up dir %s" % self.options.tmpdir) - if os.getenv("PYTHON_DEBUG", ""): - # Dump the end of the debug logs, to aid in debugging rare - # travis failures. - import glob - filenames = [self.options.tmpdir + "/test_framework.log"] - filenames += glob.glob(self.options.tmpdir + "/node*/regtest/debug.log") - MAX_LINES_TO_PRINT = 1000 - for fn in filenames: - try: - with open(fn, 'r') as f: - print("From", fn, ":") - print("".join(deque(f, MAX_LINES_TO_PRINT))) - except OSError: - print("Opening file %s failed." % fn) - traceback.print_exc() + cleanup_tree_on_exit = False if success == TestStatus.PASSED: self.log.info("Tests successful") - sys.exit(TEST_EXIT_PASSED) + exit_code = TEST_EXIT_PASSED elif success == TestStatus.SKIPPED: self.log.info("Test skipped") - sys.exit(TEST_EXIT_SKIPPED) + exit_code = TEST_EXIT_SKIPPED else: self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir) - logging.shutdown() - sys.exit(TEST_EXIT_FAILED) + self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir)) + exit_code = TEST_EXIT_FAILED + logging.shutdown() + if cleanup_tree_on_exit: + shutil.rmtree(self.options.tmpdir) + sys.exit(exit_code) # Methods to override in subclass test scripts. 
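Taken together with the BitcoinTestMetaClass rules above, a conforming test script overrides set_test_params() and run_test() and leaves __init__() and main() alone. A minimal skeleton of such a script is sketched below; the test body is illustrative only.

#!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework

class ExampleTest(BitcoinTestFramework):
    def set_test_params(self):
        # Required: the metaclass rejects subclasses that omit this method.
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        # Required: the actual test logic goes here.
        self.log.info("Best block: %s", self.nodes[0].getbestblockhash())

if __name__ == '__main__':
    ExampleTest().main()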
def set_test_params(self): @@ -185,6 +229,10 @@ def add_options(self, parser): """Override this method to add command-line options to the test""" pass + def skip_test_if_missing_module(self): + """Override this method to skip a test if a module is not compiled""" + pass + def setup_chain(self): """Override this method to customize blockchain setup""" self.log.info("Initializing test directory " + self.options.tmpdir) @@ -212,36 +260,53 @@ def setup_nodes(self): self.add_nodes(self.num_nodes, extra_args) self.start_nodes() + def import_deterministic_coinbase_privkeys(self): + if self.setup_clean_chain: + return + + for n in self.nodes: + try: + n.getwalletinfo() + except JSONRPCException as e: + assert str(e).startswith('Method not found') + continue + + n.importprivkey(n.get_deterministic_priv_key().key) + def run_test(self): """Tests must override this method to define test logic""" raise NotImplementedError # Public helper methods. These can be accessed by the subclass test scripts. - def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None): + def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None): """Instantiate TestNode objects""" - + if self.bind_to_localhost_only: + extra_confs = [["bind=127.0.0.1"]] * num_nodes + else: + extra_confs = [[]] * num_nodes if extra_args is None: extra_args = [[]] * num_nodes if binary is None: - binary = [None] * num_nodes + binary = [self.options.bitcoind] * num_nodes + assert_equal(len(extra_confs), num_nodes) assert_equal(len(extra_args), num_nodes) assert_equal(len(binary), num_nodes) for i in range(num_nodes): - self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir)) + self.nodes.append(TestNode(i, get_datadir_path(self.options.tmpdir, i), rpchost=rpchost, timewait=self.rpc_timewait, bitcoind=binary[i], bitcoin_cli=self.options.bitcoincli, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, extra_conf=extra_confs[i], extra_args=extra_args[i], use_cli=self.options.usecli)) - def start_node(self, i, extra_args=None, stderr=None): + def start_node(self, i, *args, **kwargs): """Start a bitcoind""" node = self.nodes[i] - node.start(extra_args, stderr) + node.start(*args, **kwargs) node.wait_for_rpc_connection() if self.options.coveragedir is not None: coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) - def start_nodes(self, extra_args=None): + def start_nodes(self, extra_args=None, *args, **kwargs): """Start multiple bitcoinds""" if extra_args is None: @@ -249,7 +314,7 @@ def start_nodes(self, extra_args=None): assert_equal(len(extra_args), self.num_nodes) try: for i, node in enumerate(self.nodes): - node.start(extra_args[i]) + node.start(extra_args[i], *args, **kwargs) for node in self.nodes: node.wait_for_rpc_connection() except: @@ -261,9 +326,9 @@ def start_nodes(self, extra_args=None): for node in self.nodes: coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc) - def stop_node(self, i): + def stop_node(self, i, expected_stderr=''): """Stop a bitcoind test node""" - self.nodes[i].stop_node() + self.nodes[i].stop_node(expected_stderr) self.nodes[i].wait_until_stopped() def stop_nodes(self): @@ -276,26 +341,10 @@ def stop_nodes(self): # Wait for nodes to stop node.wait_until_stopped() - def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None): - with tempfile.SpooledTemporaryFile(max_size=2**16) as 
log_stderr: - try: - self.start_node(i, extra_args, stderr=log_stderr) - self.stop_node(i) - except Exception as e: - assert 'bitcoind exited' in str(e) # node must have shutdown - self.nodes[i].running = False - self.nodes[i].process = None - if expected_msg is not None: - log_stderr.seek(0) - stderr = log_stderr.read().decode('utf-8') - if expected_msg not in stderr: - raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr) - else: - if expected_msg is None: - assert_msg = "bitcoind should have exited with an error" - else: - assert_msg = "bitcoind should have exited with expected error " + expected_msg - raise AssertionError(assert_msg) + def restart_node(self, i, extra_args=None): + """Stop and start a test node""" + self.stop_node(i) + self.start_node(i, extra_args) def wait_for_node_exit(self, i, timeout): self.nodes[i].process.wait(timeout) @@ -330,7 +379,7 @@ def enable_mocktime(self): blockchain. If the cached version of the blockchain is used without mocktime then the mempools will not sync due to IBD. - For backwared compatibility of the python scripts with previous + For backward compatibility of the python scripts with previous versions of the cache, this helper function sets mocktime to Jan 1, 2014 + (201 * 10 * 60)""" self.mocktime = 1388534400 + (201 * 10 * 60) @@ -345,7 +394,7 @@ def _start_logging(self): self.log = logging.getLogger('TestFramework') self.log.setLevel(logging.DEBUG) # Create file handler to log all messages - fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log') + fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log', encoding='utf-8') fh.setLevel(logging.DEBUG) # Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel. 
ch = logging.StreamHandler(sys.stdout) @@ -353,7 +402,7 @@ def _start_logging(self): ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper() ch.setLevel(ll) # Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted) - formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S') + formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S') formatter.converter = time.gmtime fh.setFormatter(formatter) ch.setFormatter(formatter) @@ -377,7 +426,7 @@ def _initialize_chain(self): assert self.num_nodes <= MAX_NODES create_cache = False for i in range(MAX_NODES): - if not os.path.isdir(os.path.join(self.options.cachedir, 'node' + str(i))): + if not os.path.isdir(get_datadir_path(self.options.cachedir, i)): create_cache = True break @@ -386,16 +435,16 @@ def _initialize_chain(self): # find and delete old cache directories if any exist for i in range(MAX_NODES): - if os.path.isdir(os.path.join(self.options.cachedir, "node" + str(i))): - shutil.rmtree(os.path.join(self.options.cachedir, "node" + str(i))) + if os.path.isdir(get_datadir_path(self.options.cachedir, i)): + shutil.rmtree(get_datadir_path(self.options.cachedir, i)) # Create cache directories, run bitcoinds: for i in range(MAX_NODES): datadir = initialize_datadir(self.options.cachedir, i) - args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"] + args = [self.options.bitcoind, "-datadir=" + datadir, '-disablewallet'] if i > 0: args.append("-connect=127.0.0.1:" + str(p2p_port(0))) - self.nodes.append(TestNode(i, self.options.cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None)) + self.nodes.append(TestNode(i, get_datadir_path(self.options.cachedir, i), extra_conf=["bind=127.0.0.1"], extra_args=[], rpchost=None, timewait=self.rpc_timewait, bitcoind=self.options.bitcoind, bitcoin_cli=self.options.bitcoincli, mocktime=self.mocktime, coverage_dir=None)) self.nodes[i].args = args self.start_node(i) @@ -416,7 +465,7 @@ def _initialize_chain(self): for peer in range(4): for j in range(25): set_node_times(self.nodes, block_time) - self.nodes[peer].generate(1) + self.nodes[peer].generatetoaddress(1, self.nodes[peer].get_deterministic_priv_key().address) block_time += 10 * 60 # Must sync before next peer starts generating blocks sync_blocks(self.nodes) @@ -425,15 +474,19 @@ def _initialize_chain(self): self.stop_nodes() self.nodes = [] self.disable_mocktime() + + def cache_path(n, *paths): + return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths) + for i in range(MAX_NODES): - os.remove(log_filename(self.options.cachedir, i, "debug.log")) - os.remove(log_filename(self.options.cachedir, i, "db.log")) - os.remove(log_filename(self.options.cachedir, i, "peers.dat")) - os.remove(log_filename(self.options.cachedir, i, "fee_estimates.dat")) + os.rmdir(cache_path(i, 'wallets')) # Remove empty wallets dir + for entry in os.listdir(cache_path(i)): + if entry not in ['chainstate', 'blocks']: + os.remove(cache_path(i, entry)) for i in range(self.num_nodes): - from_dir = os.path.join(self.options.cachedir, "node" + str(i)) - to_dir = os.path.join(self.options.tmpdir, "node" + str(i)) + from_dir = get_datadir_path(self.options.cachedir, i) + to_dir = 
get_datadir_path(self.options.tmpdir, i) shutil.copytree(from_dir, to_dir) initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in bitcoin.conf @@ -445,36 +498,45 @@ def _initialize_chain_clean(self): for i in range(self.num_nodes): initialize_datadir(self.options.tmpdir, i) -class ComparisonTestFramework(BitcoinTestFramework): - """Test framework for doing p2p comparison testing + def skip_if_no_py3_zmq(self): + """Attempt to import the zmq package and skip the test if the import fails.""" + try: + import zmq # noqa + except ImportError: + raise SkipTest("python3-zmq module not available.") - Sets up some bitcoind binaries: - - 1 binary: test binary - - 2 binaries: 1 test binary, 1 ref binary - - n>2 binaries: 1 test binary, n-1 ref binaries""" + def skip_if_no_bitcoind_zmq(self): + """Skip the running test if bitcoind has not been compiled with zmq support.""" + if not self.is_zmq_compiled(): + raise SkipTest("bitcoind has not been built with zmq enabled.") - def set_test_params(self): - self.num_nodes = 2 - self.setup_clean_chain = True + def skip_if_no_wallet(self): + """Skip the running test if wallet has not been compiled.""" + if not self.is_wallet_compiled(): + raise SkipTest("wallet has not been compiled.") - def add_options(self, parser): - parser.add_option("--testbinary", dest="testbinary", - default=os.getenv("BITCOIND", "bitcoind"), - help="bitcoind binary to test") - parser.add_option("--refbinary", dest="refbinary", - default=os.getenv("BITCOIND", "bitcoind"), - help="bitcoind binary to use for reference nodes (if any)") + def skip_if_no_cli(self): + """Skip the running test if bitcoin-cli has not been compiled.""" + if not self.is_cli_compiled(): + raise SkipTest("bitcoin-cli has not been compiled.") - def setup_network(self): - extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes - if hasattr(self, "extra_args"): - extra_args = self.extra_args - self.add_nodes(self.num_nodes, extra_args, - binary=[self.options.testbinary] + - [self.options.refbinary] * (self.num_nodes - 1)) - self.start_nodes() + def is_cli_compiled(self): + """Checks whether bitcoin-cli was compiled.""" + config = configparser.ConfigParser() + config.read_file(open(self.options.configfile)) -class SkipTest(Exception): - """This exception is raised to skip a test""" - def __init__(self, message): - self.message = message + return config["components"].getboolean("ENABLE_CLI") + + def is_wallet_compiled(self): + """Checks whether the wallet module was compiled.""" + config = configparser.ConfigParser() + config.read_file(open(self.options.configfile)) + + return config["components"].getboolean("ENABLE_WALLET") + + def is_zmq_compiled(self): + """Checks whether the zmq module was compiled.""" + config = configparser.ConfigParser() + config.read_file(open(self.options.configfile)) + + return config["components"].getboolean("ENABLE_ZMQ") diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index 12dab57a0..7ab7fcfcb 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -1,28 +1,50 @@ #!/usr/bin/env python3 -# Copyright (c) 2017 The Bitcoin Core developers +# Copyright (c) 2017-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
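The is_cli_compiled/is_wallet_compiled/is_zmq_compiled helpers above all follow the same shape: read the generated config.ini and evaluate a boolean flag in its [components] section. A standalone sketch of that lookup follows; the ini content below is made up for illustration.

import configparser

config = configparser.ConfigParser()
config.read_string("""
[components]
ENABLE_WALLET=true
ENABLE_ZMQ=false
""")

# getboolean() accepts true/false, yes/no, on/off, 1/0.
print(config["components"].getboolean("ENABLE_WALLET"))  # True
print(config["components"].getboolean("ENABLE_ZMQ"))     # False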
"""Class for bitcoind node under test""" +import contextlib import decimal import errno +from enum import Enum import http.client import json import logging import os +import re import subprocess +import tempfile import time +import urllib.parse +import collections +from .authproxy import JSONRPCException from .util import ( - assert_equal, + append_config, + delete_cookie_file, get_rpc_proxy, rpc_url, wait_until, + p2p_port, ) -from .authproxy import JSONRPCException + +# For Python 3.4 compatibility +JSONDecodeError = getattr(json, "JSONDecodeError", ValueError) BITCOIND_PROC_WAIT_TIMEOUT = 60 + +class FailedToStartError(Exception): + """Raised when a node fails to start correctly.""" + + +class ErrorMatch(Enum): + FULL_TEXT = 1 + FULL_REGEX = 2 + PARTIAL_REGEX = 3 + + class TestNode(): """A class for representing a bitcoind node under test. @@ -31,30 +53,40 @@ class TestNode(): - state about the node (whether it's running, etc) - a Python subprocess.Popen object representing the running process - an RPC connection to the node + - one or more P2P connections to the node + - To make things easier for the test writer, a bit of magic is happening under the covers. - Any unrecognised messages will be dispatched to the RPC connection.""" + To make things easier for the test writer, any unrecognised messages will + be dispatched to the RPC connection.""" - def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir): + def __init__(self, i, datadir, *, rpchost, timewait, bitcoind, bitcoin_cli, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False): self.index = i - self.datadir = os.path.join(dirname, "node" + str(i)) + self.datadir = datadir + self.stdout_dir = os.path.join(self.datadir, "stdout") + self.stderr_dir = os.path.join(self.datadir, "stderr") self.rpchost = rpchost - if timewait: - self.rpc_timeout = timewait - else: - # Wait for up to 60 seconds for the RPC server to respond - self.rpc_timeout = 60 - if binary is None: - self.binary = os.getenv("BITCOIND", "bitcoind") - else: - self.binary = binary - self.stderr = stderr + self.rpc_timeout = timewait + self.binary = bitcoind self.coverage_dir = coverage_dir - # Most callers will just need to add extra args to the standard list below. For those callers that need more flexibity, they can just set the args property directly. + if extra_conf != None: + append_config(datadir, extra_conf) + # Most callers will just need to add extra args to the standard list below. + # For those callers that need more flexibility, they can just set the args property directly. 
+ # Note that common args are set in the config file (see initialize_datadir) self.extra_args = extra_args - self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i] + self.args = [ + self.binary, + "-datadir=" + self.datadir, + "-logtimemicros", + "-debug", + "-debugexclude=libevent", + "-debugexclude=leveldb", + "-mocktime=" + str(mocktime), + "-uacomment=testnode%d" % i + ] - self.cli = TestNodeCLI(os.getenv("BITCOINCLI", "bitcoin-cli"), self.datadir) + self.cli = TestNodeCLI(bitcoin_cli, self.datadir) + self.use_cli = use_cli self.running = False self.process = None @@ -62,19 +94,76 @@ def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mo self.rpc = None self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) + self.cleanup_on_exit = True # Whether to kill the node when this object goes away + + self.p2ps = [] + + def get_deterministic_priv_key(self): + """Return a deterministic priv key in base58, that only depends on the node's index""" + AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key']) + PRIV_KEYS = [ + # address , privkey + AddressKeyPair('mjTkW3DjgyZck4KbiRusZsqTgaYTxdSz6z', 'cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW'), + AddressKeyPair('msX6jQXvxiNhx3Q62PKeLPrhrqZQdSimTg', 'cUxsWyKyZ9MAQTaAhUQWJmBbSvHMwSmuv59KgxQV7oZQU3PXN3KE'), + AddressKeyPair('mnonCMyH9TmAsSj3M59DsbH8H63U3RKoFP', 'cTrh7dkEAeJd6b3MRX9bZK8eRmNqVCMH3LSUkE3dSFDyzjU38QxK'), + AddressKeyPair('mqJupas8Dt2uestQDvV2NH3RU8uZh2dqQR', 'cVuKKa7gbehEQvVq717hYcbE9Dqmq7KEBKqWgWrYBa2CKKrhtRim'), + AddressKeyPair('msYac7Rvd5ywm6pEmkjyxhbCDKqWsVeYws', 'cQDCBuKcjanpXDpCqacNSjYfxeQj8G6CAtH1Dsk3cXyqLNC4RPuh'), + AddressKeyPair('n2rnuUnwLgXqf9kk2kjvVm8R5BZK1yxQBi', 'cQakmfPSLSqKHyMFGwAqKHgWUiofJCagVGhiB4KCainaeCSxeyYq'), + AddressKeyPair('myzuPxRwsf3vvGzEuzPfK9Nf2RfwauwYe6', 'cQMpDLJwA8DBe9NcQbdoSb1BhmFxVjWD5gRyrLZCtpuF9Zi3a9RK'), + AddressKeyPair('mumwTaMtbxEPUswmLBBN3vM9oGRtGBrys8', 'cSXmRKXVcoouhNNVpcNKFfxsTsToY5pvB9DVsFksF1ENunTzRKsy'), + AddressKeyPair('mpV7aGShMkJCZgbW7F6iZgrvuPHjZjH9qg', 'cSoXt6tm3pqy43UMabY6eUTmR3eSUYFtB2iNQDGgb3VUnRsQys2k'), + ] + return PRIV_KEYS[self.index] + + def _node_msg(self, msg: str) -> str: + """Return a modified msg that identifies this node by its index as a debugging aid.""" + return "[node %d] %s" % (self.index, msg) + + def _raise_assertion_error(self, msg: str): + """Raise an AssertionError with msg modified to identify this node.""" + raise AssertionError(self._node_msg(msg)) + + def __del__(self): + # Ensure that we don't leave any bitcoind processes lying around after + # the test ends + if self.process and self.cleanup_on_exit: + # Should only happen on test failure + # Avoid using logger, as that may have already been shutdown when + # this destructor is called. 
+ print(self._node_msg("Cleaning up leftover process")) + self.process.kill() - def __getattr__(self, *args, **kwargs): - """Dispatches any unrecognised messages to the RPC connection.""" - assert self.rpc_connected and self.rpc is not None, "Error: no RPC connection" - return self.rpc.__getattr__(*args, **kwargs) + def __getattr__(self, name): + """Dispatches any unrecognised messages to the RPC connection or a CLI instance.""" + if self.use_cli: + return getattr(self.cli, name) + else: + assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection") + return getattr(self.rpc, name) - def start(self, extra_args=None, stderr=None): + def start(self, extra_args=None, *, stdout=None, stderr=None, **kwargs): """Start the node.""" if extra_args is None: extra_args = self.extra_args + + # Add a new stdout and stderr file each time bitcoind is started if stderr is None: - stderr = self.stderr - self.process = subprocess.Popen(self.args + extra_args, stderr=stderr) + stderr = tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) + if stdout is None: + stdout = tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) + self.stderr = stderr + self.stdout = stdout + + # Delete any existing cookie file -- if such a file exists (eg due to + # unclean shutdown), it will get overwritten anyway by bitcoind, and + # potentially interfere with our attempt to authenticate + delete_cookie_file(self.datadir) + + # add environment variable LIBC_FATAL_STDERR_=1 so that libc errors are written to stderr and not the terminal + subp_env = dict(os.environ, LIBC_FATAL_STDERR_="1") + + self.process = subprocess.Popen(self.args + extra_args, env=subp_env, stdout=stdout, stderr=stderr, **kwargs) + self.running = True self.log.debug("bitcoind started, waiting for RPC to come up") @@ -83,7 +172,9 @@ def wait_for_rpc_connection(self): # Poll at a rate of four times per second poll_per_s = 4 for _ in range(poll_per_s * self.rpc_timeout): - assert self.process.poll() is None, "bitcoind exited with status %i during initialization" % self.process.returncode + if self.process.poll() is not None: + raise FailedToStartError(self._node_msg( + 'bitcoind exited with status {} during initialization'.format(self.process.returncode))) try: self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir) self.rpc.getblockcount() @@ -102,15 +193,17 @@ def wait_for_rpc_connection(self): if "No RPC credentials" not in str(e): raise time.sleep(1.0 / poll_per_s) - raise AssertionError("Unable to connect to bitcoind") + self._raise_assertion_error("Unable to connect to bitcoind") def get_wallet_rpc(self, wallet_name): - assert self.rpc_connected - assert self.rpc - wallet_path = "wallet/%s" % wallet_name - return self.rpc / wallet_path + if self.use_cli: + return self.cli("-rpcwallet={}".format(wallet_name)) + else: + assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected") + wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name)) + return self.rpc / wallet_path - def stop_node(self): + def stop_node(self, expected_stderr=''): """Stop the node.""" if not self.running: return @@ -120,6 +213,17 @@ def stop_node(self): except http.client.CannotSendRequest: self.log.exception("Unable to stop node.") + # Check that stderr is as expected + self.stderr.seek(0) + stderr = self.stderr.read().decode('utf-8').strip() + if stderr != expected_stderr: + raise AssertionError("Unexpected stderr {} != 
{}".format(stderr, expected_stderr)) + + self.stdout.close() + self.stderr.close() + + del self.p2ps[:] + def is_node_stopped(self): """Checks whether the node has stopped. @@ -132,7 +236,8 @@ def is_node_stopped(self): return False # process has stopped. Assert that it didn't return an error code. - assert_equal(return_code, 0) + assert return_code == 0, self._node_msg( + "Node returned non-zero exit code (%d) when stopping" % return_code) self.running = False self.process = None self.rpc_connected = False @@ -143,48 +248,160 @@ def is_node_stopped(self): def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT): wait_until(self.is_node_stopped, timeout=timeout) - def node_encrypt_wallet(self, passphrase): - """"Encrypts the wallet. + @contextlib.contextmanager + def assert_debug_log(self, expected_msgs): + debug_log = os.path.join(self.datadir, 'regtest', 'debug.log') + with open(debug_log, encoding='utf-8') as dl: + dl.seek(0, 2) + prev_size = dl.tell() + try: + yield + finally: + with open(debug_log, encoding='utf-8') as dl: + dl.seek(prev_size) + log = dl.read() + print_log = " - " + "\n - ".join(log.splitlines()) + for expected_msg in expected_msgs: + if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None: + self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log)) + + def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs): + """Attempt to start the node and expect it to raise an error. + + extra_args: extra arguments to pass through to bitcoind + expected_msg: regex that stderr should match when bitcoind fails + + Will throw if bitcoind starts without an error. + Will throw if an expected_msg is provided and it does not match bitcoind's stdout.""" + with tempfile.NamedTemporaryFile(dir=self.stderr_dir, delete=False) as log_stderr, \ + tempfile.NamedTemporaryFile(dir=self.stdout_dir, delete=False) as log_stdout: + try: + self.start(extra_args, stdout=log_stdout, stderr=log_stderr, *args, **kwargs) + self.wait_for_rpc_connection() + self.stop_node() + self.wait_until_stopped() + except FailedToStartError as e: + self.log.debug('bitcoind failed to start: %s', e) + self.running = False + self.process = None + # Check stderr for expected message + if expected_msg is not None: + log_stderr.seek(0) + stderr = log_stderr.read().decode('utf-8').strip() + if match == ErrorMatch.PARTIAL_REGEX: + if re.search(expected_msg, stderr, flags=re.MULTILINE) is None: + self._raise_assertion_error( + 'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr)) + elif match == ErrorMatch.FULL_REGEX: + if re.fullmatch(expected_msg, stderr) is None: + self._raise_assertion_error( + 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) + elif match == ErrorMatch.FULL_TEXT: + if expected_msg != stderr: + self._raise_assertion_error( + 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr)) + else: + if expected_msg is None: + assert_msg = "bitcoind should have exited with an error" + else: + assert_msg = "bitcoind should have exited with expected error " + expected_msg + self._raise_assertion_error(assert_msg) + + def add_p2p_connection(self, p2p_conn, *, wait_for_verack=True, **kwargs): + """Add a p2p connection to the node. 
+ + This method adds the p2p connection to the self.p2ps list and also + returns the connection to the caller.""" + if 'dstport' not in kwargs: + kwargs['dstport'] = p2p_port(self.index) + if 'dstaddr' not in kwargs: + kwargs['dstaddr'] = '127.0.0.1' - This causes bitcoind to shutdown, so this method takes - care of cleaning up resources.""" - self.encryptwallet(passphrase) - self.wait_until_stopped() + p2p_conn.peer_connect(**kwargs)() + self.p2ps.append(p2p_conn) + if wait_for_verack: + p2p_conn.wait_for_verack() + + return p2p_conn + + @property + def p2p(self): + """Return the first p2p connection + + Convenience property - most tests only use a single p2p connection to each + node, so this saves having to write node.p2ps[0] many times.""" + assert self.p2ps, self._node_msg("No p2p connection") + return self.p2ps[0] + + def disconnect_p2ps(self): + """Close all p2p connections to the node.""" + for p in self.p2ps: + p.peer_disconnect() + del self.p2ps[:] + +class TestNodeCLIAttr: + def __init__(self, cli, command): + self.cli = cli + self.command = command + + def __call__(self, *args, **kwargs): + return self.cli.send_cli(self.command, *args, **kwargs) + + def get_request(self, *args, **kwargs): + return lambda: self(*args, **kwargs) class TestNodeCLI(): """Interface to bitcoin-cli for an individual node""" def __init__(self, binary, datadir): - self.args = [] + self.options = [] self.binary = binary self.datadir = datadir self.input = None + self.log = logging.getLogger('TestFramework.bitcoincli') - def __call__(self, *args, input=None): - # TestNodeCLI is callable with bitcoin-cli command-line args - self.args = [str(arg) for arg in args] - self.input = input - return self + def __call__(self, *options, input=None): + # TestNodeCLI is callable with bitcoin-cli command-line options + cli = TestNodeCLI(self.binary, self.datadir) + cli.options = [str(o) for o in options] + cli.input = input + return cli def __getattr__(self, command): - def dispatcher(*args, **kwargs): - return self.send_cli(command, *args, **kwargs) - return dispatcher + return TestNodeCLIAttr(self, command) - def send_cli(self, command, *args, **kwargs): - """Run bitcoin-cli command. Deserializes returned string as python object.""" + def batch(self, requests): + results = [] + for request in requests: + try: + results.append(dict(result=request())) + except JSONRPCException as e: + results.append(dict(error=e)) + return results - pos_args = [str(arg) for arg in args] + def send_cli(self, command=None, *args, **kwargs): + """Run bitcoin-cli command. 
Deserializes returned string as python object.""" + pos_args = [str(arg).lower() if type(arg) is bool else str(arg) for arg in args] named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()] assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoin-cli call" - p_args = [self.binary, "-datadir=" + self.datadir] + self.args + p_args = [self.binary, "-datadir=" + self.datadir] + self.options if named_args: p_args += ["-named"] - p_args += [command] + pos_args + named_args + if command is not None: + p_args += [command] + p_args += pos_args + named_args + self.log.debug("Running bitcoin-cli command: %s" % command) process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) cli_stdout, cli_stderr = process.communicate(input=self.input) returncode = process.poll() if returncode: + match = re.match(r'error code: ([-0-9]+)\nerror message:\n(.*)', cli_stderr) + if match: + code, message = match.groups() + raise JSONRPCException(dict(code=int(code), message=message)) # Ignore cli_stdout, raise with cli_stderr raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr) - return json.loads(cli_stdout, parse_float=decimal.Decimal) + try: + return json.loads(cli_stdout, parse_float=decimal.Decimal) + except JSONDecodeError: + return cli_stdout.rstrip("\n") diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index 102c90301..b355816d8 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Helpful routines for regression testing.""" @@ -8,6 +8,7 @@ from binascii import hexlify, unhexlify from decimal import Decimal, ROUND_DOWN import hashlib +import inspect import json import logging import os @@ -26,7 +27,7 @@ def assert_fee_amount(fee, tx_size, fee_per_kB): """Assert the fee was in range""" - target_fee = tx_size * fee_per_kB / 1000 + target_fee = round(tx_size * fee_per_kB / 1000, 8) if fee < target_fee: raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee))) # allow the wallet's estimation to be at most 2 bytes off @@ -204,9 +205,9 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N if attempts == float('inf') and timeout == float('inf'): timeout = 60 attempt = 0 - timeout += time.time() + time_end = time.time() + timeout - while attempt < attempts and time.time() < timeout: + while attempt < attempts and time.time() < time_end: if lock: with lock: if predicate(): @@ -218,8 +219,12 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N time.sleep(0.05) # Print the cause of the timeout - assert_greater_than(attempts, attempt) - assert_greater_than(timeout, time.time()) + predicate_source = inspect.getsourcelines(predicate) + logger.error("wait_until() failed. 
Predicate: {}".format(predicate_source)) + if attempt >= attempts: + raise AssertionError("Predicate {} not true after {} attempts".format(predicate_source, attempts)) + elif time.time() >= time_end: + raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout)) raise RuntimeError('Unreachable') # RPC/P2P connection constants and functions @@ -284,19 +289,31 @@ def rpc_url(datadir, i, rpchost=None): ################ def initialize_datadir(dirname, n): - datadir = os.path.join(dirname, "node" + str(n)) + datadir = get_datadir_path(dirname, n) if not os.path.isdir(datadir): os.makedirs(datadir) with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f: f.write("regtest=1\n") + f.write("[regtest]\n") f.write("port=" + str(p2p_port(n)) + "\n") f.write("rpcport=" + str(rpc_port(n)) + "\n") + f.write("server=1\n") + f.write("keypool=1\n") + f.write("discover=0\n") f.write("listenonion=0\n") + f.write("printtoconsole=0\n") + os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True) + os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True) return datadir def get_datadir_path(dirname, n): return os.path.join(dirname, "node" + str(n)) +def append_config(datadir, options): + with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f: + for option in options: + f.write(option + "\n") + def get_auth_cookie(datadir): user = None password = None @@ -310,7 +327,7 @@ def get_auth_cookie(datadir): assert password is None # Ensure that there is only one rpcpassword line password = line.split("=")[1].strip("\n") if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): - with open(os.path.join(datadir, "regtest", ".cookie"), 'r') as f: + with open(os.path.join(datadir, "regtest", ".cookie"), 'r', encoding="ascii") as f: userpass = f.read() split_userpass = userpass.split(':') user = split_userpass[0] @@ -319,8 +336,11 @@ def get_auth_cookie(datadir): raise ValueError("No RPC credentials") return user, password -def log_filename(dirname, n_node, logname): - return os.path.join(dirname, "node" + str(n_node), "regtest", logname) +# If a cookie file exists in the given datadir, delete it. +def delete_cookie_file(datadir): + if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")): + logger.debug("Deleting leftover cookie file") + os.remove(os.path.join(datadir, "regtest", ".cookie")) def get_bip9_status(node, key): info = node.getblockchaininfo() @@ -332,22 +352,24 @@ def set_node_times(nodes, t): def disconnect_nodes(from_connection, node_num): for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]: - from_connection.disconnectnode(nodeid=peer_id) - - for _ in range(50): - if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []: - break - time.sleep(0.1) - else: - raise AssertionError("timed out waiting for disconnect") + try: + from_connection.disconnectnode(nodeid=peer_id) + except JSONRPCException as e: + # If this node is disconnected between calculating the peer id + # and issuing the disconnect, don't worry about it. + # This avoids a race condition if we're mass-disconnecting peers. 
+ if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED + raise + + # wait to disconnect + wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5) def connect_nodes(from_connection, node_num): ip_port = "127.0.0.1:" + str(p2p_port(node_num)) from_connection.addnode(ip_port, "onetry") # poll until version handshake complete to avoid race conditions # with transaction relaying - while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()): - time.sleep(0.1) + wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo())) def connect_nodes_bi(nodes, a, b): connect_nodes(nodes[a], b) @@ -361,51 +383,29 @@ def sync_blocks(rpc_connections, *, wait=1, timeout=60): one node already synced to the latest, stable tip, otherwise there's a chance it might return before all nodes are stably synced. """ - # Use getblockcount() instead of waitforblockheight() to determine the - # initial max height because the two RPCs look at different internal global - # variables (chainActive vs latestBlock) and the former gets updated - # earlier. - maxheight = max(x.getblockcount() for x in rpc_connections) - start_time = cur_time = time.time() - while cur_time <= start_time + timeout: - tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections] - if all(t["height"] == maxheight for t in tips): - if all(t["hash"] == tips[0]["hash"] for t in tips): - return - raise AssertionError("Block sync failed, mismatched block hashes:{}".format( - "".join("\n {!r}".format(tip) for tip in tips))) - cur_time = time.time() - raise AssertionError("Block sync to height {} timed out:{}".format( - maxheight, "".join("\n {!r}".format(tip) for tip in tips))) - -def sync_chain(rpc_connections, *, wait=1, timeout=60): - """ - Wait until everybody has the same best block - """ - while timeout > 0: + stop_time = time.time() + timeout + while time.time() <= stop_time: best_hash = [x.getbestblockhash() for x in rpc_connections] - if best_hash == [best_hash[0]] * len(best_hash): + if best_hash.count(best_hash[0]) == len(rpc_connections): return time.sleep(wait) - timeout -= wait - raise AssertionError("Chain sync failed: Best block hashes don't match") + raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash))) -def sync_mempools(rpc_connections, *, wait=1, timeout=60): +def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True): """ Wait until everybody has the same transactions in their memory pools """ - while timeout > 0: - pool = set(rpc_connections[0].getrawmempool()) - num_match = 1 - for i in range(1, len(rpc_connections)): - if set(rpc_connections[i].getrawmempool()) == pool: - num_match = num_match + 1 - if num_match == len(rpc_connections): + stop_time = time.time() + timeout + while time.time() <= stop_time: + pool = [set(r.getrawmempool()) for r in rpc_connections] + if pool.count(pool[0]) == len(rpc_connections): + if flush_scheduler: + for r in rpc_connections: + r.syncwithvalidationinterfacequeue() return time.sleep(wait) - timeout -= wait - raise AssertionError("Mempool sync failed") + raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool))) # Transaction/Block functions ############################# @@ -469,7 +469,7 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants): outputs[to_node.getnewaddress()] = float(amount) rawtx = 
from_node.createrawtransaction(inputs, outputs) - signresult = from_node.signrawtransaction(rawtx) + signresult = from_node.signrawtransactionwithwallet(rawtx) txid = from_node.sendrawtransaction(signresult["hex"], True) return (txid, signresult["hex"], fee) @@ -496,7 +496,7 @@ def create_confirmed_utxos(fee, node, count): outputs[addr1] = satoshi_round(send_value / 2) outputs[addr2] = satoshi_round(send_value / 2) raw_tx = node.createrawtransaction(inputs, outputs) - signed_tx = node.signrawtransaction(raw_tx)["hex"] + signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"] node.sendrawtransaction(signed_tx) while (node.getmempoolinfo()['size'] > 0): @@ -526,14 +526,6 @@ def gen_return_txouts(): txouts = txouts + script_pubkey return txouts -def create_tx(node, coinbase, to_address, amount): - inputs = [{"txid": coinbase, "vout": 0}] - outputs = {to_address: amount} - rawtx = node.createrawtransaction(inputs, outputs) - signresult = node.signrawtransaction(rawtx) - assert_equal(signresult["complete"], True) - return signresult["hex"] - # Create a spend of each passed-in utxo, splicing in "txouts" to each raw # transaction to make it large. See gen_return_txouts() above. def create_lots_of_big_transactions(node, txouts, utxos, num, fee): @@ -549,7 +541,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee): newtx = rawtx[0:92] newtx = newtx + txouts newtx = newtx + rawtx[94:] - signresult = node.signrawtransaction(newtx, None, None, "NONE") + signresult = node.signrawtransactionwithwallet(newtx, None, "NONE") txid = node.sendrawtransaction(signresult["hex"], True) txids.append(txid) return txids @@ -566,3 +558,14 @@ def mine_large_block(node, utxos=None): fee = 100 * node.getnetworkinfo()["relayfee"] create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee) node.generate(1) + +def find_vout_for_address(node, txid, addr): + """ + Locate the vout index of the given transaction sending to the + given address. Raises runtime error exception if not found. + """ + tx = node.getrawtransaction(txid, True) + for i in range(len(tx["vout"])): + if any([addr == a for a in tx["vout"][i]["scriptPubKey"]["addresses"]]): + return i + raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr)) diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py index ca74bb68d..d9960460d 100755 --- a/test/functional/test_runner.py +++ b/test/functional/test_runner.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -# Copyright (c) 2014-2016 The Bitcoin Core developers +# Copyright (c) 2014-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Run regression test suite. @@ -15,6 +15,7 @@ """ import argparse +from collections import deque import configparser import datetime import os @@ -28,7 +29,7 @@ import logging # Formatting. Default colors to empty strings. 
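+# Each color is a (reset, set) pair: COLOR[1] switches the style on and
+# COLOR[0] switches it back off again, e.g. BOLD[1] + text + BOLD[0].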
-BOLD, BLUE, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "") +BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "") try: # Make sure python thinks it can write unicode to its stdout "\u2713".encode("utf_8").decode(sys.stdout.encoding) @@ -40,117 +41,158 @@ CROSS = "x " CIRCLE = "o " -if os.name == 'posix': +if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393): + if os.name == 'nt': + import ctypes + kernel32 = ctypes.windll.kernel32 + ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4 + STD_OUTPUT_HANDLE = -11 + STD_ERROR_HANDLE = -12 + # Enable ascii color control to stdout + stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE) + stdout_mode = ctypes.c_int32() + kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode)) + kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING) + # Enable ascii color control to stderr + stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE) + stderr_mode = ctypes.c_int32() + kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode)) + kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING) # primitive formatting on supported # terminal via ANSI escape sequences: BOLD = ('\033[0m', '\033[1m') - BLUE = ('\033[0m', '\033[0;34m') + GREEN = ('\033[0m', '\033[0;32m') RED = ('\033[0m', '\033[0;31m') GREY = ('\033[0m', '\033[1;30m') TEST_EXIT_PASSED = 0 TEST_EXIT_SKIPPED = 77 -BASE_SCRIPTS= [ +# 20 minutes represented in seconds +TRAVIS_TIMEOUT_DURATION = 20 * 60 + +BASE_SCRIPTS = [ # Scripts that are run by the travis build process. # Longest test should go first, to favor running tests in parallel - 'wallet-hd.py', - 'walletbackup.py', + 'feature_fee_estimation.py', + 'wallet_hd.py', + 'wallet_backup.py', # vv Tests less than 5m vv - 'p2p-fullblocktest.py', - 'fundrawtransaction.py', - 'p2p-compactblocks.py', - 'segwit.py', + 'mining_getblocktemplate_longpoll.py', + 'feature_maxuploadtarget.py', + 'feature_block.py', + 'rpc_fundrawtransaction.py', + 'p2p_compactblocks.py', + 'feature_segwit.py', # vv Tests less than 2m vv - 'wallet.py', - 'wallet-accounts.py', - 'p2p-segwit.py', - 'wallet-dump.py', - 'listtransactions.py', + 'wallet_basic.py', + 'wallet_labels.py', + 'p2p_segwit.py', + 'p2p_timeouts.py', + 'wallet_dump.py', + 'wallet_listtransactions.py', # vv Tests less than 60s vv - 'sendheaders.py', - 'zapwallettxes.py', - 'importmulti.py', + 'p2p_sendheaders.py', + 'wallet_zapwallettxes.py', + 'wallet_importmulti.py', 'mempool_limit.py', - 'merkle_blocks.py', - 'receivedby.py', - 'abandonconflict.py', - 'bip68-112-113-p2p.py', - 'rawtransactions.py', - 'reindex.py', + 'rpc_txoutproof.py', + 'wallet_listreceivedby.py', + 'wallet_abandonconflict.py', + 'feature_csv_activation.py', + 'rpc_rawtransaction.py', + 'wallet_address_types.py', + 'feature_bip68_sequence.py', + 'p2p_feefilter.py', + 'feature_reindex.py', # vv Tests less than 30s vv - 'keypool-topup.py', - 'zmq_test.py', - 'bitcoin_cli.py', - 'mempool_resurrect_test.py', - 'txn_doublespend.py --mineblock', - 'txn_clone.py', - 'getchaintips.py', - 'rest.py', - 'mempool_spendcoinbase.py', + 'wallet_keypool_topup.py', + 'interface_zmq.py', + 'interface_bitcoin_cli.py', + 'mempool_resurrect.py', + 'wallet_txn_doublespend.py --mineblock', + 'wallet_txn_clone.py', + 'wallet_txn_clone.py --segwit', + 'rpc_getchaintips.py', + 'interface_rest.py', + 'mempool_spend_coinbase.py', 'mempool_reorg.py', 'mempool_persist.py', - 'multiwallet.py', - 'httpbasics.py', - 'multi_rpc.py', - 'proxy_test.py', - 'signrawtransactions.py', - 'disconnect_ban.py', - 
'decodescript.py', - 'blockchain.py', - 'disablewallet.py', - 'net.py', - 'keypool.py', - 'p2p-mempool.py', - 'prioritise_transaction.py', - 'invalidblockrequest.py', - 'invalidtxrequest.py', - 'p2p-versionbits-warning.py', - 'preciousblock.py', - 'importprunedfunds.py', - 'signmessages.py', - 'nulldummy.py', - 'import-rescan.py', - 'mining.py', - 'bumpfee.py', - 'rpcnamedargs.py', - 'listsinceblock.py', - 'p2p-leaktests.py', - 'wallet-encryption.py', - 'bipdersig-p2p.py', - 'bip65-cltv-p2p.py', - 'uptime.py', - 'resendwallettransactions.py', - 'minchainwork.py', - 'p2p-acceptblock.py', + 'wallet_multiwallet.py', + 'wallet_multiwallet.py --usecli', + 'wallet_disableprivatekeys.py', + 'wallet_disableprivatekeys.py --usecli', + 'interface_http.py', + 'rpc_psbt.py', + 'rpc_users.py', + 'feature_proxy.py', + 'rpc_signrawtransaction.py', + 'wallet_groups.py', + 'p2p_disconnect_ban.py', + 'rpc_decodescript.py', + 'rpc_blockchain.py', + 'rpc_deprecated.py', + 'wallet_disable.py', + 'rpc_net.py', + 'wallet_keypool.py', + 'p2p_mempool.py', + 'mining_prioritisetransaction.py', + 'p2p_invalid_locator.py', + 'p2p_invalid_block.py', + 'p2p_invalid_tx.py', + 'feature_assumevalid.py', + 'example_test.py', + 'wallet_txn_doublespend.py', + 'wallet_txn_clone.py --mineblock', + 'feature_notifications.py', + 'rpc_invalidateblock.py', + 'feature_rbf.py', + 'mempool_packages.py', + 'rpc_createmultisig.py', + 'feature_versionbits_warning.py', + 'rpc_preciousblock.py', + 'wallet_importprunedfunds.py', + 'rpc_zmq.py', + 'rpc_signmessage.py', + 'feature_nulldummy.py', + 'mempool_accept.py', + 'wallet_import_rescan.py', + 'rpc_bind.py --ipv4', + 'rpc_bind.py --ipv6', + 'rpc_bind.py --nonloopback', + 'mining_basic.py', + 'wallet_bumpfee.py', + 'rpc_named_arguments.py', + 'wallet_listsinceblock.py', + 'p2p_leak.py', + 'wallet_encryption.py', + 'feature_dersig.py', + 'feature_cltv.py', + 'rpc_uptime.py', + 'wallet_resendwallettransactions.py', + 'wallet_fallbackfee.py', + 'feature_minchainwork.py', + 'rpc_getblockstats.py', + 'p2p_fingerprint.py', + 'feature_uacomment.py', + 'p2p_unrequested_blocks.py', + 'feature_includeconf.py', + 'rpc_scantxoutset.py', + 'feature_logging.py', + 'p2p_node_network_limited.py', + 'feature_blocksdir.py', + 'feature_config_args.py', + 'rpc_help.py', + 'feature_help.py', + # Don't append tests at the end to avoid merge conflicts + # Put them in a random line within the section that fits their approximate run-time ] EXTENDED_SCRIPTS = [ # These tests are not run by the travis build process. 
# Longest test should go first, to favor running tests in parallel - 'pruning.py', - # vv Tests less than 20m vv - 'smartfees.py', - # vv Tests less than 5m vv - 'maxuploadtarget.py', - 'mempool_packages.py', - 'dbcrash.py', - # vv Tests less than 2m vv - 'bip68-sequence.py', - 'getblocktemplate_longpoll.py', - 'p2p-timeouts.py', - # vv Tests less than 60s vv - 'bip9-softforks.py', - 'p2p-feefilter.py', - 'rpcbind_test.py', - # vv Tests less than 30s vv - 'assumevalid.py', - 'example_test.py', - 'txn_doublespend.py', - 'txn_clone.py --mineblock', - 'forknotify.py', - 'invalidateblock.py', - 'replace-by-fee.py', + 'feature_pruning.py', + 'feature_dbcrash.py', ] # Place EXTENDED_SCRIPTS first since it has the 3 longest running tests @@ -171,8 +213,9 @@ def main(): epilog=''' Help text and arguments for individual test script:''', formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.') parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface') - parser.add_argument('--exclude', '-x', help='specify a comma-seperated-list of scripts to exclude.') + parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.') parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests') parser.add_argument('--force', '-f', action='store_true', help='run tests even on platforms where they are disabled by default (e.g. windows).') parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit') @@ -180,6 +223,7 @@ def main(): parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.') parser.add_argument('--quiet', '-q', action='store_true', help='only print results summary and failure logs') parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs") + parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure') args, unknown_args = parser.parse_known_args() # args to be passed on always start with two dashes; tests are the remaining unknown args @@ -189,7 +233,7 @@ def main(): # Read config generated by configure. 
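+ # config.ini is generated by configure from test/config.ini.in; its
+ # [environment] and [components] sections are consulted below.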
config = configparser.ConfigParser() configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini" - config.read_file(open(configfile)) + config.read_file(open(configfile, encoding="utf8")) passon_args.append("--configfile=%s" % configfile) @@ -198,13 +242,16 @@ def main(): logging.basicConfig(format='%(message)s', level=logging_level) # Create base test directory - tmpdir = "%s/bitcoin_test_runner_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S")) + tmpdir = "%s/test_runner_₿_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S")) + + # If we fixed the command-line and filename encoding issue on Windows, these two lines could be removed + if config["environment"]["EXEEXT"] == ".exe": + tmpdir = "%s/test_runner_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S")) + os.makedirs(tmpdir) logging.debug("Temporary test directory at %s" % tmpdir) - enable_wallet = config["components"].getboolean("ENABLE_WALLET") - enable_utils = config["components"].getboolean("ENABLE_UTILS") enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND") if config["environment"]["EXEEXT"] == ".exe" and not args.force: @@ -213,38 +260,38 @@ def main(): print("Tests currently disabled on Windows by default. Use --force option to enable") sys.exit(0) - if not (enable_wallet and enable_utils and enable_bitcoind): - print("No functional tests to run. Wallet, utils, and bitcoind must all be enabled") - print("Rerun `configure` with -enable-wallet, -with-utils and -with-daemon and rerun make") + if not enable_bitcoind: + print("No functional tests to run.") + print("Rerun ./configure with --with-daemon and then make") sys.exit(0) # Build list of tests + test_list = [] if tests: # Individual tests have been specified. Run specified tests that exist # in the ALL_SCRIPTS list. Accept the name with or without .py extension. - tests = [re.sub("\.py$", "", t) + ".py" for t in tests] - test_list = [] - for t in tests: - if t in ALL_SCRIPTS: - test_list.append(t) + tests = [re.sub("\.py$", "", test) + ".py" for test in tests] + for test in tests: + if test in ALL_SCRIPTS: + test_list.append(test) else: - print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], t)) + print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test)) + elif args.extended: + # Include extended tests + test_list += ALL_SCRIPTS else: - # No individual tests have been specified. - # Run all base tests, and optionally run extended tests. - test_list = BASE_SCRIPTS - if args.extended: - # place the EXTENDED_SCRIPTS first since the three longest ones - # are there and the list is shorter - test_list = EXTENDED_SCRIPTS + test_list + # Run base tests only + test_list += BASE_SCRIPTS # Remove the test cases that the user has explicitly asked to exclude. 
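+ # Matching ignores the .py extension and any script arguments, so excluding
+ # "wallet_multiwallet" also removes "wallet_multiwallet.py --usecli".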
if args.exclude: - tests_excl = [re.sub("\.py$", "", t) + ".py" for t in args.exclude.split(',')] - for exclude_test in tests_excl: - if exclude_test in test_list: - test_list.remove(exclude_test) - else: + exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')] + for exclude_test in exclude_tests: + # Remove .py and .py --arg from the test list + exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test] + for exclude_item in exclude_list: + test_list.remove(exclude_item) + if not exclude_list: print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test)) if not test_list: @@ -255,17 +302,30 @@ def main(): if args.help: # Print help for test_runner.py, then print help of the first script (with args removed) and exit. parser.print_help() - subprocess.check_call([(config["environment"]["SRCDIR"] + '/test/functional/' + test_list[0].split()[0])] + ['-h']) + subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h']) sys.exit(0) check_script_list(config["environment"]["SRCDIR"]) + check_script_prefixes() if not args.keepcache: shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True) - run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args) + run_tests( + test_list, + config["environment"]["SRCDIR"], + config["environment"]["BUILDDIR"], + tmpdir, + jobs=args.jobs, + enable_coverage=args.coverage, + args=passon_args, + combined_logs_len=args.combinedlogslen, + failfast=args.failfast, + ) + +def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False): + args = args or [] -def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[]): # Warn if bitcoind is already running (unix only) try: if subprocess.check_output(["pidof", "bitcoind"]) is not None: @@ -278,15 +338,9 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove if os.path.isdir(cache_dir): print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." 
% (BOLD[1], BOLD[0], cache_dir)) - #Set env vars - if "BITCOIND" not in os.environ: - os.environ["BITCOIND"] = build_dir + '/src/bitcoind' + exeext - os.environ["BITCOINCLI"] = build_dir + '/src/bitcoin-cli' + exeext - tests_dir = src_dir + '/test/functional/' - flags = ["--srcdir={}/src".format(build_dir)] + args - flags.append("--cachedir=%s" % cache_dir) + flags = ['--cachedir={}'.format(cache_dir)] + args if enable_coverage: coverage = RPCCoverage() @@ -297,17 +351,21 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove if len(test_list) > 1 and jobs > 1: # Populate cache - subprocess.check_output([tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir]) + try: + subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir]) + except subprocess.CalledProcessError as e: + sys.stdout.buffer.write(e.output) + raise #Run Tests job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags) - time0 = time.time() + start_time = time.time() test_results = [] max_len_name = len(max(test_list, key=len)) for _ in range(len(test_list)): - test_result, stdout, stderr = job_queue.get_next() + test_result, testdir, stdout, stderr = job_queue.get_next() test_results.append(test_result) if test_result.status == "Passed": @@ -318,8 +376,23 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove print("\n%s%s%s failed, Duration: %s s\n" % (BOLD[1], test_result.name, BOLD[0], test_result.time)) print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n') print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n') - - print_results(test_results, max_len_name, (int(time.time() - time0))) + if combined_logs_len and os.path.isdir(testdir): + # Print the final `combinedlogslen` lines of the combined logs + print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0])) + print('\n============') + print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0])) + print('============\n') + combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir] + if BOLD[0]: + combined_logs_args += ['--color'] + combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate() + print("\n".join(deque(combined_logs.splitlines(), combined_logs_len))) + + if failfast: + logging.debug("Early exiting after test failure") + break + + print_results(test_results, max_len_name, (int(time.time() - start_time))) if coverage: coverage.report_rpc_coverage() @@ -333,12 +406,16 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove all_passed = all(map(lambda test_result: test_result.was_successful, test_results)) + # This will be a no-op unless failfast is True in which case there may be dangling + # processes which need to be killed. 
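+ # kill_and_join() SIGKILLs any still-running test child and waits for it,
+ # so the runner can exit cleanly after an early --failfast abort.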
+ job_queue.kill_and_join() + sys.exit(not all_passed) def print_results(test_results, max_len_name, runtime): results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0] - test_results.sort(key=lambda result: result.name.lower()) + test_results.sort(key=TestResult.sort_key) all_passed = True time_sum = 0 @@ -349,13 +426,17 @@ def print_results(test_results, max_len_name, runtime): results += str(test_result) status = TICK + "Passed" if all_passed else CROSS + "Failed" + if not all_passed: + results += RED[1] results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0] + if not all_passed: + results += RED[0] results += "Runtime: %s s\n" % (runtime) print(results) class TestHandler: """ - Trigger the testscrips passed in via the list. + Trigger the test scripts passed in via the list. """ def __init__(self, num_tests_parallel, tests_dir, tmpdir, test_list=None, flags=None): @@ -366,29 +447,27 @@ def __init__(self, num_tests_parallel, tests_dir, tmpdir, test_list=None, flags= self.test_list = test_list self.flags = flags self.num_running = 0 - # In case there is a graveyard of zombie bitcoinds, we can apply a - # pseudorandom offset to hopefully jump over them. - # (625 is PORT_RANGE/MAX_NODES) - self.portseed_offset = int(time.time() * 1000) % 625 self.jobs = [] def get_next(self): while self.num_running < self.num_jobs and self.test_list: # Add tests self.num_running += 1 - t = self.test_list.pop(0) - portseed = len(self.test_list) + self.portseed_offset + test = self.test_list.pop(0) + portseed = len(self.test_list) portseed_arg = ["--portseed={}".format(portseed)] log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16) log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16) - test_argv = t.split() - tmpdir = ["--tmpdir=%s/%s_%s" % (self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)] - self.jobs.append((t, + test_argv = test.split() + testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed) + tmpdir_arg = ["--tmpdir={}".format(testdir)] + self.jobs.append((test, time.time(), - subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir, + subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg, universal_newlines=True, stdout=log_stdout, stderr=log_stderr), + testdir, log_stdout, log_stderr)) if not self.jobs: @@ -396,15 +475,14 @@ def get_next(self): while True: # Return first proc that finishes time.sleep(.5) - for j in self.jobs: - (name, time0, proc, log_out, log_err) = j - if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60: - # In travis, timeout individual tests after 20 minutes (to stop tests hanging and not - # providing useful output. + for job in self.jobs: + (name, start_time, proc, testdir, log_out, log_err) = job + if os.getenv('TRAVIS') == 'true' and int(time.time() - start_time) > TRAVIS_TIMEOUT_DURATION: + # In travis, timeout individual tests (to stop tests hanging and not providing useful output). 
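+ # SIGINT rather than SIGKILL, so the test still gets a chance to shut
+ # down and report something useful before the runner collects its output.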
proc.send_signal(signal.SIGINT) if proc.poll() is not None: log_out.seek(0), log_err.seek(0) - [stdout, stderr] = [l.read().decode('utf-8') for l in (log_out, log_err)] + [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)] log_out.close(), log_err.close() if proc.returncode == TEST_EXIT_PASSED and stderr == "": status = "Passed" @@ -413,11 +491,22 @@ def get_next(self): else: status = "Failed" self.num_running -= 1 - self.jobs.remove(j) + self.jobs.remove(job) - return TestResult(name, status, int(time.time() - time0)), stdout, stderr + return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr print('.', end='', flush=True) + def kill_and_join(self): + """Send SIGKILL to all jobs and block until all have ended.""" + procs = [i[2] for i in self.jobs] + + for proc in procs: + proc.kill() + + for proc in procs: + proc.wait() + + class TestResult(): def __init__(self, name, status, time): self.name = name @@ -425,9 +514,17 @@ def __init__(self, name, status, time): self.time = time self.padding = 0 + def sort_key(self): + if self.status == "Passed": + return 0, self.name.lower() + elif self.status == "Failed": + return 2, self.name.lower() + elif self.status == "Skipped": + return 1, self.name.lower() + def __repr__(self): if self.status == "Passed": - color = BLUE + color = GREEN glyph = TICK elif self.status == "Failed": color = RED @@ -443,13 +540,25 @@ def was_successful(self): return self.status != "Failed" +def check_script_prefixes(): + """Check that test scripts start with one of the allowed name prefixes.""" + + good_prefixes_re = re.compile("(example|feature|interface|mempool|mining|p2p|rpc|wallet)_") + bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None] + + if bad_script_names: + print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names))) + print(" %s" % ("\n ".join(sorted(bad_script_names)))) + raise AssertionError("Some tests are not following naming convention!") + + def check_script_list(src_dir): """Check scripts directory. Check that there are no scripts in the functional tests directory which are not being run by pull-tester.py.""" script_dir = src_dir + '/test/functional/' - python_files = set([t for t in os.listdir(script_dir) if t[-3:] == ".py"]) + python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")]) missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS))) if len(missed_tests) != 0: print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests))) @@ -457,7 +566,7 @@ def check_script_list(src_dir): # On travis this warning is an error to prevent merging incomplete commits into master sys.exit(1) -class RPCCoverage(object): +class RPCCoverage(): """ Coverage reporting utilities for test_runner. 
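The hunks below only rename loop variables and pin the files' encodings, but the surrounding logic is easier to review with RPCCoverage's whole bookkeeping in view: a reference file lists every RPC command, each test run drops files naming the commands it actually called, and the report is plain set arithmetic. A minimal standalone sketch of that flow (the coverage file-name prefix here is illustrative, not the framework's actual constant):

    import os

    def uncovered_rpc_commands(coverage_dir, reference_file, prefix="coverage."):
        """Commands listed in reference_file but missing from every coverage
        file found under coverage_dir (illustrative sketch only)."""
        with open(reference_file, encoding="utf8") as ref:
            all_cmds = {line.strip() for line in ref if line.strip()}
        covered = set()
        for root, _dirs, files in os.walk(coverage_dir):
            for name in files:
                if name.startswith(prefix):
                    with open(os.path.join(root, name), encoding="utf8") as cov:
                        covered.update(line.strip() for line in cov if line.strip())
        return all_cmds - covered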
@@ -485,7 +594,7 @@ def report_rpc_coverage(self): if uncovered: print("Uncovered RPC commands:") - print("".join((" - %s\n" % i) for i in sorted(uncovered))) + print("".join((" - %s\n" % command) for command in sorted(uncovered))) else: print("All RPC commands covered.") @@ -509,8 +618,8 @@ def _get_uncovered_rpc_commands(self): if not os.path.isfile(coverage_ref_filename): raise RuntimeError("No coverage reference found") - with open(coverage_ref_filename, 'r') as f: - all_cmds.update([i.strip() for i in f.readlines()]) + with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file: + all_cmds.update([line.strip() for line in coverage_ref_file.readlines()]) for root, dirs, files in os.walk(self.dir): for filename in files: @@ -518,8 +627,8 @@ def _get_uncovered_rpc_commands(self): coverage_filenames.add(os.path.join(root, filename)) for filename in coverage_filenames: - with open(filename, 'r') as f: - covered_cmds.update([i.strip() for i in f.readlines()]) + with open(filename, 'r', encoding="utf8") as coverage_file: + covered_cmds.update([line.strip() for line in coverage_file.readlines()]) return all_cmds - covered_cmds diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py new file mode 100755 index 000000000..e5ac2c8bd --- /dev/null +++ b/test/functional/wallet_abandonconflict.py @@ -0,0 +1,176 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the abandontransaction RPC. + + The abandontransaction RPC marks a transaction and all its in-wallet + descendants as abandoned which allows their inputs to be respent. It can be + used to replace "stuck" or evicted transactions. It only works on transactions + which are not included in a block and are not currently in the mempool. It has + no effect on transactions which are already abandoned. 
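+
+ (In lieu of real mempool eviction, the test clears transactions from the
+ mempool by restarting the node with a higher -minrelaytxfee; see the TODO
+ in the test body about redoing this with eviction.)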
+""" +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes, disconnect_nodes, sync_blocks, sync_mempools + +class AbandonConflictTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.extra_args = [["-minrelaytxfee=0.00001"], []] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.nodes[1].generate(100) + sync_blocks(self.nodes) + balance = self.nodes[0].getbalance() + txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) + txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) + txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10")) + sync_mempools(self.nodes) + self.nodes[1].generate(1) + + # Can not abandon non-wallet transaction + assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', lambda: self.nodes[0].abandontransaction(txid='ff' * 32)) + # Can not abandon confirmed transaction + assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: self.nodes[0].abandontransaction(txid=txA)) + + sync_blocks(self.nodes) + newbalance = self.nodes[0].getbalance() + assert(balance - newbalance < Decimal("0.001")) #no more than fees lost + balance = newbalance + + # Disconnect nodes so node0's transactions don't get into node1's mempool + disconnect_nodes(self.nodes[0], 1) + + # Identify the 10btc outputs + nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10")) + nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10")) + nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10")) + + inputs =[] + # spend 10btc outputs from txA and txB + inputs.append({"txid":txA, "vout":nA}) + inputs.append({"txid":txB, "vout":nB}) + outputs = {} + + outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998") + outputs[self.nodes[1].getnewaddress()] = Decimal("5") + signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) + txAB1 = self.nodes[0].sendrawtransaction(signed["hex"]) + + # Identify the 14.99998btc output + nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998")) + + #Create a child tx spending AB1 and C + inputs = [] + inputs.append({"txid":txAB1, "vout":nAB}) + inputs.append({"txid":txC, "vout":nC}) + outputs = {} + outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996") + signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) + txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"]) + + # Create a child tx spending ABC2 + signed3_change = Decimal("24.999") + inputs = [ {"txid":txABC2, "vout":0} ] + outputs = { self.nodes[0].getnewaddress(): signed3_change } + signed3 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs)) + # note tx is never directly referenced, only abandoned as a child of the above + self.nodes[0].sendrawtransaction(signed3["hex"]) + + # In mempool txs from self should increase balance from change + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance - Decimal("30") + signed3_change) + balance = newbalance + + # Restart the node with a higher 
min relay fee so the parent tx is no longer in mempool + # TODO: redo with eviction + self.stop_node(0) + self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) + + # Verify txs no longer in either node's mempool + assert_equal(len(self.nodes[0].getrawmempool()), 0) + assert_equal(len(self.nodes[1].getrawmempool()), 0) + + # Not in mempool txs from self should only reduce balance + # inputs are still spent, but change not received + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance - signed3_change) + # Unconfirmed received funds that are not in mempool, also shouldn't show + # up in unconfirmed balance + unconfbalance = self.nodes[0].getunconfirmedbalance() + self.nodes[0].getbalance() + assert_equal(unconfbalance, newbalance) + # Also shouldn't show up in listunspent + assert(not txABC2 in [utxo["txid"] for utxo in self.nodes[0].listunspent(0)]) + balance = newbalance + + # Abandon original transaction and verify inputs are available again + # including that the child tx was also abandoned + self.nodes[0].abandontransaction(txAB1) + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance + Decimal("30")) + balance = newbalance + + # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned + self.stop_node(0) + self.start_node(0, extra_args=["-minrelaytxfee=0.00001"]) + assert_equal(len(self.nodes[0].getrawmempool()), 0) + assert_equal(self.nodes[0].getbalance(), balance) + + # But if it is received again then it is unabandoned + # And since now in mempool, the change is available + # But its child tx remains abandoned + self.nodes[0].sendrawtransaction(signed["hex"]) + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998")) + balance = newbalance + + # Send child tx again so it is unabandoned + self.nodes[0].sendrawtransaction(signed2["hex"]) + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996")) + balance = newbalance + + # Remove using high relay fee again + self.stop_node(0) + self.start_node(0, extra_args=["-minrelaytxfee=0.0001"]) + assert_equal(len(self.nodes[0].getrawmempool()), 0) + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance - Decimal("24.9996")) + balance = newbalance + + # Create a double spend of AB1 by spending again from only A's 10 output + # Mine double spend from node 1 + inputs =[] + inputs.append({"txid":txA, "vout":nA}) + outputs = {} + outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999") + tx = self.nodes[0].createrawtransaction(inputs, outputs) + signed = self.nodes[0].signrawtransactionwithwallet(tx) + self.nodes[1].sendrawtransaction(signed["hex"]) + self.nodes[1].generate(1) + + connect_nodes(self.nodes[0], 1) + sync_blocks(self.nodes) + + # Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted + newbalance = self.nodes[0].getbalance() + assert_equal(newbalance, balance + Decimal("20")) + balance = newbalance + + # There is currently a minor bug around this and so this test doesn't work. 
See Issue #7315
+ # Invalidate the block with the double spend and B's 10 BTC output should no longer be available
+ # Don't think C's should either
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ newbalance = self.nodes[0].getbalance()
+ #assert_equal(newbalance, balance - Decimal("10"))
+ self.log.info("If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer")
+ self.log.info("conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315")
+ self.log.info(str(balance) + " -> " + str(newbalance) + " ?")
+
+if __name__ == '__main__':
+ AbandonConflictTest().main()
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
new file mode 100755
index 000000000..0f75045c9
--- /dev/null
+++ b/test/functional/wallet_address_types.py
@@ -0,0 +1,301 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test that the wallet can send and receive using all combinations of address types.
+
+There are 5 nodes-under-test:
+ - node0 uses legacy addresses
+ - node1 uses p2sh/segwit addresses
+ - node2 uses p2sh/segwit addresses and bech32 addresses for change
+ - node3 uses bech32 addresses
+ - node4 uses p2sh/segwit addresses for change
+
+node5 exists to generate new blocks.
+
+## Multisig address test
+
+Test that adding a multisig address with:
+ - an uncompressed pubkey always gives a legacy address
+ - only compressed pubkeys gives an `-addresstype` address
+
+## Sending to address types test
+
+A series of tests, iterating over node0-node4. In each iteration of the test, one node sends:
+ - 10/101th of its balance to itself (using getrawchangeaddress for single key addresses)
+ - 20/101th to the next node
+ - 30/101th to the node after that
+ - 40/101th to the remaining node
+ - 1/101th remains as fee+change
+
+Iterate over each node for single key addresses, and then over each node for
+multisig addresses.
+
+Repeat test, but with explicit address_type parameters passed to getnewaddress
+and getrawchangeaddress:
+ - node0 and node3 send to p2sh.
+ - node1 sends to bech32.
+ - node2 sends to legacy.
+
+As every node sends coins after receiving, this also
+verifies that spending coins sent to all these address types works.
+
+## Change type test
+
+Test that the nodes generate the correct change address type:
+ - node0 always uses a legacy change address.
+ - node1 uses a bech32 address for change if any destination address is bech32.
+ - node2 always uses a bech32 address for change.
+ - node3 always uses a bech32 address for change.
+ - node4 always uses a p2sh/segwit output for change. 
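+
+For example, when node1 pays only a p2sh destination its change stays
+p2sh-segwit, but adding a single bech32 destination flips the change output
+to bech32 (exercised by test_change_output_type below).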
+""" + +from decimal import Decimal +import itertools + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than, + assert_raises_rpc_error, + connect_nodes_bi, + sync_blocks, + sync_mempools, +) + + +class AddressTypeTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 6 + self.extra_args = [ + ["-addresstype=legacy"], + ["-addresstype=p2sh-segwit"], + ["-addresstype=p2sh-segwit", "-changetype=bech32"], + ["-addresstype=bech32"], + ["-changetype=p2sh-segwit"], + [], + ] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.setup_nodes() + + # Fully mesh-connect nodes for faster mempool sync + for i, j in itertools.product(range(self.num_nodes), repeat=2): + if i > j: + connect_nodes_bi(self.nodes, i, j) + self.sync_all() + + def get_balances(self, confirmed=True): + """Return a list of confirmed or unconfirmed balances.""" + if confirmed: + return [self.nodes[i].getbalance() for i in range(4)] + else: + return [self.nodes[i].getunconfirmedbalance() for i in range(4)] + + def test_address(self, node, address, multisig, typ): + """Run sanity checks on an address.""" + info = self.nodes[node].getaddressinfo(address) + assert(self.nodes[node].validateaddress(address)['isvalid']) + if not multisig and typ == 'legacy': + # P2PKH + assert(not info['isscript']) + assert(not info['iswitness']) + assert('pubkey' in info) + elif not multisig and typ == 'p2sh-segwit': + # P2SH-P2WPKH + assert(info['isscript']) + assert(not info['iswitness']) + assert_equal(info['script'], 'witness_v0_keyhash') + assert('pubkey' in info) + elif not multisig and typ == 'bech32': + # P2WPKH + assert(not info['isscript']) + assert(info['iswitness']) + assert_equal(info['witness_version'], 0) + assert_equal(len(info['witness_program']), 40) + assert('pubkey' in info) + elif typ == 'legacy': + # P2SH-multisig + assert(info['isscript']) + assert_equal(info['script'], 'multisig') + assert(not info['iswitness']) + assert('pubkeys' in info) + elif typ == 'p2sh-segwit': + # P2SH-P2WSH-multisig + assert(info['isscript']) + assert_equal(info['script'], 'witness_v0_scripthash') + assert(not info['iswitness']) + assert(info['embedded']['isscript']) + assert_equal(info['embedded']['script'], 'multisig') + assert(info['embedded']['iswitness']) + assert_equal(info['embedded']['witness_version'], 0) + assert_equal(len(info['embedded']['witness_program']), 64) + assert('pubkeys' in info['embedded']) + elif typ == 'bech32': + # P2WSH-multisig + assert(info['isscript']) + assert_equal(info['script'], 'multisig') + assert(info['iswitness']) + assert_equal(info['witness_version'], 0) + assert_equal(len(info['witness_program']), 64) + assert('pubkeys' in info) + else: + # Unknown type + assert(False) + + def test_change_output_type(self, node_sender, destinations, expected_type): + txid = self.nodes[node_sender].sendmany(dummy="", amounts=dict.fromkeys(destinations, 0.001)) + raw_tx = self.nodes[node_sender].getrawtransaction(txid) + tx = self.nodes[node_sender].decoderawtransaction(raw_tx) + + # Make sure the transaction has change: + assert_equal(len(tx["vout"]), len(destinations) + 1) + + # Make sure the destinations are included, and remove them: + output_addresses = [vout['scriptPubKey']['addresses'][0] for vout in tx["vout"]] + change_addresses = [d for d in output_addresses if d not in destinations] + assert_equal(len(change_addresses), 1) + + self.log.debug("Check if change address 
" + change_addresses[0] + " is " + expected_type) + self.test_address(node_sender, change_addresses[0], multisig=False, typ=expected_type) + + def run_test(self): + # Mine 101 blocks on node5 to bring nodes out of IBD and make sure that + # no coinbases are maturing for the nodes-under-test during the test + self.nodes[5].generate(101) + sync_blocks(self.nodes) + + uncompressed_1 = "0496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858ee" + uncompressed_2 = "047211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073dee6c89064984f03385237d92167c13e236446b417ab79a0fcae412ae3316b77" + compressed_1 = "0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52" + compressed_2 = "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073" + + # addmultisigaddress with at least 1 uncompressed key should return a legacy address. + for node in range(4): + self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, uncompressed_2])['address'], True, 'legacy') + self.test_address(node, self.nodes[node].addmultisigaddress(2, [compressed_1, uncompressed_2])['address'], True, 'legacy') + self.test_address(node, self.nodes[node].addmultisigaddress(2, [uncompressed_1, compressed_2])['address'], True, 'legacy') + # addmultisigaddress with all compressed keys should return the appropriate address type (even when the keys are not ours). + self.test_address(0, self.nodes[0].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'legacy') + self.test_address(1, self.nodes[1].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit') + self.test_address(2, self.nodes[2].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'p2sh-segwit') + self.test_address(3, self.nodes[3].addmultisigaddress(2, [compressed_1, compressed_2])['address'], True, 'bech32') + + for explicit_type, multisig, from_node in itertools.product([False, True], [False, True], range(4)): + address_type = None + if explicit_type and not multisig: + if from_node == 1: + address_type = 'bech32' + elif from_node == 0 or from_node == 3: + address_type = 'p2sh-segwit' + else: + address_type = 'legacy' + self.log.info("Sending from node {} ({}) with{} multisig using {}".format(from_node, self.extra_args[from_node], "" if multisig else "out", "default" if address_type is None else address_type)) + old_balances = self.get_balances() + self.log.debug("Old balances are {}".format(old_balances)) + to_send = (old_balances[from_node] / 101).quantize(Decimal("0.00000001")) + sends = {} + + self.log.debug("Prepare sends") + for n, to_node in enumerate(range(from_node, from_node + 4)): + to_node %= 4 + change = False + if not multisig: + if from_node == to_node: + # When sending non-multisig to self, use getrawchangeaddress + address = self.nodes[to_node].getrawchangeaddress(address_type=address_type) + change = True + else: + address = self.nodes[to_node].getnewaddress(address_type=address_type) + else: + addr1 = self.nodes[to_node].getnewaddress() + addr2 = self.nodes[to_node].getnewaddress() + address = self.nodes[to_node].addmultisigaddress(2, [addr1, addr2])['address'] + + # Do some sanity checking on the created address + if address_type is not None: + typ = address_type + elif to_node == 0: + typ = 'legacy' + elif to_node == 1 or (to_node == 2 and not change): + typ = 'p2sh-segwit' + else: + typ = 'bech32' + self.test_address(to_node, address, multisig, typ) + + # Output entry + 
sends[address] = to_send * 10 * (1 + n) + + self.log.debug("Sending: {}".format(sends)) + self.nodes[from_node].sendmany("", sends) + sync_mempools(self.nodes) + + unconf_balances = self.get_balances(False) + self.log.debug("Check unconfirmed balances: {}".format(unconf_balances)) + assert_equal(unconf_balances[from_node], 0) + for n, to_node in enumerate(range(from_node + 1, from_node + 4)): + to_node %= 4 + assert_equal(unconf_balances[to_node], to_send * 10 * (2 + n)) + + # node5 collects fee and block subsidy to keep accounting simple + self.nodes[5].generate(1) + sync_blocks(self.nodes) + + new_balances = self.get_balances() + self.log.debug("Check new balances: {}".format(new_balances)) + # We don't know what fee was set, so we can only check bounds on the balance of the sending node + assert_greater_than(new_balances[from_node], to_send * 10) + assert_greater_than(to_send * 11, new_balances[from_node]) + for n, to_node in enumerate(range(from_node + 1, from_node + 4)): + to_node %= 4 + assert_equal(new_balances[to_node], old_balances[to_node] + to_send * 10 * (2 + n)) + + # Get one p2sh/segwit address from node2 and two bech32 addresses from node3: + to_address_p2sh = self.nodes[2].getnewaddress() + to_address_bech32_1 = self.nodes[3].getnewaddress() + to_address_bech32_2 = self.nodes[3].getnewaddress() + + # Fund node 4: + self.nodes[5].sendtoaddress(self.nodes[4].getnewaddress(), Decimal("1")) + self.nodes[5].generate(1) + sync_blocks(self.nodes) + assert_equal(self.nodes[4].getbalance(), 1) + + self.log.info("Nodes with addresstype=legacy never use a P2WPKH change output") + self.test_change_output_type(0, [to_address_bech32_1], 'legacy') + + self.log.info("Nodes with addresstype=p2sh-segwit only use a P2WPKH change output if any destination address is bech32:") + self.test_change_output_type(1, [to_address_p2sh], 'p2sh-segwit') + self.test_change_output_type(1, [to_address_bech32_1], 'bech32') + self.test_change_output_type(1, [to_address_p2sh, to_address_bech32_1], 'bech32') + self.test_change_output_type(1, [to_address_bech32_1, to_address_bech32_2], 'bech32') + + self.log.info("Nodes with change_type=bech32 always use a P2WPKH change output:") + self.test_change_output_type(2, [to_address_bech32_1], 'bech32') + self.test_change_output_type(2, [to_address_p2sh], 'bech32') + + self.log.info("Nodes with addresstype=bech32 always use a P2WPKH change output (unless changetype is set otherwise):") + self.test_change_output_type(3, [to_address_bech32_1], 'bech32') + self.test_change_output_type(3, [to_address_p2sh], 'bech32') + + self.log.info('getrawchangeaddress defaults to addresstype if -changetype is not set and argument is absent') + self.test_address(3, self.nodes[3].getrawchangeaddress(), multisig=False, typ='bech32') + + self.log.info('test invalid address type arguments') + assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].addmultisigaddress, 2, [compressed_1, compressed_2], None, '') + assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getnewaddress, None, '') + assert_raises_rpc_error(-5, "Unknown address type ''", self.nodes[3].getrawchangeaddress, '') + assert_raises_rpc_error(-5, "Unknown address type 'bech23'", self.nodes[3].getrawchangeaddress, 'bech23') + + self.log.info("Nodes with changetype=p2sh-segwit never use a P2WPKH change output") + self.test_change_output_type(4, [to_address_bech32_1], 'p2sh-segwit') + self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit') + self.log.info("Except 
for getrawchangeaddress if specified:") + self.test_address(4, self.nodes[4].getrawchangeaddress(), multisig=False, typ='p2sh-segwit') + self.test_address(4, self.nodes[4].getrawchangeaddress('bech32'), multisig=False, typ='bech32') + +if __name__ == '__main__': + AddressTypeTest().main() diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py new file mode 100755 index 000000000..32ec385fa --- /dev/null +++ b/test/functional/wallet_backup.py @@ -0,0 +1,210 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the wallet backup features. + +Test case is: +4 nodes. 1 2 and 3 send transactions between each other, +fourth node is a miner. +1 2 3 each mine a block to start, then +Miner creates 100 blocks so 1 2 3 each have 50 mature +coins to spend. +Then 5 iterations of 1/2/3 sending coins amongst +themselves to get transactions in the wallets, +and the miner mining one block. + +Wallets are backed up using dumpwallet/backupwallet. +Then 5 more iterations of transactions and mining a block. + +Miner then generates 101 more blocks, so any +transaction fees paid mature. + +Sanity check: + Sum(1,2,3,4 balances) == 114*50 + +1/2/3 are shutdown, and their wallets erased. +Then restore using wallet.dat backup. And +confirm 1/2/3/4 balances are same as before. + +Shutdown again, restore using importwallet, +and confirm again balances are correct. +""" +from decimal import Decimal +import os +from random import randint +import shutil + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes, sync_blocks, sync_mempools + +class WalletBackupTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = True + # nodes 1, 2,3 are spenders, let's give them a keypool=100 + self.extra_args = [["-keypool=100"], ["-keypool=100"], ["-keypool=100"], []] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self, split=False): + self.setup_nodes() + connect_nodes(self.nodes[0], 3) + connect_nodes(self.nodes[1], 3) + connect_nodes(self.nodes[2], 3) + connect_nodes(self.nodes[2], 0) + self.sync_all() + + def one_send(self, from_node, to_address): + if (randint(1,2) == 1): + amount = Decimal(randint(1,10)) / Decimal(10) + self.nodes[from_node].sendtoaddress(to_address, amount) + + def do_one_round(self): + a0 = self.nodes[0].getnewaddress() + a1 = self.nodes[1].getnewaddress() + a2 = self.nodes[2].getnewaddress() + + self.one_send(0, a1) + self.one_send(0, a2) + self.one_send(1, a0) + self.one_send(1, a2) + self.one_send(2, a0) + self.one_send(2, a1) + + # Have the miner (node3) mine a block. + # Must sync mempools before mining. + sync_mempools(self.nodes) + self.nodes[3].generate(1) + sync_blocks(self.nodes) + + # As above, this mirrors the original bash test. 
+ def start_three(self): + self.start_node(0) + self.start_node(1) + self.start_node(2) + connect_nodes(self.nodes[0], 3) + connect_nodes(self.nodes[1], 3) + connect_nodes(self.nodes[2], 3) + connect_nodes(self.nodes[2], 0) + + def stop_three(self): + self.stop_node(0) + self.stop_node(1) + self.stop_node(2) + + def erase_three(self): + os.remove(os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat')) + os.remove(os.path.join(self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat')) + os.remove(os.path.join(self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat')) + + def run_test(self): + self.log.info("Generating initial blockchain") + self.nodes[0].generate(1) + sync_blocks(self.nodes) + self.nodes[1].generate(1) + sync_blocks(self.nodes) + self.nodes[2].generate(1) + sync_blocks(self.nodes) + self.nodes[3].generate(100) + sync_blocks(self.nodes) + + assert_equal(self.nodes[0].getbalance(), 50) + assert_equal(self.nodes[1].getbalance(), 50) + assert_equal(self.nodes[2].getbalance(), 50) + assert_equal(self.nodes[3].getbalance(), 0) + + self.log.info("Creating transactions") + # Five rounds of sending each other transactions. + for i in range(5): + self.do_one_round() + + self.log.info("Backing up") + + self.nodes[0].backupwallet(os.path.join(self.nodes[0].datadir, 'wallet.bak')) + self.nodes[0].dumpwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump')) + self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, 'wallet.bak')) + self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump')) + self.nodes[2].backupwallet(os.path.join(self.nodes[2].datadir, 'wallet.bak')) + self.nodes[2].dumpwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump')) + + self.log.info("More transactions") + for i in range(5): + self.do_one_round() + + # Generate 101 more blocks, so any fees paid mature + self.nodes[3].generate(101) + self.sync_all() + + balance0 = self.nodes[0].getbalance() + balance1 = self.nodes[1].getbalance() + balance2 = self.nodes[2].getbalance() + balance3 = self.nodes[3].getbalance() + total = balance0 + balance1 + balance2 + balance3 + + # At this point, there are 214 blocks (103 for setup, then 10 rounds, then 101.) + # 114 are mature, so the sum of all wallets should be 114 * 50 = 5700. 
+ assert_equal(total, 5700) + + ## + # Test restoring spender wallets from backups + ## + self.log.info("Restoring using wallet.dat") + self.stop_three() + self.erase_three() + + # Start node2 with no chain + shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks')) + shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'chainstate')) + + # Restore wallets from backup + shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat')) + shutil.copyfile(os.path.join(self.nodes[1].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat')) + shutil.copyfile(os.path.join(self.nodes[2].datadir, 'wallet.bak'), os.path.join(self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat')) + + self.log.info("Re-starting nodes") + self.start_three() + sync_blocks(self.nodes) + + assert_equal(self.nodes[0].getbalance(), balance0) + assert_equal(self.nodes[1].getbalance(), balance1) + assert_equal(self.nodes[2].getbalance(), balance2) + + self.log.info("Restoring using dumped wallet") + self.stop_three() + self.erase_three() + + #start node2 with no chain + shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks')) + shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'chainstate')) + + self.start_three() + + assert_equal(self.nodes[0].getbalance(), 0) + assert_equal(self.nodes[1].getbalance(), 0) + assert_equal(self.nodes[2].getbalance(), 0) + + self.nodes[0].importwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump')) + self.nodes[1].importwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump')) + self.nodes[2].importwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump')) + + sync_blocks(self.nodes) + + assert_equal(self.nodes[0].getbalance(), balance0) + assert_equal(self.nodes[1].getbalance(), balance1) + assert_equal(self.nodes[2].getbalance(), balance2) + + # Backup to source wallet file must fail + sourcePaths = [ + os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat'), + os.path.join(self.nodes[0].datadir, 'regtest', '.', 'wallets', 'wallet.dat'), + os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', ''), + os.path.join(self.nodes[0].datadir, 'regtest', 'wallets')] + + for sourcePath in sourcePaths: + assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath) + + +if __name__ == '__main__': + WalletBackupTest().main() diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py new file mode 100755 index 000000000..8bbff7f7e --- /dev/null +++ b/test/functional/wallet_basic.py @@ -0,0 +1,492 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the wallet.""" +from decimal import Decimal +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_array_result, + assert_equal, + assert_fee_amount, + assert_raises_rpc_error, + connect_nodes_bi, + sync_blocks, + sync_mempools, + wait_until, +) + +class WalletTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.add_nodes(4) + self.start_node(0) + self.start_node(1) + self.start_node(2) + connect_nodes_bi(self.nodes, 0, 1) + connect_nodes_bi(self.nodes, 1, 2) + connect_nodes_bi(self.nodes, 0, 2) + self.sync_all([self.nodes[0:3]]) + + def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size): + """Return curr_balance after asserting the fee was in range""" + fee = balance_with_fee - curr_balance + assert_fee_amount(fee, tx_size, fee_per_byte * 1000) + return curr_balance + + def get_vsize(self, txn): + return self.nodes[0].decoderawtransaction(txn)['vsize'] + + def run_test(self): + # Check that there's no UTXO on none of the nodes + assert_equal(len(self.nodes[0].listunspent()), 0) + assert_equal(len(self.nodes[1].listunspent()), 0) + assert_equal(len(self.nodes[2].listunspent()), 0) + + self.log.info("Mining blocks...") + + self.nodes[0].generate(1) + + walletinfo = self.nodes[0].getwalletinfo() + assert_equal(walletinfo['immature_balance'], 50) + assert_equal(walletinfo['balance'], 0) + + self.sync_all([self.nodes[0:3]]) + self.nodes[1].generate(101) + self.sync_all([self.nodes[0:3]]) + + assert_equal(self.nodes[0].getbalance(), 50) + assert_equal(self.nodes[1].getbalance(), 50) + assert_equal(self.nodes[2].getbalance(), 0) + + # Check getbalance with different arguments + assert_equal(self.nodes[0].getbalance("*"), 50) + assert_equal(self.nodes[0].getbalance("*", 1), 50) + assert_equal(self.nodes[0].getbalance("*", 1, True), 50) + assert_equal(self.nodes[0].getbalance(minconf=1), 50) + + # first argument of getbalance must be excluded or set to "*" + assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"", self.nodes[0].getbalance, "") + + # Check that only first and second nodes have UTXOs + utxos = self.nodes[0].listunspent() + assert_equal(len(utxos), 1) + assert_equal(len(self.nodes[1].listunspent()), 1) + assert_equal(len(self.nodes[2].listunspent()), 0) + + self.log.info("test gettxout") + confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"] + # First, outputs that are unspent both in the chain and in the + # mempool should appear with or without include_mempool + txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False) + assert_equal(txout['value'], 50) + txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True) + assert_equal(txout['value'], 50) + + # Send 21 BTC from 0 to 2 using sendtoaddress call. 
+ # Locked memory should use at least 32 bytes to sign each transaction + self.log.info("test getmemoryinfo") + memory_before = self.nodes[0].getmemoryinfo() + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11) + mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10) + memory_after = self.nodes[0].getmemoryinfo() + assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used']) + + self.log.info("test gettxout (second part)") + # utxo spent in mempool should be visible if you exclude mempool + # but invisible if you include mempool + txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False) + assert_equal(txout['value'], 50) + txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True) + assert txout is None + # new utxo from mempool should be invisible if you exclude mempool + # but visible if you include mempool + txout = self.nodes[0].gettxout(mempool_txid, 0, False) + assert txout is None + txout1 = self.nodes[0].gettxout(mempool_txid, 0, True) + txout2 = self.nodes[0].gettxout(mempool_txid, 1, True) + # note the mempool tx will have randomly assigned indices + # but 10 will go to node2 and the rest will go to node0 + balance = self.nodes[0].getbalance() + assert_equal(set([txout1['value'], txout2['value']]), set([10, balance])) + walletinfo = self.nodes[0].getwalletinfo() + assert_equal(walletinfo['immature_balance'], 0) + + # Have node0 mine a block, thus it will collect its own fee. + self.nodes[0].generate(1) + self.sync_all([self.nodes[0:3]]) + + # Exercise locking of unspent outputs + unspent_0 = self.nodes[2].listunspent()[0] + unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]} + assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0]) + self.nodes[2].lockunspent(False, [unspent_0]) + assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0]) + assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20) + assert_equal([unspent_0], self.nodes[2].listlockunspent()) + self.nodes[2].lockunspent(True, [unspent_0]) + assert_equal(len(self.nodes[2].listlockunspent()), 0) + assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')", + self.nodes[2].lockunspent, False, + [{"txid": "0000000000000000000000000000000000", "vout": 0}]) + assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", + self.nodes[2].lockunspent, False, + [{"txid": "ZZZ0000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) + assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction", + self.nodes[2].lockunspent, False, + [{"txid": "0000000000000000000000000000000000000000000000000000000000000000", "vout": 0}]) + assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds", + self.nodes[2].lockunspent, False, + [{"txid": unspent_0["txid"], "vout": 999}]) + + # An output should be unlocked when spent + unspent_0 = self.nodes[1].listunspent()[0] + self.nodes[1].lockunspent(False, [unspent_0]) + tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 }) + tx = self.nodes[1].fundrawtransaction(tx)['hex'] + tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"] + self.nodes[1].sendrawtransaction(tx) + assert_equal(len(self.nodes[1].listlockunspent()), 0) + + 
# Have node1 generate 100 blocks (so node0 can recover the fee) + self.nodes[1].generate(100) + self.sync_all([self.nodes[0:3]]) + + # node0 should end up with 100 btc in block rewards plus fees, but + # minus the 21 plus fees sent to node2 + assert_equal(self.nodes[0].getbalance(), 100 - 21) + assert_equal(self.nodes[2].getbalance(), 21) + + # Node0 should have two unspent outputs. + # Create a couple of transactions to send them to node2, submit them through + # node1, and make sure both node0 and node2 pick them up properly: + node0utxos = self.nodes[0].listunspent(1) + assert_equal(len(node0utxos), 2) + + # create both transactions + txns_to_send = [] + for utxo in node0utxos: + inputs = [] + outputs = {} + inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]}) + outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3 + raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) + txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx)) + + # Have node 1 (miner) send the transactions + self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True) + self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True) + + # Have node1 mine a block to confirm transactions: + self.nodes[1].generate(1) + self.sync_all([self.nodes[0:3]]) + + assert_equal(self.nodes[0].getbalance(), 0) + assert_equal(self.nodes[2].getbalance(), 94) + + # Verify that a spent output cannot be locked anymore + spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]} + assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0]) + + # Send 10 BTC normal + address = self.nodes[0].getnewaddress("test") + fee_per_byte = Decimal('0.001') / 1000 + self.nodes[2].settxfee(fee_per_byte * 1000) + txid = self.nodes[2].sendtoaddress(address, 10, "", "", False) + self.nodes[2].generate(1) + self.sync_all([self.nodes[0:3]]) + node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid))) + assert_equal(self.nodes[0].getbalance(), Decimal('10')) + + # Send 10 BTC with subtract fee from amount + txid = self.nodes[2].sendtoaddress(address, 10, "", "", True) + self.nodes[2].generate(1) + self.sync_all([self.nodes[0:3]]) + node_2_bal -= Decimal('10') + assert_equal(self.nodes[2].getbalance(), node_2_bal) + node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid))) + + # Sendmany 10 BTC + txid = self.nodes[2].sendmany('', {address: 10}, 0, "", []) + self.nodes[2].generate(1) + self.sync_all([self.nodes[0:3]]) + node_0_bal += Decimal('10') + node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid))) + assert_equal(self.nodes[0].getbalance(), node_0_bal) + + # Sendmany 10 BTC with subtract fee from amount + txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address]) + self.nodes[2].generate(1) + self.sync_all([self.nodes[0:3]]) + node_2_bal -= Decimal('10') + assert_equal(self.nodes[2].getbalance(), node_2_bal) + node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].getrawtransaction(txid))) + + # Test ResendWalletTransactions: + # Create a couple of transactions, then start up a fourth + # node (nodes[3]) and ask nodes[0] to rebroadcast. 
+ # EXPECT: nodes[3] should have those transactions in its mempool. + txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1) + txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1) + sync_mempools(self.nodes[0:2]) + + self.start_node(3) + connect_nodes_bi(self.nodes, 0, 3) + sync_blocks(self.nodes) + + relayed = self.nodes[0].resendwallettransactions() + assert_equal(set(relayed), {txid1, txid2}) + sync_mempools(self.nodes) + + assert(txid1 in self.nodes[3].getrawmempool()) + + # Exercise balance rpcs + assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1) + assert_equal(self.nodes[0].getunconfirmedbalance(), 1) + + # check if we can list zero value tx as available coins + # 1. create raw_tx + # 2. hex-changed one output to 0.0 + # 3. sign and send + # 4. check if recipient (node0) can list the zero value tx + usp = self.nodes[1].listunspent(query_options={'minimumAmount': '49.998'})[0] + inputs = [{"txid": usp['txid'], "vout": usp['vout']}] + outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11} + + raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") # replace 11.11 with 0.0 (int32) + signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx) + decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex']) + zero_value_txid = decoded_raw_tx['txid'] + self.nodes[1].sendrawtransaction(signed_raw_tx['hex']) + + self.sync_all() + self.nodes[1].generate(1) # mine a block + self.sync_all() + + unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output + found = False + for uTx in unspent_txs: + if uTx['txid'] == zero_value_txid: + found = True + assert_equal(uTx['amount'], Decimal('0')) + assert(found) + + # do some -walletbroadcast tests + self.stop_nodes() + self.start_node(0, ["-walletbroadcast=0"]) + self.start_node(1, ["-walletbroadcast=0"]) + self.start_node(2, ["-walletbroadcast=0"]) + connect_nodes_bi(self.nodes, 0, 1) + connect_nodes_bi(self.nodes, 1, 2) + connect_nodes_bi(self.nodes, 0, 2) + self.sync_all([self.nodes[0:3]]) + + txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) + tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) + self.nodes[1].generate(1) # mine a block, tx should not be in there + self.sync_all([self.nodes[0:3]]) + assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted + + # now broadcast from another node, mine a block, sync, and check the balance + self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex']) + self.nodes[1].generate(1) + self.sync_all([self.nodes[0:3]]) + node_2_bal += 2 + tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast) + assert_equal(self.nodes[2].getbalance(), node_2_bal) + + # create another tx + self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) + + # restart the nodes with -walletbroadcast=1 + self.stop_nodes() + self.start_node(0) + self.start_node(1) + self.start_node(2) + connect_nodes_bi(self.nodes, 0, 1) + connect_nodes_bi(self.nodes, 1, 2) + connect_nodes_bi(self.nodes, 0, 2) + sync_blocks(self.nodes[0:3]) + + self.nodes[0].generate(1) + sync_blocks(self.nodes[0:3]) + node_2_bal += 2 + + # tx should be added to balance because after restarting the nodes tx should be broadcast + assert_equal(self.nodes[2].getbalance(), node_2_bal) + + # send a tx with value in a string (PR#6380 +) + txid = 
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2") + tx_obj = self.nodes[0].gettransaction(txid) + assert_equal(tx_obj['amount'], Decimal('-2')) + + txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001") + tx_obj = self.nodes[0].gettransaction(txid) + assert_equal(tx_obj['amount'], Decimal('-0.0001')) + + # check if JSON parser can handle scientific notation in strings + txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4") + tx_obj = self.nodes[0].gettransaction(txid) + assert_equal(tx_obj['amount'], Decimal('-0.0001')) + + # This will raise an exception because the amount type is wrong + assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4") + + # This will raise an exception since generate does not accept a string + assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2") + + # Import address and private key to check correct behavior of spendable unspents + # 1. Send some coins to generate new UTXO + address_to_import = self.nodes[2].getnewaddress() + txid = self.nodes[0].sendtoaddress(address_to_import, 1) + self.nodes[0].generate(1) + self.sync_all([self.nodes[0:3]]) + + # 2. Import address from node2 to node1 + self.nodes[1].importaddress(address_to_import) + + # 3. Validate that the imported address is watch-only on node1 + assert(self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]) + + # 4. Check that the unspents after import are not spendable + assert_array_result(self.nodes[1].listunspent(), + {"address": address_to_import}, + {"spendable": False}) + + # 5. Import private key of the previously imported address on node1 + priv_key = self.nodes[2].dumpprivkey(address_to_import) + self.nodes[1].importprivkey(priv_key) + + # 6. Check that the unspents are now spendable on node1 + assert_array_result(self.nodes[1].listunspent(), + {"address": address_to_import}, + {"spendable": True}) + + # Mine a block from node0 to an address from node1 + coinbase_addr = self.nodes[1].getnewaddress() + block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0] + coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0] + self.sync_all([self.nodes[0:3]]) + + # Check that the txid and balance is found by node1 + self.nodes[1].gettransaction(coinbase_txid) + + # check if wallet or blockchain maintenance changes the balance + self.sync_all([self.nodes[0:3]]) + blocks = self.nodes[0].generate(2) + self.sync_all([self.nodes[0:3]]) + balance_nodes = [self.nodes[i].getbalance() for i in range(3)] + block_count = self.nodes[0].getblockcount() + + # Check modes: + # - True: unicode escaped as \u.... 
+ # - False: unicode directly as UTF-8 + for mode in [True, False]: + self.nodes[0].rpc.ensure_ascii = mode + # unicode check: Basic Multilingual Plane, Supplementary Plane respectively + for label in [u'рыба', u'𝅘𝅥𝅯']: + addr = self.nodes[0].getnewaddress() + self.nodes[0].setlabel(addr, label) + assert_equal(self.nodes[0].getaddressinfo(addr)['label'], label) + assert(label in self.nodes[0].listlabels()) + self.nodes[0].rpc.ensure_ascii = True # restore to default + + # maintenance tests + maintenance = [ + '-rescan', + '-reindex', + '-zapwallettxes=1', + '-zapwallettxes=2', + # disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463 + # '-salvagewallet', + ] + chainlimit = 6 + for m in maintenance: + self.log.info("check " + m) + self.stop_nodes() + # set lower ancestor limit for later + self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)]) + self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)]) + self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)]) + if m == '-reindex': + # reindex will leave rpc warm up "early"; Wait for it to finish + wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)]) + assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)]) + + # Exercise listsinceblock with the last two blocks + coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0]) + assert_equal(coinbase_tx_1["lastblock"], blocks[1]) + assert_equal(len(coinbase_tx_1["transactions"]), 1) + assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1]) + assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0) + + # ==Check that wallet prefers to use coins that don't exceed mempool limits ===== + + # Get all non-zero utxos together + chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()] + singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True) + self.nodes[0].generate(1) + node0_balance = self.nodes[0].getbalance() + # Split into two chains + rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')}) + signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx) + singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"]) + self.nodes[0].generate(1) + + # Make a long chain of unconfirmed payments without hitting mempool limit + # Each tx we make leaves only one output of change on a chain 1 longer + # Since the amount to send is always much less than the outputs, we only ever need one output + # So we should be able to generate exactly chainlimit txs for each original output + sending_addr = self.nodes[1].getnewaddress() + txid_list = [] + for i in range(chainlimit * 2): + txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))) + assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2) + assert_equal(len(txid_list), chainlimit * 2) + + # Without walletrejectlongchains, we will still generate a txid + # The tx will be stored in the wallet but not accepted to the mempool + extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')) + assert(extra_txid not in self.nodes[0].getrawmempool()) + assert(extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()]) + self.nodes[0].abandontransaction(extra_txid) + total_txs = len(self.nodes[0].listtransactions("*", 99999)) + + # Try with walletrejectlongchains + # Double chain 
limit but require combining inputs, so we pass SelectCoinsMinConf + self.stop_node(0) + self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)]) + + # wait for loadmempool + timeout = 10 + while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2): + time.sleep(0.5) + timeout -= 0.5 + assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2) + + node0_balance = self.nodes[0].getbalance() + # With walletrejectlongchains we will not create the tx and store it in our wallet. + assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01')) + + # Verify nothing new in wallet + assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999))) + + # Test getaddressinfo. Note that these addresses are taken from disablewallet.py + assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy") + address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") + assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ") + assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac") + assert not address_info["ismine"] + assert not address_info["iswatchonly"] + assert not address_info["isscript"] + +if __name__ == '__main__': + WalletTest().main() diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py new file mode 100755 index 000000000..67ee00871 --- /dev/null +++ b/test/functional/wallet_bumpfee.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the bumpfee RPC. + +Verifies that the bumpfee RPC creates replacement transactions successfully when +its preconditions are met, and returns appropriate errors in other cases. + +This module consists of around a dozen individual test cases implemented in the +top-level functions named as test_. The test functions +can be disabled or reordered if needed for debugging. If new test cases are +added in the future, they should try to follow the same convention and not +make assumptions about execution order. 
+""" + +from decimal import Decimal + +from test_framework.blocktools import add_witness_commitment, create_block, create_coinbase, send_to_witness +from test_framework.messages import BIP125_SEQUENCE_NUMBER, CTransaction +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, bytes_to_hex_str, connect_nodes_bi, hex_str_to_bytes, sync_mempools + +import io + +WALLET_PASSPHRASE = "test" +WALLET_PASSPHRASE_TIMEOUT = 3600 + + +class BumpFeeTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.setup_clean_chain = True + self.extra_args = [[ + "-deprecatedrpc=addwitnessaddress", + "-walletrbf={}".format(i), + "-mintxfee=0.00002", + ] for i in range(self.num_nodes)] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Encrypt wallet for test_locked_wallet_fails test + self.nodes[1].encryptwallet(WALLET_PASSPHRASE) + self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT) + + connect_nodes_bi(self.nodes, 0, 1) + self.sync_all() + + peer_node, rbf_node = self.nodes + rbf_node_address = rbf_node.getnewaddress() + + # fund rbf node with 10 coins of 0.001 btc (100,000 satoshis) + self.log.info("Mining blocks...") + peer_node.generate(110) + self.sync_all() + for i in range(25): + peer_node.sendtoaddress(rbf_node_address, 0.001) + self.sync_all() + peer_node.generate(1) + self.sync_all() + assert_equal(rbf_node.getbalance(), Decimal("0.025")) + + self.log.info("Running tests") + dest_address = peer_node.getnewaddress() + test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address) + test_segwit_bumpfee_succeeds(rbf_node, dest_address) + test_nonrbf_bumpfee_fails(peer_node, dest_address) + test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address) + test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address) + test_small_output_fails(rbf_node, dest_address) + test_dust_to_fee(rbf_node, dest_address) + test_settxfee(rbf_node, dest_address) + test_rebumping(rbf_node, dest_address) + test_rebumping_not_replaceable(rbf_node, dest_address) + test_unconfirmed_not_spendable(rbf_node, rbf_node_address) + test_bumpfee_metadata(rbf_node, dest_address) + test_locked_wallet_fails(rbf_node, dest_address) + self.log.info("Success") + + +def test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address): + rbfid = spend_one_input(rbf_node, dest_address) + rbftx = rbf_node.gettransaction(rbfid) + sync_mempools((rbf_node, peer_node)) + assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool() + bumped_tx = rbf_node.bumpfee(rbfid) + assert_equal(bumped_tx["errors"], []) + assert bumped_tx["fee"] - abs(rbftx["fee"]) > 0 + # check that bumped_tx propagates, original tx was evicted and has a wallet conflict + sync_mempools((rbf_node, peer_node)) + assert bumped_tx["txid"] in rbf_node.getrawmempool() + assert bumped_tx["txid"] in peer_node.getrawmempool() + assert rbfid not in rbf_node.getrawmempool() + assert rbfid not in peer_node.getrawmempool() + oldwtx = rbf_node.gettransaction(rbfid) + assert len(oldwtx["walletconflicts"]) > 0 + # check wallet transaction replaces and replaced_by values + bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"]) + assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"]) + assert_equal(bumpedwtx["replaces_txid"], rbfid) + + +def test_segwit_bumpfee_succeeds(rbf_node, dest_address): + # Create a transaction with segwit output, then create an RBF 
transaction + # which spends it, and make sure bumpfee can be called on it. + + segwit_in = next(u for u in rbf_node.listunspent() if u["amount"] == Decimal("0.001")) + segwit_out = rbf_node.getaddressinfo(rbf_node.getnewaddress()) + rbf_node.addwitnessaddress(segwit_out["address"]) + segwitid = send_to_witness( + use_p2wsh=False, + node=rbf_node, + utxo=segwit_in, + pubkey=segwit_out["pubkey"], + encode_p2sh=False, + amount=Decimal("0.0009"), + sign=True) + + rbfraw = rbf_node.createrawtransaction([{ + 'txid': segwitid, + 'vout': 0, + "sequence": BIP125_SEQUENCE_NUMBER + }], {dest_address: Decimal("0.0005"), + rbf_node.getrawchangeaddress(): Decimal("0.0003")}) + rbfsigned = rbf_node.signrawtransactionwithwallet(rbfraw) + rbfid = rbf_node.sendrawtransaction(rbfsigned["hex"]) + assert rbfid in rbf_node.getrawmempool() + + bumped_tx = rbf_node.bumpfee(rbfid) + assert bumped_tx["txid"] in rbf_node.getrawmempool() + assert rbfid not in rbf_node.getrawmempool() + + +def test_nonrbf_bumpfee_fails(peer_node, dest_address): + # cannot replace a non RBF transaction (from node which did not enable RBF) + not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000")) + assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid) + + +def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address): + # cannot bump fee unless the tx has only inputs that we own. + # here, the rbftx has a peer_node coin and then adds a rbf_node input + # Note that this test depends upon the RPC code checking input ownership prior to change outputs + # (since it can't use fundrawtransaction, it lacks a proper change output) + utxos = [node.listunspent()[-1] for node in (rbf_node, peer_node)] + inputs = [{ + "txid": utxo["txid"], + "vout": utxo["vout"], + "address": utxo["address"], + "sequence": BIP125_SEQUENCE_NUMBER + } for utxo in utxos] + output_val = sum(utxo["amount"] for utxo in utxos) - Decimal("0.001") + rawtx = rbf_node.createrawtransaction(inputs, {dest_address: output_val}) + signedtx = rbf_node.signrawtransactionwithwallet(rawtx) + signedtx = peer_node.signrawtransactionwithwallet(signedtx["hex"]) + rbfid = rbf_node.sendrawtransaction(signedtx["hex"]) + assert_raises_rpc_error(-4, "Transaction contains inputs that don't belong to this wallet", + rbf_node.bumpfee, rbfid) + + +def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address): + # cannot bump fee if the transaction has a descendant + # parent is send-to-self, so we don't have to check which output is change when creating the child tx + parent_id = spend_one_input(rbf_node, rbf_node_address) + tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000}) + tx = rbf_node.signrawtransactionwithwallet(tx) + rbf_node.sendrawtransaction(tx["hex"]) + assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id) + + +def test_small_output_fails(rbf_node, dest_address): + # cannot bump fee with a too-small output + rbfid = spend_one_input(rbf_node, dest_address) + rbf_node.bumpfee(rbfid, {"totalFee": 50000}) + + rbfid = spend_one_input(rbf_node, dest_address) + assert_raises_rpc_error(-4, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 50001}) + + +def test_dust_to_fee(rbf_node, dest_address): + # check that if output is reduced to dust, it will be converted to fee + # the bumped tx sets fee=49,900, but it converts to 50,000 + rbfid = spend_one_input(rbf_node, dest_address) + fulltx = 
rbf_node.getrawtransaction(rbfid, 1) + # (32-byte p2sh-pwpkh output size + 148 p2pkh spend estimate) * 10k(discard_rate) / 1000 = 1800 + # P2SH outputs are slightly "over-discarding" due to the IsDust calculation assuming it will + # be spent as a P2PKH. + bumped_tx = rbf_node.bumpfee(rbfid, {"totalFee": 50000-1800}) + full_bumped_tx = rbf_node.getrawtransaction(bumped_tx["txid"], 1) + assert_equal(bumped_tx["fee"], Decimal("0.00050000")) + assert_equal(len(fulltx["vout"]), 2) + assert_equal(len(full_bumped_tx["vout"]), 1) #change output is eliminated + + +def test_settxfee(rbf_node, dest_address): + assert_raises_rpc_error(-8, "txfee cannot be less than min relay tx fee", rbf_node.settxfee, Decimal('0.000005')) + assert_raises_rpc_error(-8, "txfee cannot be less than wallet min fee", rbf_node.settxfee, Decimal('0.000015')) + # check that bumpfee reacts correctly to the use of settxfee (paytxfee) + rbfid = spend_one_input(rbf_node, dest_address) + requested_feerate = Decimal("0.00025000") + rbf_node.settxfee(requested_feerate) + bumped_tx = rbf_node.bumpfee(rbfid) + actual_feerate = bumped_tx["fee"] * 1000 / rbf_node.getrawtransaction(bumped_tx["txid"], True)["vsize"] + # Assert that the difference between the requested feerate and the actual + # feerate of the bumped transaction is small. + assert_greater_than(Decimal("0.00001000"), abs(requested_feerate - actual_feerate)) + rbf_node.settxfee(Decimal("0.00000000")) # unset paytxfee + + +def test_rebumping(rbf_node, dest_address): + # check that re-bumping the original tx fails, but bumping the bumper succeeds + rbfid = spend_one_input(rbf_node, dest_address) + bumped = rbf_node.bumpfee(rbfid, {"totalFee": 2000}) + assert_raises_rpc_error(-4, "already bumped", rbf_node.bumpfee, rbfid, {"totalFee": 3000}) + rbf_node.bumpfee(bumped["txid"], {"totalFee": 3000}) + + +def test_rebumping_not_replaceable(rbf_node, dest_address): + # check that re-bumping a non-replaceable bump tx fails + rbfid = spend_one_input(rbf_node, dest_address) + bumped = rbf_node.bumpfee(rbfid, {"totalFee": 10000, "replaceable": False}) + assert_raises_rpc_error(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"], + {"totalFee": 20000}) + + +def test_unconfirmed_not_spendable(rbf_node, rbf_node_address): + # check that unconfirmed outputs from bumped transactions are not spendable + rbfid = spend_one_input(rbf_node, rbf_node_address) + rbftx = rbf_node.gettransaction(rbfid)["hex"] + assert rbfid in rbf_node.getrawmempool() + bumpid = rbf_node.bumpfee(rbfid)["txid"] + assert bumpid in rbf_node.getrawmempool() + assert rbfid not in rbf_node.getrawmempool() + + # check that outputs from the bump transaction are not spendable + # due to the replaces_txid check in CWallet::AvailableCoins + assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == bumpid], []) + + # submit a block with the rbf tx to clear the bump tx out of the mempool, + # then invalidate the block so the rbf tx will be put back in the mempool. + # This makes it possible to check whether the rbf tx outputs are + # spendable before the rbf tx is confirmed. + block = submit_block_with_tx(rbf_node, rbftx) + # Can not abandon conflicted tx + assert_raises_rpc_error(-5, 'Transaction not eligible for abandonment', lambda: rbf_node.abandontransaction(txid=bumpid)) + rbf_node.invalidateblock(block.hash) + # Call abandon to make sure the wallet doesn't attempt to resubmit + # the bump tx and hope the wallet does not rebroadcast before we call. 
+ rbf_node.abandontransaction(bumpid) + assert bumpid not in rbf_node.getrawmempool() + assert rbfid in rbf_node.getrawmempool() + + # check that outputs from the rbf tx are not spendable before the + # transaction is confirmed, due to the replaced_by_txid check in + # CWallet::AvailableCoins + assert_equal([t for t in rbf_node.listunspent(minconf=0, include_unsafe=False) if t["txid"] == rbfid], []) + + # check that the main output from the rbf tx is spendable after confirmed + rbf_node.generate(1) + assert_equal( + sum(1 for t in rbf_node.listunspent(minconf=0, include_unsafe=False) + if t["txid"] == rbfid and t["address"] == rbf_node_address and t["spendable"]), 1) + + +def test_bumpfee_metadata(rbf_node, dest_address): + rbfid = rbf_node.sendtoaddress(dest_address, Decimal("0.00100000"), "comment value", "to value") + bumped_tx = rbf_node.bumpfee(rbfid) + bumped_wtx = rbf_node.gettransaction(bumped_tx["txid"]) + assert_equal(bumped_wtx["comment"], "comment value") + assert_equal(bumped_wtx["to"], "to value") + + +def test_locked_wallet_fails(rbf_node, dest_address): + rbfid = spend_one_input(rbf_node, dest_address) + rbf_node.walletlock() + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.", + rbf_node.bumpfee, rbfid) + + +def spend_one_input(node, dest_address): + tx_input = dict( + sequence=BIP125_SEQUENCE_NUMBER, **next(u for u in node.listunspent() if u["amount"] == Decimal("0.00100000"))) + rawtx = node.createrawtransaction( + [tx_input], {dest_address: Decimal("0.00050000"), + node.getrawchangeaddress(): Decimal("0.00049000")}) + signedtx = node.signrawtransactionwithwallet(rawtx) + txid = node.sendrawtransaction(signedtx["hex"]) + return txid + + +def submit_block_with_tx(node, tx): + ctx = CTransaction() + ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx))) + + tip = node.getbestblockhash() + height = node.getblockcount() + 1 + block_time = node.getblockheader(tip)["mediantime"] + 1 + block = create_block(int(tip, 16), create_coinbase(height), block_time) + block.vtx.append(ctx) + block.rehash() + block.hashMerkleRoot = block.calc_merkle_root() + add_witness_commitment(block) + block.solve() + node.submitblock(bytes_to_hex_str(block.serialize(True))) + return block + + +if __name__ == "__main__": + BumpFeeTest().main() diff --git a/test/functional/wallet_disable.py b/test/functional/wallet_disable.py new file mode 100755 index 000000000..6530c58c7 --- /dev/null +++ b/test/functional/wallet_disable.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test a node with the -disablewallet option. + +- Test that validateaddress RPC works when running with -disablewallet +- Test that it is not possible to mine to an invalid address. 
+""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_raises_rpc_error + +class DisableWalletTest (BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + self.extra_args = [["-disablewallet"]] + + def run_test (self): + # Make sure wallet is really disabled + assert_raises_rpc_error(-32601, 'Method not found', self.nodes[0].getwalletinfo) + x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy') + assert(x['isvalid'] == False) + x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ') + assert(x['isvalid'] == True) + + # Checking mining to an address without a wallet. Generating to a valid address should succeed + # but generating to an invalid address will fail. + self.nodes[0].generatetoaddress(1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ') + assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy') + +if __name__ == '__main__': + DisableWalletTest ().main () diff --git a/test/functional/wallet_disableprivatekeys.py b/test/functional/wallet_disableprivatekeys.py new file mode 100755 index 000000000..34ff52525 --- /dev/null +++ b/test/functional/wallet_disableprivatekeys.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test disable-privatekeys mode. +""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_raises_rpc_error, +) + + +class DisablePrivateKeysTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = False + self.num_nodes = 1 + self.supports_cli = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + node = self.nodes[0] + self.log.info("Test disableprivatekeys creation.") + self.nodes[0].createwallet('w1', True) + self.nodes[0].createwallet('w2') + w1 = node.get_wallet_rpc('w1') + w2 = node.get_wallet_rpc('w2') + assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getnewaddress) + assert_raises_rpc_error(-4,"Error: Private keys are disabled for this wallet", w1.getrawchangeaddress) + w1.importpubkey(w2.getaddressinfo(w2.getnewaddress())['pubkey']) + +if __name__ == '__main__': + DisablePrivateKeysTest().main() diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py new file mode 100755 index 000000000..b1db1e4ab --- /dev/null +++ b/test/functional/wallet_dump.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the dumpwallet RPC.""" + +import os + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, +) + + +def read_dump(file_name, addrs, script_addrs, hd_master_addr_old): + """ + Read the given dump, count the addrs that match, count change and reserve. 
+ Also check that the old hd_master is inactive + """ + with open(file_name, encoding='utf8') as inputfile: + found_addr = 0 + found_script_addr = 0 + found_addr_chg = 0 + found_addr_rsv = 0 + witness_addr_ret = None + hd_master_addr_ret = None + for line in inputfile: + # only read non comment lines + if line[0] != "#" and len(line) > 10: + # split out some data + key_date_label, comment = line.split("#") + key_date_label = key_date_label.split(" ") + # key = key_date_label[0] + date = key_date_label[1] + keytype = key_date_label[2] + + imported_key = date == '1970-01-01T00:00:01Z' + if imported_key: + # Imported keys have multiple addresses, no label (keypath) and timestamp + # Skip them + continue + + addr_keypath = comment.split(" addr=")[1] + addr = addr_keypath.split(" ")[0] + keypath = None + if keytype == "inactivehdseed=1": + # ensure the old master is still available + assert (hd_master_addr_old == addr) + elif keytype == "hdseed=1": + # ensure we have generated a new hd master key + assert (hd_master_addr_old != addr) + hd_master_addr_ret = addr + elif keytype == "script=1": + # scripts don't have keypaths + keypath = None + else: + keypath = addr_keypath.rstrip().split("hdkeypath=")[1] + + # count key types + for addrObj in addrs: + if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=": + # a labeled entry in the wallet should contain both a native address + # and the p2sh-p2wpkh address that was added at wallet setup + if len(addr.split(",")) == 2: + addr_list = addr.split(",") + # the entry should be of the first key in the wallet + assert_equal(addrs[0]['address'], addr_list[0]) + witness_addr_ret = addr_list[1] + found_addr += 1 + break + elif keytype == "change=1": + found_addr_chg += 1 + break + elif keytype == "reserve=1": + found_addr_rsv += 1 + break + + # count scripts + for script_addr in script_addrs: + if script_addr == addr.rstrip() and keytype == "script=1": + found_script_addr += 1 + break + + return found_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret, witness_addr_ret + + +class WalletDumpTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [["-keypool=90", "-addresstype=legacy", "-deprecatedrpc=addwitnessaddress"]] + self.rpc_timeout = 120 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self, split=False): + self.add_nodes(self.num_nodes, extra_args=self.extra_args) + self.start_nodes() + + def run_test(self): + wallet_unenc_dump = os.path.join(self.nodes[0].datadir, "wallet.unencrypted.dump") + wallet_enc_dump = os.path.join(self.nodes[0].datadir, "wallet.encrypted.dump") + + # generate 20 addresses to compare against the dump + # but since we add a p2sh-p2wpkh address for the first pubkey in the + # wallet, we will expect 21 addresses in the dump + test_addr_count = 20 + addrs = [] + for i in range(0,test_addr_count): + addr = self.nodes[0].getnewaddress() + vaddr= self.nodes[0].getaddressinfo(addr) #required to get hd keypath + addrs.append(vaddr) + # Should be a no-op: + self.nodes[0].keypoolrefill() + + # Test scripts dump by adding a P2SH witness and a 1-of-1 multisig address + witness_addr = self.nodes[0].addwitnessaddress(addrs[0]["address"], True) + multisig_addr = self.nodes[0].addmultisigaddress(1, [addrs[1]["address"]])["address"] + script_addrs = [witness_addr, multisig_addr] + + # dump unencrypted wallet + result = self.nodes[0].dumpwallet(wallet_unenc_dump) + 
assert_equal(result['filename'], wallet_unenc_dump) + + found_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc, witness_addr_ret = \ + read_dump(wallet_unenc_dump, addrs, script_addrs, None) + assert_equal(found_addr, test_addr_count) # all keys must be in the dump + assert_equal(found_script_addr, 2) # all scripts must be in the dump + assert_equal(found_addr_chg, 0) # 0 blocks where mined + assert_equal(found_addr_rsv, 90 * 2) # 90 keys plus 100% internal keys + assert_equal(witness_addr_ret, witness_addr) # p2sh-p2wsh address added to the first key + + #encrypt wallet, restart, unlock and dump + self.nodes[0].encryptwallet('test') + self.nodes[0].walletpassphrase('test', 10) + # Should be a no-op: + self.nodes[0].keypoolrefill() + self.nodes[0].dumpwallet(wallet_enc_dump) + + found_addr, found_script_addr, found_addr_chg, found_addr_rsv, _, witness_addr_ret = \ + read_dump(wallet_enc_dump, addrs, script_addrs, hd_master_addr_unenc) + assert_equal(found_addr, test_addr_count) + assert_equal(found_script_addr, 2) + assert_equal(found_addr_chg, 90 * 2) # old reserve keys are marked as change now + assert_equal(found_addr_rsv, 90 * 2) + assert_equal(witness_addr_ret, witness_addr) + + # Overwriting should fail + assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump)) + + # Restart node with new wallet, and test importwallet + self.stop_node(0) + self.start_node(0, ['-wallet=w2']) + + # Make sure the address is not IsMine before import + result = self.nodes[0].getaddressinfo(multisig_addr) + assert(result['ismine'] == False) + + self.nodes[0].importwallet(wallet_unenc_dump) + + # Now check IsMine is true + result = self.nodes[0].getaddressinfo(multisig_addr) + assert(result['ismine'] == True) + +if __name__ == '__main__': + WalletDumpTest().main() diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py new file mode 100755 index 000000000..ab9ebed8d --- /dev/null +++ b/test/functional/wallet_encryption.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test Wallet encryption""" + +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + assert_greater_than, + assert_greater_than_or_equal, +) + +class WalletEncryptionTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + passphrase = "WalletPassphrase" + passphrase2 = "SecondWalletPassphrase" + + # Make sure the wallet isn't encrypted first + address = self.nodes[0].getnewaddress() + privkey = self.nodes[0].dumpprivkey(address) + assert_equal(privkey[:1], "c") + assert_equal(len(privkey), 52) + + # Encrypt the wallet + self.nodes[0].encryptwallet(passphrase) + + # Test that the wallet is encrypted + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) + + # Check that walletpassphrase works + self.nodes[0].walletpassphrase(passphrase, 2) + assert_equal(privkey, self.nodes[0].dumpprivkey(address)) + + # Check that the timeout is right + time.sleep(2) + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) + + # Test wrong passphrase + assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10) + + # Test walletlock + self.nodes[0].walletpassphrase(passphrase, 84600) + assert_equal(privkey, self.nodes[0].dumpprivkey(address)) + self.nodes[0].walletlock() + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address) + + # Test passphrase changes + self.nodes[0].walletpassphrasechange(passphrase, passphrase2) + assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10) + self.nodes[0].walletpassphrase(passphrase2, 10) + assert_equal(privkey, self.nodes[0].dumpprivkey(address)) + self.nodes[0].walletlock() + + # Test timeout bounds + assert_raises_rpc_error(-8, "Timeout cannot be negative.", self.nodes[0].walletpassphrase, passphrase2, -10) + # Check the timeout + # Check a time less than the limit + MAX_VALUE = 100000000 + expected_time = int(time.time()) + MAX_VALUE - 600 + self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE - 600) + actual_time = self.nodes[0].getwalletinfo()['unlocked_until'] + assert_greater_than_or_equal(actual_time, expected_time) + assert_greater_than(expected_time + 5, actual_time) # 5 second buffer + # Check a time greater than the limit + expected_time = int(time.time()) + MAX_VALUE - 1 + self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE + 1000) + actual_time = self.nodes[0].getwalletinfo()['unlocked_until'] + assert_greater_than_or_equal(actual_time, expected_time) + assert_greater_than(expected_time + 5, actual_time) # 5 second buffer + +if __name__ == '__main__': + WalletEncryptionTest().main() diff --git a/test/functional/wallet_fallbackfee.py b/test/functional/wallet_fallbackfee.py new file mode 100755 index 000000000..0c67982bb --- /dev/null +++ b/test/functional/wallet_fallbackfee.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee.""" +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_raises_rpc_error + +class WalletRBFTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.nodes[0].generate(101) + + # sending a transaction without fee estimations must be possible by default on regtest + self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) + + # test sending a tx with disabled fallback fee (must fail) + self.restart_node(0, extra_args=["-fallbackfee=0"]) + assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)) + assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress(): 1}))) + assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1})) + +if __name__ == '__main__': + WalletRBFTest().main() diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py new file mode 100755 index 000000000..9d6148386 --- /dev/null +++ b/test/functional/wallet_groups.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test wallet group functionality.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.messages import CTransaction, FromHex, ToHex +from test_framework.util import ( + assert_equal, +) + +def assert_approx(v, vexp, vspan=0.00001): + if v < vexp - vspan: + raise AssertionError("%s < [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan))) + if v > vexp + vspan: + raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan))) + +class WalletGroupTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 3 + self.extra_args = [[], [], ['-avoidpartialspends']] + self.rpc_timewait = 120 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Mine some coins + self.nodes[0].generate(110) + + # Get some addresses from the two nodes + addr1 = [self.nodes[1].getnewaddress() for i in range(3)] + addr2 = [self.nodes[2].getnewaddress() for i in range(3)] + addrs = addr1 + addr2 + + # Send 1 + 0.5 coin to each address + [self.nodes[0].sendtoaddress(addr, 1.0) for addr in addrs] + [self.nodes[0].sendtoaddress(addr, 0.5) for addr in addrs] + + self.nodes[0].generate(1) + self.sync_all() + + # For each node, send 0.2 coins back to 0; + # - node[1] should pick one 0.5 UTXO and leave the rest + # - node[2] should pick one (1.0 + 0.5) UTXO group corresponding to a + # given address, and leave the rest + txid1 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 0.2) + tx1 = self.nodes[1].getrawtransaction(txid1, True) + # txid1 should have 1 input and 2 outputs + assert_equal(1, len(tx1["vin"])) + assert_equal(2, len(tx1["vout"])) + # one output should be 0.2, the other should be ~0.3 + v = [vout["value"] for vout in tx1["vout"]] + v.sort() + assert_approx(v[0], 0.2) + assert_approx(v[1], 0.3, 0.0001) + + txid2 = 
self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 0.2) + tx2 = self.nodes[2].getrawtransaction(txid2, True) + # txid2 should have 2 inputs and 2 outputs + assert_equal(2, len(tx2["vin"])) + assert_equal(2, len(tx2["vout"])) + # one output should be 0.2, the other should be ~1.3 + v = [vout["value"] for vout in tx2["vout"]] + v.sort() + assert_approx(v[0], 0.2) + assert_approx(v[1], 1.3, 0.0001) + + # Empty out node2's wallet + self.nodes[2].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=self.nodes[2].getbalance(), subtractfeefromamount=True) + self.sync_all() + self.nodes[0].generate(1) + + # Fill node2's wallet with 10000 outputs corresponding to the same + # scriptPubKey + for i in range(5): + raw_tx = self.nodes[0].createrawtransaction([{"txid":"0"*64, "vout":0}], [{addr2[0]: 0.05}]) + tx = FromHex(CTransaction(), raw_tx) + tx.vin = [] + tx.vout = [tx.vout[0]] * 2000 + funded_tx = self.nodes[0].fundrawtransaction(ToHex(tx)) + signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex']) + self.nodes[0].sendrawtransaction(signed_tx['hex']) + self.nodes[0].generate(1) + + self.sync_all() + + # Check that we can create a transaction that only requires ~100 of our + # utxos, without pulling in all outputs and creating a transaction that + # is way too big. + assert self.nodes[2].sendtoaddress(address=addr2[0], amount=5) + +if __name__ == '__main__': + WalletGroupTest().main () diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py new file mode 100755 index 000000000..eb4253169 --- /dev/null +++ b/test/functional/wallet_hd.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
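+# Path layout sketch, as asserted throughout this test: external (receiving)
+# keys derive at m/0'/0'/i' and internal (change) keys at m/0'/1'/i', all
+# under the seed reported as hdseedid by getwalletinfo.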
+"""Test Hierarchical Deterministic wallet function.""" + +import os +import shutil + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + connect_nodes_bi, + assert_raises_rpc_error +) + + +class WalletHDTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.extra_args = [[], ['-keypool=0']] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Make sure we use hd, keep masterkeyid + masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] + assert_equal(masterkeyid, self.nodes[1].getwalletinfo()['hdmasterkeyid']) + assert_equal(len(masterkeyid), 40) + + # create an internal key + change_addr = self.nodes[1].getrawchangeaddress() + change_addrV= self.nodes[1].getaddressinfo(change_addr) + assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key + + # Import a non-HD private key in the HD wallet + non_hd_add = self.nodes[0].getnewaddress() + self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add)) + + # This should be enough to keep the master key and the non-HD key + self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak")) + #self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump")) + + # Derive some HD addresses and remember the last + # Also send funds to each add + self.nodes[0].generate(101) + hd_add = None + NUM_HD_ADDS = 10 + for i in range(NUM_HD_ADDS): + hd_add = self.nodes[1].getnewaddress() + hd_info = self.nodes[1].getaddressinfo(hd_add) + assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'") + assert_equal(hd_info["hdseedid"], masterkeyid) + assert_equal(hd_info["hdmasterkeyid"], masterkeyid) + self.nodes[0].sendtoaddress(hd_add, 1) + self.nodes[0].generate(1) + self.nodes[0].sendtoaddress(non_hd_add, 1) + self.nodes[0].generate(1) + + # create an internal key (again) + change_addr = self.nodes[1].getrawchangeaddress() + change_addrV= self.nodes[1].getaddressinfo(change_addr) + assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key + + self.sync_all() + assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) + + self.log.info("Restore backup ...") + self.stop_node(1) + # we need to delete the complete regtest directory + # otherwise node1 would auto-recover all funds in flag the keypool keys as used + shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks")) + shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate")) + shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat")) + self.start_node(1) + + # Assert that derivation is deterministic + hd_add_2 = None + for i in range(NUM_HD_ADDS): + hd_add_2 = self.nodes[1].getnewaddress() + hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2) + assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'") + assert_equal(hd_info_2["hdseedid"], masterkeyid) + assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid) + assert_equal(hd_add, hd_add_2) + connect_nodes_bi(self.nodes, 0, 1) + self.sync_all() + + # Needs rescan + self.stop_node(1) + self.start_node(1, extra_args=self.extra_args[1] + ['-rescan']) + assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) + + # Try a RPC based rescan + self.stop_node(1) + shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks")) + shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate")) + 
shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat")) + self.start_node(1, extra_args=self.extra_args[1]) + connect_nodes_bi(self.nodes, 0, 1) + self.sync_all() + # Wallet automatically scans blocks older than key on startup + assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) + out = self.nodes[1].rescanblockchain(0, 1) + assert_equal(out['start_height'], 0) + assert_equal(out['stop_height'], 1) + out = self.nodes[1].rescanblockchain() + assert_equal(out['start_height'], 0) + assert_equal(out['stop_height'], self.nodes[1].getblockcount()) + assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1) + + # send a tx and make sure its using the internal chain for the changeoutput + txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1) + outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout'] + keypath = "" + for out in outs: + if out['value'] != 1: + keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath'] + + assert_equal(keypath[0:7], "m/0'/1'") + + # Generate a new HD seed on node 1 and make sure it is set + orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] + self.nodes[1].sethdseed() + new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] + assert orig_masterkeyid != new_masterkeyid + addr = self.nodes[1].getnewaddress() + assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is the first from the keypool + self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key + + # Set a new HD seed on node 1 without flushing the keypool + new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress()) + orig_masterkeyid = new_masterkeyid + self.nodes[1].sethdseed(False, new_seed) + new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid'] + assert orig_masterkeyid != new_masterkeyid + addr = self.nodes[1].getnewaddress() + assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid']) + assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'') # Make sure the new address continues previous keypool + + # Check that the next address is from the new seed + self.nodes[1].keypoolrefill(1) + next_addr = self.nodes[1].getnewaddress() + assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid']) + assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is not from previous keypool + assert next_addr != addr + + # Sethdseed parameter validity + assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0) + assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif") + assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool") + assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True) + assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed) + assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress())) + +if __name__ == '__main__': + WalletHDTest().main () diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py new file mode 100755 index 000000000..f043a544f --- /dev/null +++ b/test/functional/wallet_import_rescan.py @@ -0,0 +1,189 @@ 
+#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test wallet import RPCs. + +Test rescan behavior of importaddress, importpubkey, importprivkey, and +importmulti RPCs with different types of keys and rescan options. + +In the first part of the test, node 0 creates an address for each type of +import RPC call and sends BTC to it. Then other nodes import the addresses, +and the test makes listtransactions and getbalance calls to confirm that the +importing node either did or did not execute rescans picking up the send +transactions. + +In the second part of the test, node 0 sends more BTC to each address, and the +test makes more listtransactions and getbalance calls to confirm that the +importing nodes pick up the new transactions regardless of whether rescans +happened previously. +""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import (assert_raises_rpc_error, connect_nodes, sync_blocks, assert_equal, set_node_times) + +import collections +import enum +import itertools + +Call = enum.Enum("Call", "single multiaddress multiscript") +Data = enum.Enum("Data", "address pub priv") +Rescan = enum.Enum("Rescan", "no yes late_timestamp") + + +class Variant(collections.namedtuple("Variant", "call data rescan prune")): + """Helper for importing one key and verifying scanned transactions.""" + + def try_rpc(self, func, *args, **kwargs): + if self.expect_disabled: + assert_raises_rpc_error(-4, "Rescan is disabled in pruned mode", func, *args, **kwargs) + else: + return func(*args, **kwargs) + + def do_import(self, timestamp): + """Call one key import RPC.""" + rescan = self.rescan == Rescan.yes + + if self.call == Call.single: + if self.data == Data.address: + response = self.try_rpc(self.node.importaddress, address=self.address["address"], rescan=rescan) + elif self.data == Data.pub: + response = self.try_rpc(self.node.importpubkey, pubkey=self.address["pubkey"], rescan=rescan) + elif self.data == Data.priv: + response = self.try_rpc(self.node.importprivkey, privkey=self.key, rescan=rescan) + assert_equal(response, None) + + elif self.call in (Call.multiaddress, Call.multiscript): + response = self.node.importmulti([{ + "scriptPubKey": { + "address": self.address["address"] + } if self.call == Call.multiaddress else self.address["scriptPubKey"], + "timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0), + "pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [], + "keys": [self.key] if self.data == Data.priv else [], + "watchonly": self.data != Data.priv + }], {"rescan": self.rescan in (Rescan.yes, Rescan.late_timestamp)}) + assert_equal(response, [{"success": True}]) + + def check(self, txid=None, amount=None, confirmations=None): + """Verify that listreceivedbyaddress returns expected values.""" + + addresses = self.node.listreceivedbyaddress(minconf=0, include_watchonly=True, address_filter=self.address['address']) + if self.expected_txs: + assert_equal(len(addresses[0]["txids"]), self.expected_txs) + + if txid is not None: + address, = [ad for ad in addresses if txid in ad["txids"]] + assert_equal(address["address"], self.address["address"]) + assert_equal(address["amount"], self.expected_balance) + assert_equal(address["confirmations"], confirmations) + # Verify the transaction is correctly marked watchonly depending on + # 
whether the transaction pays to an imported public key or + # imported private key. The test setup ensures that transaction + # inputs will not be from watchonly keys (important because + # involvesWatchonly will be true if either the transaction output + # or inputs are watchonly). + if self.data != Data.priv: + assert_equal(address["involvesWatchonly"], True) + else: + assert_equal("involvesWatchonly" not in address, True) + + +# List of Variants for each way a key or address could be imported. +IMPORT_VARIANTS = [Variant(*variants) for variants in itertools.product(Call, Data, Rescan, (False, True))] + +# List of nodes to import keys to. Half the nodes will have pruning disabled, +# half will have it enabled. Different nodes will be used for imports that are +# expected to cause rescans, and imports that are not expected to cause +# rescans, in order to prevent rescans during later imports picking up +# transactions associated with earlier imports. This makes it easier to keep +# track of expected balances and transactions. +ImportNode = collections.namedtuple("ImportNode", "prune rescan") +IMPORT_NODES = [ImportNode(*fields) for fields in itertools.product((False, True), repeat=2)] + +# Rescans start at the earliest block up to 2 hours before the key timestamp. +TIMESTAMP_WINDOW = 2 * 60 * 60 + + +class ImportRescanTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + len(IMPORT_NODES) + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + extra_args = [["-addresstype=legacy"] for _ in range(self.num_nodes)] + for i, import_node in enumerate(IMPORT_NODES, 2): + if import_node.prune: + extra_args[i] += ["-prune=1"] + + self.add_nodes(self.num_nodes, extra_args=extra_args) + + # Import keys with pruning disabled + self.start_nodes(extra_args=[[]] * self.num_nodes) + super().import_deterministic_coinbase_privkeys() + self.stop_nodes() + + self.start_nodes() + for i in range(1, self.num_nodes): + connect_nodes(self.nodes[i], 0) + + def import_deterministic_coinbase_privkeys(self): + pass + + def run_test(self): + # Create one transaction on node 0 with a unique amount for + # each possible type of wallet import RPC. + for i, variant in enumerate(IMPORT_VARIANTS): + variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress()) + variant.key = self.nodes[1].dumpprivkey(variant.address["address"]) + variant.initial_amount = 1 - (i + 1) / 64 + variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount) + + # Generate a block containing the initial transactions, then another + # block further in the future (past the rescan window). + self.nodes[0].generate(1) + assert_equal(self.nodes[0].getrawmempool(), []) + timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + set_node_times(self.nodes, timestamp + TIMESTAMP_WINDOW + 1) + self.nodes[0].generate(1) + sync_blocks(self.nodes) + + # For each variation of wallet key import, invoke the import RPC and + # check the results from getbalance and listtransactions. 
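+        # Illustrative sanity check (sketch, not load-bearing for the test):
+        # IMPORT_VARIANTS is the full cross product of 3 call styles, 3 data
+        # types, 3 rescan modes and 2 prune flags, i.e. 54 variants.
+        assert_equal(len(IMPORT_VARIANTS), 54)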
+ for variant in IMPORT_VARIANTS: + variant.expect_disabled = variant.rescan == Rescan.yes and variant.prune and variant.call == Call.single + expect_rescan = variant.rescan == Rescan.yes and not variant.expect_disabled + variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))] + variant.do_import(timestamp) + if expect_rescan: + variant.expected_balance = variant.initial_amount + variant.expected_txs = 1 + variant.check(variant.initial_txid, variant.initial_amount, 2) + else: + variant.expected_balance = 0 + variant.expected_txs = 0 + variant.check() + + # Create new transactions sending to each address. + for i, variant in enumerate(IMPORT_VARIANTS): + variant.sent_amount = 1 - (2 * i + 1) / 128 + variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount) + + # Generate a block containing the new transactions. + self.nodes[0].generate(1) + assert_equal(self.nodes[0].getrawmempool(), []) + sync_blocks(self.nodes) + + # Check the latest results from getbalance and listtransactions. + for variant in IMPORT_VARIANTS: + if not variant.expect_disabled: + variant.expected_balance += variant.sent_amount + variant.expected_txs += 1 + variant.check(variant.sent_txid, variant.sent_amount, 1) + else: + variant.check() + +if __name__ == "__main__": + ImportRescanTest().main() diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py new file mode 100755 index 000000000..f78ff1979 --- /dev/null +++ b/test/functional/wallet_importmulti.py @@ -0,0 +1,463 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
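+# Request-shape sketch, using only fields that appear in the calls below
+# (addr/wif are placeholders): each entry carries a scriptPubKey, given as
+# raw hex or as an {"address": ...} object, plus a timestamp, with optional
+# pubkeys, keys, redeemscript, internal and watchonly fields:
+#   node.importmulti([{"scriptPubKey": {"address": addr},
+#                      "timestamp": "now",
+#                      "keys": [wif]}])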
+"""Test the importmulti RPC.""" + +from test_framework import script +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_greater_than, + assert_raises_rpc_error, + bytes_to_hex_str, +) + + +class ImportMultiTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.extra_args = [["-addresstype=legacy"], ["-addresstype=legacy"]] + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def setup_network(self): + self.setup_nodes() + + def run_test (self): + self.log.info("Mining blocks...") + self.nodes[0].generate(1) + self.nodes[1].generate(1) + timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] + + node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + + #Check only one address + assert_equal(node0_address1['ismine'], True) + + #Node 1 sync test + assert_equal(self.nodes[1].getblockcount(),1) + + #Address Test - before import + address_info = self.nodes[1].getaddressinfo(node0_address1['address']) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], False) + + + # RPC importmulti ----------------------------------------------- + + # Bitcoin Address + self.log.info("Should import an address") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], True) + assert_equal(address_assert['ismine'], False) + assert_equal(address_assert['timestamp'], timestamp) + watchonly_address = address['address'] + watchonly_timestamp = timestamp + + self.log.info("Should not import an invalid address") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": "not valid address", + }, + "timestamp": "now", + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -5) + assert_equal(result[0]['error']['message'], 'Invalid address') + + # ScriptPubKey + internal + self.log.info("Should import a scriptPubKey with internal flag") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": address['scriptPubKey'], + "timestamp": "now", + "internal": True + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], True) + assert_equal(address_assert['ismine'], False) + assert_equal(address_assert['timestamp'], timestamp) + + # Nonstandard scriptPubKey + !internal + self.log.info("Should not import a nonstandard scriptPubKey without internal flag") + nonstandardScriptPubKey = address['scriptPubKey'] + bytes_to_hex_str(script.CScript([script.OP_NOP])) + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": nonstandardScriptPubKey, + "timestamp": "now", + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -8) + assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + 
assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # Address + Public key + !Internal + self.log.info("Should import an address with public key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + "pubkeys": [ address['pubkey'] ] + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], True) + assert_equal(address_assert['ismine'], False) + assert_equal(address_assert['timestamp'], timestamp) + + + # ScriptPubKey + Public key + internal + self.log.info("Should import a scriptPubKey with internal and with public key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + request = [{ + "scriptPubKey": address['scriptPubKey'], + "timestamp": "now", + "pubkeys": [ address['pubkey'] ], + "internal": True + }] + result = self.nodes[1].importmulti(request) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], True) + assert_equal(address_assert['ismine'], False) + assert_equal(address_assert['timestamp'], timestamp) + + # Nonstandard scriptPubKey + Public key + !internal + self.log.info("Should not import a nonstandard scriptPubKey without internal and with public key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + request = [{ + "scriptPubKey": nonstandardScriptPubKey, + "timestamp": "now", + "pubkeys": [ address['pubkey'] ] + }] + result = self.nodes[1].importmulti(request) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -8) + assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + # Address + Private key + !watchonly + self.log.info("Should import an address with private key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address['address']) ] + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], True) + assert_equal(address_assert['timestamp'], timestamp) + + self.log.info("Should not import an address with private key if is already imported") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address['address']) ] + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -4) + assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script') + + # Address + Private key + watchonly + self.log.info("Should not import an address with private key and with watchonly") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": 
address['address'] + }, + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address['address']) ], + "watchonly": True + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -8) + assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + # ScriptPubKey + Private key + internal + self.log.info("Should import a scriptPubKey with internal and with private key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": address['scriptPubKey'], + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address['address']) ], + "internal": True + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], True) + assert_equal(address_assert['timestamp'], timestamp) + + # Nonstandard scriptPubKey + Private key + !internal + self.log.info("Should not import a nonstandard scriptPubKey without internal and with private key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": nonstandardScriptPubKey, + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address['address']) ] + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -8) + assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # P2SH address + sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) + self.nodes[1].generate(100) + self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) + self.nodes[1].generate(1) + timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] + + self.log.info("Should import a p2sh") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": multi_sig_script['address'] + }, + "timestamp": "now", + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) + assert_equal(address_assert['isscript'], True) + assert_equal(address_assert['iswatchonly'], True) + assert_equal(address_assert['timestamp'], timestamp) + p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] + assert_equal(p2shunspent['spendable'], False) + assert_equal(p2shunspent['solvable'], False) + + + # P2SH + Redeem script + sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + multi_sig_script = 
self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) + self.nodes[1].generate(100) + self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) + self.nodes[1].generate(1) + timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] + + self.log.info("Should import a p2sh with respective redeem script") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": multi_sig_script['address'] + }, + "timestamp": "now", + "redeemscript": multi_sig_script['redeemScript'] + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) + assert_equal(address_assert['timestamp'], timestamp) + + p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] + assert_equal(p2shunspent['spendable'], False) + assert_equal(p2shunspent['solvable'], True) + + + # P2SH + Redeem script + Private Keys + !Watchonly + sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) + self.nodes[1].generate(100) + self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) + self.nodes[1].generate(1) + timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] + + self.log.info("Should import a p2sh with respective redeem script and private keys") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": multi_sig_script['address'] + }, + "timestamp": "now", + "redeemscript": multi_sig_script['redeemScript'], + "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])] + }]) + assert_equal(result[0]['success'], True) + address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address']) + assert_equal(address_assert['timestamp'], timestamp) + + p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0] + assert_equal(p2shunspent['spendable'], False) + assert_equal(p2shunspent['solvable'], True) + + # P2SH + Redeem script + Private Keys + Watchonly + sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']]) + self.nodes[1].generate(100) + self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00) + self.nodes[1].generate(1) + timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime'] + + self.log.info("Should import a p2sh with respective redeem script and private keys") + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": multi_sig_script['address'] + }, + "timestamp": "now", + "redeemscript": multi_sig_script['redeemScript'], + "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])], + "watchonly": True + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -8) + assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys') + + + # 
Address + Public key + !Internal + Wrong pubkey + self.log.info("Should not import an address with a wrong public key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + "pubkeys": [ address2['pubkey'] ] + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -5) + assert_equal(result[0]['error']['message'], 'Consistency check failed') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # ScriptPubKey + Public key + internal + Wrong pubkey + self.log.info("Should not import a scriptPubKey with internal and with a wrong public key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + request = [{ + "scriptPubKey": address['scriptPubKey'], + "timestamp": "now", + "pubkeys": [ address2['pubkey'] ], + "internal": True + }] + result = self.nodes[1].importmulti(request) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -5) + assert_equal(result[0]['error']['message'], 'Consistency check failed') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # Address + Private key + !watchonly + Wrong private key + self.log.info("Should not import an address with a wrong private key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": { + "address": address['address'] + }, + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address2['address']) ] + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -5) + assert_equal(result[0]['error']['message'], 'Consistency check failed') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # ScriptPubKey + Private key + internal + Wrong private key + self.log.info("Should not import a scriptPubKey with internal and with a wrong private key") + address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress()) + result = self.nodes[1].importmulti([{ + "scriptPubKey": address['scriptPubKey'], + "timestamp": "now", + "keys": [ self.nodes[0].dumpprivkey(address2['address']) ], + "internal": True + }]) + assert_equal(result[0]['success'], False) + assert_equal(result[0]['error']['code'], -5) + assert_equal(result[0]['error']['message'], 'Consistency check failed') + address_assert = self.nodes[1].getaddressinfo(address['address']) + assert_equal(address_assert['iswatchonly'], False) + assert_equal(address_assert['ismine'], False) + assert_equal('timestamp' in address_assert, False) + + + # Importing existing watch only address with new timestamp should replace saved timestamp. 
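+        # (A "now" timestamp is resolved by the node to the current chain
+        # tip's time, so the re-import below should land on the newer
+        # `timestamp` captured above rather than the older
+        # `watchonly_timestamp`.)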
+        assert_greater_than(timestamp, watchonly_timestamp)
+        self.log.info("Should replace previously saved watch only timestamp.")
+        result = self.nodes[1].importmulti([{
+            "scriptPubKey": {
+                "address": watchonly_address,
+            },
+            "timestamp": "now",
+        }])
+        assert_equal(result[0]['success'], True)
+        address_assert = self.nodes[1].getaddressinfo(watchonly_address)
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], timestamp)
+        watchonly_timestamp = timestamp
+
+
+        # Restart nodes to check for proper serialization/deserialization of watch only address
+        self.stop_nodes()
+        self.start_nodes()
+        address_assert = self.nodes[1].getaddressinfo(watchonly_address)
+        assert_equal(address_assert['iswatchonly'], True)
+        assert_equal(address_assert['ismine'], False)
+        assert_equal(address_assert['timestamp'], watchonly_timestamp)
+
+        # Bad or missing timestamps
+        self.log.info("Should throw on invalid or missing timestamp values")
+        assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
+                                self.nodes[1].importmulti, [{
+                                    "scriptPubKey": address['scriptPubKey'],
+                                }])
+        assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
+                                self.nodes[1].importmulti, [{
+                                    "scriptPubKey": address['scriptPubKey'],
+                                    "timestamp": "",
+                                }])
+
+
+if __name__ == '__main__':
+    ImportMultiTest().main()
diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py
new file mode 100755
index 000000000..26b181db3
--- /dev/null
+++ b/test/functional/wallet_importprunedfunds.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
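+# Flow sketch (node0/node1 are placeholder names): the funded wallet exports
+# a raw transaction plus its merkle proof, and a second wallet imports the
+# pair without any rescan:
+#   rawtx = node0.gettransaction(txid)['hex']
+#   proof = node0.gettxoutproof([txid])
+#   node1.importprunedfunds(rawtx, proof)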
+"""Test the importprunedfunds and removeprunedfunds RPCs.""" +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, +) + +class ImportPrunedFundsTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.log.info("Mining blocks...") + self.nodes[0].generate(101) + + self.sync_all() + + # address + address1 = self.nodes[0].getnewaddress() + # pubkey + address2 = self.nodes[0].getnewaddress() + # privkey + address3 = self.nodes[0].getnewaddress() + address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey + + # Check only one address + address_info = self.nodes[0].getaddressinfo(address1) + assert_equal(address_info['ismine'], True) + + self.sync_all() + + # Node 1 sync test + assert_equal(self.nodes[1].getblockcount(), 101) + + # Address Test - before import + address_info = self.nodes[1].getaddressinfo(address1) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], False) + + address_info = self.nodes[1].getaddressinfo(address2) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], False) + + address_info = self.nodes[1].getaddressinfo(address3) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], False) + + # Send funds to self + txnid1 = self.nodes[0].sendtoaddress(address1, 0.1) + self.nodes[0].generate(1) + rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex'] + proof1 = self.nodes[0].gettxoutproof([txnid1]) + + txnid2 = self.nodes[0].sendtoaddress(address2, 0.05) + self.nodes[0].generate(1) + rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex'] + proof2 = self.nodes[0].gettxoutproof([txnid2]) + + txnid3 = self.nodes[0].sendtoaddress(address3, 0.025) + self.nodes[0].generate(1) + rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex'] + proof3 = self.nodes[0].gettxoutproof([txnid3]) + + self.sync_all() + + # Import with no affiliated address + assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1) + + balance1 = self.nodes[1].getbalance() + assert_equal(balance1, Decimal(0)) + + # Import with affiliated address with no rescan + self.nodes[1].importaddress(address=address2, rescan=False) + self.nodes[1].importprunedfunds(rawtxn2, proof2) + assert [tx for tx in self.nodes[1].listtransactions(include_watchonly=True) if tx['txid'] == txnid2] + + # Import with private key with no rescan + self.nodes[1].importprivkey(privkey=address3_privkey, rescan=False) + self.nodes[1].importprunedfunds(rawtxn3, proof3) + assert [tx for tx in self.nodes[1].listtransactions() if tx['txid'] == txnid3] + balance3 = self.nodes[1].getbalance() + assert_equal(balance3, Decimal('0.025')) + + # Addresses Test - after import + address_info = self.nodes[1].getaddressinfo(address1) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], False) + address_info = self.nodes[1].getaddressinfo(address2) + assert_equal(address_info['iswatchonly'], True) + assert_equal(address_info['ismine'], False) + address_info = self.nodes[1].getaddressinfo(address3) + assert_equal(address_info['iswatchonly'], False) + assert_equal(address_info['ismine'], True) + + # Remove transactions + assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", 
self.nodes[1].removeprunedfunds, txnid1) + assert not [tx for tx in self.nodes[1].listtransactions(include_watchonly=True) if tx['txid'] == txnid1] + + self.nodes[1].removeprunedfunds(txnid2) + assert not [tx for tx in self.nodes[1].listtransactions(include_watchonly=True) if tx['txid'] == txnid2] + + self.nodes[1].removeprunedfunds(txnid3) + assert not [tx for tx in self.nodes[1].listtransactions(include_watchonly=True) if tx['txid'] == txnid3] + +if __name__ == '__main__': + ImportPrunedFundsTest().main() diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py new file mode 100755 index 000000000..51afa0cb1 --- /dev/null +++ b/test/functional/wallet_keypool.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the wallet keypool and interaction with wallet encryption/locking.""" + +import time + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error + +class KeyPoolTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + nodes = self.nodes + addr_before_encrypting = nodes[0].getnewaddress() + addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting) + wallet_info_old = nodes[0].getwalletinfo() + assert_equal(wallet_info_old['hdseedid'], wallet_info_old['hdmasterkeyid']) + assert(addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']) + + # Encrypt wallet and wait to terminate + nodes[0].encryptwallet('test') + # Keep creating keys + addr = nodes[0].getnewaddress() + addr_data = nodes[0].getaddressinfo(addr) + wallet_info = nodes[0].getwalletinfo() + assert_equal(wallet_info['hdseedid'], wallet_info['hdmasterkeyid']) + assert(addr_before_encrypting_data['hdseedid'] != wallet_info['hdseedid']) + assert(addr_data['hdseedid'] == wallet_info['hdseedid']) + assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress) + + # put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min) + nodes[0].walletpassphrase('test', 12000) + nodes[0].keypoolrefill(6) + nodes[0].walletlock() + wi = nodes[0].getwalletinfo() + assert_equal(wi['keypoolsize_hd_internal'], 6) + assert_equal(wi['keypoolsize'], 6) + + # drain the internal keys + nodes[0].getrawchangeaddress() + nodes[0].getrawchangeaddress() + nodes[0].getrawchangeaddress() + nodes[0].getrawchangeaddress() + nodes[0].getrawchangeaddress() + nodes[0].getrawchangeaddress() + addr = set() + # the next one should fail + assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress) + + # drain the external keys + addr.add(nodes[0].getnewaddress()) + addr.add(nodes[0].getnewaddress()) + addr.add(nodes[0].getnewaddress()) + addr.add(nodes[0].getnewaddress()) + addr.add(nodes[0].getnewaddress()) + addr.add(nodes[0].getnewaddress()) + assert(len(addr) == 6) + # the next one should fail + assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress) + + # refill keypool with three new addresses + nodes[0].walletpassphrase('test', 1) + nodes[0].keypoolrefill(3) + + # test walletpassphrase timeout + time.sleep(1.1) + assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 
0) + + # drain them by mining + nodes[0].generate(1) + nodes[0].generate(1) + nodes[0].generate(1) + assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].generate, 1) + + nodes[0].walletpassphrase('test', 100) + nodes[0].keypoolrefill(100) + wi = nodes[0].getwalletinfo() + assert_equal(wi['keypoolsize_hd_internal'], 100) + assert_equal(wi['keypoolsize'], 100) + +if __name__ == '__main__': + KeyPoolTest().main() diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py new file mode 100755 index 000000000..f1a441c39 --- /dev/null +++ b/test/functional/wallet_keypool_topup.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test HD Wallet keypool restore function. + +Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks. + +- Start node1, shutdown and backup wallet. +- Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110. +- Stop node1, clear the datadir, move wallet file back into the datadir and restart node1. +- connect node1 to node0. Verify that they sync and node1 receives its funds.""" +import os +import shutil + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + connect_nodes_bi, + sync_blocks, +) + + +class KeypoolRestoreTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.extra_args = [[], ['-keypool=100']] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + wallet_path = os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat") + wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak") + self.nodes[0].generate(101) + + self.log.info("Make backup of wallet") + self.stop_node(1) + shutil.copyfile(wallet_path, wallet_backup_path) + self.start_node(1, self.extra_args[1]) + connect_nodes_bi(self.nodes, 0, 1) + + self.log.info("Generate keys for wallet") + for _ in range(90): + addr_oldpool = self.nodes[1].getnewaddress() + for _ in range(20): + addr_extpool = self.nodes[1].getnewaddress() + + self.log.info("Send funds to wallet") + self.nodes[0].sendtoaddress(addr_oldpool, 10) + self.nodes[0].generate(1) + self.nodes[0].sendtoaddress(addr_extpool, 5) + self.nodes[0].generate(1) + sync_blocks(self.nodes) + + self.log.info("Restart node with wallet backup") + self.stop_node(1) + shutil.copyfile(wallet_backup_path, wallet_path) + self.start_node(1, self.extra_args[1]) + connect_nodes_bi(self.nodes, 0, 1) + self.sync_all() + + self.log.info("Verify keypool is restored and balance is correct") + assert_equal(self.nodes[1].getbalance(), 15) + assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive") + # Check that we have marked all keys up to the used keypool key as used + assert_equal(self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'") + + +if __name__ == '__main__': + KeypoolRestoreTest().main() diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py new file mode 100755 index 000000000..8d7c77bb9 --- /dev/null +++ b/test/functional/wallet_labels.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python3 +# Copyright (c) 2016-2018 The Bitcoin Core developers +# 
Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test label RPCs. + +RPCs tested are: + - getaddressesbylabel + - listaddressgroupings + - setlabel +""" +from collections import defaultdict + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error + +class WalletLabelsTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 1 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Check that there's no UTXO on the node + node = self.nodes[0] + assert_equal(len(node.listunspent()), 0) + + # Note each time we call generate, all generated coins go into + # the same address, so we call twice to get two addresses w/50 each + node.generate(1) + node.generate(101) + assert_equal(node.getbalance(), 100) + + # there should be 2 address groups + # each with 1 address with a balance of 50 Bitcoins + address_groups = node.listaddressgroupings() + assert_equal(len(address_groups), 2) + # the addresses aren't linked now, but will be after we send to the + # common address + linked_addresses = set() + for address_group in address_groups: + assert_equal(len(address_group), 1) + assert_equal(len(address_group[0]), 2) + assert_equal(address_group[0][1], 50) + linked_addresses.add(address_group[0][0]) + + # send 50 from each address to a third address not in this wallet + common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr" + node.sendmany( + amounts={common_address: 100}, + subtractfeefrom=[common_address], + minconf=1, + ) + # there should be 1 address group, with the previously + # unlinked addresses now linked (they both have 0 balance) + address_groups = node.listaddressgroupings() + assert_equal(len(address_groups), 1) + assert_equal(len(address_groups[0]), 2) + assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses) + assert_equal([a[1] for a in address_groups[0]], [0, 0]) + + node.generate(1) + + # we want to reset so that the "" label has what's expected. + # otherwise we're off by exactly the fee amount as that's mined + # and matures in the next 100 blocks + amount_to_send = 1.0 + + # Create labels and make sure subsequent label API calls + # recognize the label/address associations. + labels = [Label(name) for name in ("a", "b", "c", "d", "e")] + for label in labels: + address = node.getnewaddress(label.name) + label.add_receive_address(address) + label.verify(node) + + # Check all labels are returned by listlabels. + assert_equal(node.listlabels(), [label.name for label in labels]) + + # Send a transaction to each label. + for label in labels: + node.sendtoaddress(label.addresses[0], amount_to_send) + label.verify(node) + + # Check the amounts received. + node.generate(1) + for label in labels: + assert_equal( + node.getreceivedbyaddress(label.addresses[0]), amount_to_send) + assert_equal(node.getreceivedbylabel(label.name), amount_to_send) + + for i, label in enumerate(labels): + to_label = labels[(i + 1) % len(labels)] + node.sendtoaddress(to_label.addresses[0], amount_to_send) + node.generate(1) + for label in labels: + address = node.getnewaddress(label.name) + label.add_receive_address(address) + label.verify(node) + assert_equal(node.getreceivedbylabel(label.name), 2) + label.verify(node) + node.generate(101) + + # Check that setlabel can assign a label to a new unused address. 
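+        # (setlabel(address, name) files an existing wallet address under
+        # `name`; the Label helper defined below mirrors that mapping locally
+        # so verify() can compare it against getaddressesbylabel.)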
+ for label in labels: + address = node.getnewaddress() + node.setlabel(address, label.name) + label.add_address(address) + label.verify(node) + assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "") + + # Check that addmultisigaddress can assign labels. + for label in labels: + addresses = [] + for x in range(10): + addresses.append(node.getnewaddress()) + multisig_address = node.addmultisigaddress(5, addresses, label.name)['address'] + label.add_address(multisig_address) + label.purpose[multisig_address] = "send" + label.verify(node) + node.generate(101) + + # Check that setlabel can change the label of an address from a + # different label. + change_label(node, labels[0].addresses[0], labels[0], labels[1]) + + # Check that setlabel can set the label of an address already + # in the label. This is a no-op. + change_label(node, labels[2].addresses[0], labels[2], labels[2]) + +class Label: + def __init__(self, name): + # Label name + self.name = name + # Current receiving address associated with this label. + self.receive_address = None + # List of all addresses assigned with this label + self.addresses = [] + # Map of address to address purpose + self.purpose = defaultdict(lambda: "receive") + + def add_address(self, address): + assert_equal(address not in self.addresses, True) + self.addresses.append(address) + + def add_receive_address(self, address): + self.add_address(address) + + def verify(self, node): + if self.receive_address is not None: + assert self.receive_address in self.addresses + + for address in self.addresses: + assert_equal( + node.getaddressinfo(address)['labels'][0], + {"name": self.name, + "purpose": self.purpose[address]}) + assert_equal(node.getaddressinfo(address)['label'], self.name) + + assert_equal( + node.getaddressesbylabel(self.name), + {address: {"purpose": self.purpose[address]} for address in self.addresses}) + +def change_label(node, address, old_label, new_label): + assert_equal(address in old_label.addresses, True) + node.setlabel(address, new_label.name) + + old_label.addresses.remove(address) + new_label.add_address(address) + + old_label.verify(node) + new_label.verify(node) + +if __name__ == '__main__': + WalletLabelsTest().main() diff --git a/test/functional/wallet_listreceivedby.py b/test/functional/wallet_listreceivedby.py new file mode 100755 index 000000000..3485c4470 --- /dev/null +++ b/test/functional/wallet_listreceivedby.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
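+# Signature sketch, argument names as used in the calls in run_test:
+#   listreceivedbyaddress(minconf, include_empty, include_watchonly,
+#                         address_filter)
+# getreceivedbyaddress(address, minconf) returns only the summed amount.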
+"""Test the listreceivedbyaddress RPC.""" +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_array_result, + assert_equal, + assert_raises_rpc_error, + sync_blocks, +) + + +class ReceivedByTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + + def import_deterministic_coinbase_privkeys(self): + assert_equal(0, len(self.nodes[1].listreceivedbyaddress(minconf=0, include_empty=True, include_watchonly=True))) + super().import_deterministic_coinbase_privkeys() + self.num_cb_reward_addresses = len(self.nodes[1].listreceivedbyaddress(minconf=0, include_empty=True, include_watchonly=True)) + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Generate block to get out of IBD + self.nodes[0].generate(1) + sync_blocks(self.nodes) + + self.log.info("listreceivedbyaddress Test") + + # Send from node 0 to 1 + addr = self.nodes[1].getnewaddress() + txid = self.nodes[0].sendtoaddress(addr, 0.1) + self.sync_all() + + # Check not listed in listreceivedbyaddress because has 0 confirmations + assert_array_result(self.nodes[1].listreceivedbyaddress(), + {"address": addr}, + {}, + True) + # Bury Tx under 10 block so it will be returned by listreceivedbyaddress + self.nodes[1].generate(10) + self.sync_all() + assert_array_result(self.nodes[1].listreceivedbyaddress(), + {"address": addr}, + {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]}) + # With min confidence < 10 + assert_array_result(self.nodes[1].listreceivedbyaddress(5), + {"address": addr}, + {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]}) + # With min confidence > 10, should not find Tx + assert_array_result(self.nodes[1].listreceivedbyaddress(11), {"address": addr}, {}, True) + + # Empty Tx + empty_addr = self.nodes[1].getnewaddress() + assert_array_result(self.nodes[1].listreceivedbyaddress(0, True), + {"address": empty_addr}, + {"address": empty_addr, "label": "", "amount": 0, "confirmations": 0, "txids": []}) + + # Test Address filtering + # Only on addr + expected = {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]} + res = self.nodes[1].listreceivedbyaddress(minconf=0, include_empty=True, include_watchonly=True, address_filter=addr) + assert_array_result(res, {"address": addr}, expected) + assert_equal(len(res), 1) + # Error on invalid address + assert_raises_rpc_error(-4, "address_filter parameter was invalid", self.nodes[1].listreceivedbyaddress, minconf=0, include_empty=True, include_watchonly=True, address_filter="bamboozling") + # Another address receive money + res = self.nodes[1].listreceivedbyaddress(0, True, True) + assert_equal(len(res), 2 + self.num_cb_reward_addresses) # Right now 2 entries + other_addr = self.nodes[1].getnewaddress() + txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1) + self.nodes[0].generate(1) + self.sync_all() + # Same test as above should still pass + expected = {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 11, "txids": [txid, ]} + res = self.nodes[1].listreceivedbyaddress(0, True, True, addr) + assert_array_result(res, {"address": addr}, expected) + assert_equal(len(res), 1) + # Same test as above but with other_addr should still pass + expected = {"address": other_addr, "label": "", "amount": Decimal("0.1"), "confirmations": 1, "txids": [txid2, ]} + res = 
self.nodes[1].listreceivedbyaddress(0, True, True, other_addr) + assert_array_result(res, {"address": other_addr}, expected) + assert_equal(len(res), 1) + # Should be two entries though without filter + res = self.nodes[1].listreceivedbyaddress(0, True, True) + assert_equal(len(res), 3 + self.num_cb_reward_addresses) # Became 3 entries + + # Not on random addr + other_addr = self.nodes[0].getnewaddress() # note on node[0]! just a random addr + res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr) + assert_equal(len(res), 0) + + self.log.info("getreceivedbyaddress Test") + + # Send from node 0 to 1 + addr = self.nodes[1].getnewaddress() + txid = self.nodes[0].sendtoaddress(addr, 0.1) + self.sync_all() + + # Check balance is 0 because of 0 confirmations + balance = self.nodes[1].getreceivedbyaddress(addr) + assert_equal(balance, Decimal("0.0")) + + # Check balance is 0.1 + balance = self.nodes[1].getreceivedbyaddress(addr, 0) + assert_equal(balance, Decimal("0.1")) + + # Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress + self.nodes[1].generate(10) + self.sync_all() + balance = self.nodes[1].getreceivedbyaddress(addr) + assert_equal(balance, Decimal("0.1")) + + # Trying to getreceivedby for an address the wallet doesn't own should return an error + assert_raises_rpc_error(-4, "Address not found in wallet", self.nodes[0].getreceivedbyaddress, addr) + + self.log.info("listreceivedbylabel + getreceivedbylabel Test") + + # set pre-state + label = '' + address = self.nodes[1].getnewaddress() + assert_equal(self.nodes[1].getaddressinfo(address)['label'], label) + received_by_label_json = [r for r in self.nodes[1].listreceivedbylabel() if r["label"] == label][0] + balance_by_label = self.nodes[1].getreceivedbylabel(label) + + txid = self.nodes[0].sendtoaddress(addr, 0.1) + self.sync_all() + + # listreceivedbylabel should return received_by_label_json because of 0 confirmations + assert_array_result(self.nodes[1].listreceivedbylabel(), + {"label": label}, + received_by_label_json) + + # getreceivedbyaddress should return same balance because of 0 confirmations + balance = self.nodes[1].getreceivedbylabel(label) + assert_equal(balance, balance_by_label) + + self.nodes[1].generate(10) + self.sync_all() + # listreceivedbylabel should return updated received list + assert_array_result(self.nodes[1].listreceivedbylabel(), + {"label": label}, + {"label": received_by_label_json["label"], "amount": (received_by_label_json["amount"] + Decimal("0.1"))}) + + # getreceivedbylabel should return updated receive total + balance = self.nodes[1].getreceivedbylabel(label) + assert_equal(balance, balance_by_label + Decimal("0.1")) + + # Create a new label named "mynewlabel" that has a 0 balance + address = self.nodes[1].getnewaddress() + self.nodes[1].setlabel(address, "mynewlabel") + received_by_label_json = [r for r in self.nodes[1].listreceivedbylabel(0, True) if r["label"] == "mynewlabel"][0] + + # Test includeempty of listreceivedbylabel + assert_equal(received_by_label_json["amount"], Decimal("0.0")) + + # Test getreceivedbylabel for 0 amount labels + balance = self.nodes[1].getreceivedbylabel("mynewlabel") + assert_equal(balance, Decimal("0.0")) + +if __name__ == '__main__': + ReceivedByTest().main() diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py new file mode 100755 index 000000000..25ab22237 --- /dev/null +++ b/test/functional/wallet_listsinceblock.py @@ -0,0 +1,285 @@ +#!/usr/bin/env python3 +# Copyright (c) 
2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the listsincelast RPC.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_array_result, assert_raises_rpc_error + +class ListSinceBlockTest (BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + self.setup_clean_chain = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.nodes[2].generate(101) + self.sync_all() + + self.test_no_blockhash() + self.test_invalid_blockhash() + self.test_reorg() + self.test_double_spend() + self.test_double_send() + + def test_no_blockhash(self): + txid = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) + blockhash, = self.nodes[2].generate(1) + self.sync_all() + + txs = self.nodes[0].listtransactions() + assert_array_result(txs, {"txid": txid}, { + "category": "receive", + "amount": 1, + "blockhash": blockhash, + "confirmations": 1, + }) + assert_equal( + self.nodes[0].listsinceblock(), + {"lastblock": blockhash, + "removed": [], + "transactions": txs}) + assert_equal( + self.nodes[0].listsinceblock(""), + {"lastblock": blockhash, + "removed": [], + "transactions": txs}) + + def test_invalid_blockhash(self): + assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock, + "42759cde25462784395a337460bde75f58e73d3f08bd31fdc3507cbac856a2c4") + assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock, + "0000000000000000000000000000000000000000000000000000000000000000") + assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 11, for 'invalid-hex')", self.nodes[0].listsinceblock, + "invalid-hex") + assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'Z000000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].listsinceblock, + "Z000000000000000000000000000000000000000000000000000000000000000") + + def test_reorg(self): + ''' + `listsinceblock` did not behave correctly when handed a block that was + no longer in the main chain: + + ab0 + / \ + aa1 [tx0] bb1 + | | + aa2 bb2 + | | + aa3 bb3 + | + bb4 + + Consider a client that has only seen block `aa3` above. It asks the node + to `listsinceblock aa3`. But at some point prior the main chain switched + to the bb chain. + + Previously: listsinceblock would find height=4 for block aa3 and compare + this to height=5 for the tip of the chain (bb4). It would then return + results restricted to bb3-bb4. + + Now: listsinceblock finds the fork at ab0 and returns results in the + range bb1-bb4. + + This test only checks that [tx0] is present. 
+ ''' + + # Split network into two + self.split_network() + + # send to nodes[0] from nodes[2] + senttx = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1) + + # generate on both sides + lastblockhash = self.nodes[1].generate(6)[5] + self.nodes[2].generate(7) + self.log.info('lastblockhash=%s' % (lastblockhash)) + + self.sync_all([self.nodes[:2], self.nodes[2:]]) + + self.join_network() + + # listsinceblock(lastblockhash) should now include tx, as seen from nodes[0] + lsbres = self.nodes[0].listsinceblock(lastblockhash) + found = False + for tx in lsbres['transactions']: + if tx['txid'] == senttx: + found = True + break + assert found + + def test_double_spend(self): + ''' + This tests the case where the same UTXO is spent twice on two separate + blocks as part of a reorg. + + ab0 + / \ + aa1 [tx1] bb1 [tx2] + | | + aa2 bb2 + | | + aa3 bb3 + | + bb4 + + Problematic case: + + 1. User 1 receives BTC in tx1 from utxo1 in block aa1. + 2. User 2 receives BTC in tx2 from utxo1 (same) in block bb1 + 3. User 1 sees 2 confirmations at block aa3. + 4. Reorg into bb chain. + 5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now + invalidated. + + Currently the solution to this is to detect that a reorg'd block is + asked for in listsinceblock, and to iterate back over existing blocks up + until the fork point, and to include all transactions that relate to the + node wallet. + ''' + + self.sync_all() + + # Split network into two + self.split_network() + + # share utxo between nodes[1] and nodes[2] + utxos = self.nodes[2].listunspent() + utxo = utxos[0] + privkey = self.nodes[2].dumpprivkey(utxo['address']) + self.nodes[1].importprivkey(privkey) + + # send from nodes[1] using utxo to nodes[0] + change = '%.8f' % (float(utxo['amount']) - 1.0003) + recipient_dict = { + self.nodes[0].getnewaddress(): 1, + self.nodes[1].getnewaddress(): change, + } + utxo_dicts = [{ + 'txid': utxo['txid'], + 'vout': utxo['vout'], + }] + txid1 = self.nodes[1].sendrawtransaction( + self.nodes[1].signrawtransactionwithwallet( + self.nodes[1].createrawtransaction(utxo_dicts, recipient_dict))['hex']) + + # send from nodes[2] using utxo to nodes[3] + recipient_dict2 = { + self.nodes[3].getnewaddress(): 1, + self.nodes[2].getnewaddress(): change, + } + self.nodes[2].sendrawtransaction( + self.nodes[2].signrawtransactionwithwallet( + self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict2))['hex']) + + # generate on both sides + lastblockhash = self.nodes[1].generate(3)[2] + self.nodes[2].generate(4) + + self.join_network() + + self.sync_all() + + # gettransaction should work for txid1 + assert self.nodes[0].gettransaction(txid1)['txid'] == txid1, "gettransaction failed to find txid1" + + # listsinceblock(lastblockhash) should now include txid1, as seen from nodes[0] + lsbres = self.nodes[0].listsinceblock(lastblockhash) + assert any(tx['txid'] == txid1 for tx in lsbres['removed']) + + # but it should not include 'removed' if include_removed=false + lsbres2 = self.nodes[0].listsinceblock(blockhash=lastblockhash, include_removed=False) + assert 'removed' not in lsbres2 + + def test_double_send(self): + ''' + This tests the case where the same transaction is submitted twice on two + separate blocks as part of a reorg. The former will vanish and the + latter will appear as the true transaction (with confirmations dropping + as a result). + + ab0 + / \ + aa1 [tx1] bb1 + | | + aa2 bb2 + | | + aa3 bb3 [tx1] + | + bb4 + + Asserted: + + 1. tx1 is listed in listsinceblock. + 2. 
It is included in 'removed' as it was removed, even though it is now + present in a different block. + 3. It is listed with a confirmation count of 2 (bb3, bb4), not + 3 (aa1, aa2, aa3). + ''' + + self.sync_all() + + # Split network into two + self.split_network() + + # create and sign a transaction + utxos = self.nodes[2].listunspent() + utxo = utxos[0] + change = '%.8f' % (float(utxo['amount']) - 1.0003) + recipient_dict = { + self.nodes[0].getnewaddress(): 1, + self.nodes[2].getnewaddress(): change, + } + utxo_dicts = [{ + 'txid': utxo['txid'], + 'vout': utxo['vout'], + }] + signedtxres = self.nodes[2].signrawtransactionwithwallet( + self.nodes[2].createrawtransaction(utxo_dicts, recipient_dict)) + assert signedtxres['complete'] + + signedtx = signedtxres['hex'] + + # send from nodes[1]; this will end up in aa1 + txid1 = self.nodes[1].sendrawtransaction(signedtx) + + # generate bb1-bb2 on right side + self.nodes[2].generate(2) + + # send from nodes[2]; this will end up in bb3 + txid2 = self.nodes[2].sendrawtransaction(signedtx) + + assert_equal(txid1, txid2) + + # generate on both sides + lastblockhash = self.nodes[1].generate(3)[2] + self.nodes[2].generate(2) + + self.join_network() + + self.sync_all() + + # gettransaction should work for txid1 + self.nodes[0].gettransaction(txid1) + + # listsinceblock(lastblockhash) should now include txid1 in transactions + # as well as in removed + lsbres = self.nodes[0].listsinceblock(lastblockhash) + assert any(tx['txid'] == txid1 for tx in lsbres['transactions']) + assert any(tx['txid'] == txid1 for tx in lsbres['removed']) + + # find transaction and ensure confirmations is valid + for tx in lsbres['transactions']: + if tx['txid'] == txid1: + assert_equal(tx['confirmations'], 2) + + # the same check for the removed array; confirmations should STILL be 2 + for tx in lsbres['removed']: + if tx['txid'] == txid1: + assert_equal(tx['confirmations'], 2) + +if __name__ == '__main__': + ListSinceBlockTest().main() diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py new file mode 100755 index 000000000..5a17395ab --- /dev/null +++ b/test/functional/wallet_listtransactions.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the listtransactions API.""" +from decimal import Decimal +from io import BytesIO + +from test_framework.messages import COIN, CTransaction +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_array_result, + assert_equal, + bytes_to_hex_str, + hex_str_to_bytes, + sync_mempools, +) + +def tx_from_hex(hexstring): + tx = CTransaction() + f = BytesIO(hex_str_to_bytes(hexstring)) + tx.deserialize(f) + return tx + +class ListTransactionsTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 2 + self.enable_mocktime() + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Simple send, 0 to 1: + txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1) + self.sync_all() + assert_array_result(self.nodes[0].listtransactions(), + {"txid": txid}, + {"category": "send", "amount": Decimal("-0.1"), "confirmations": 0}) + assert_array_result(self.nodes[1].listtransactions(), + {"txid": txid}, + {"category": "receive", "amount": Decimal("0.1"), "confirmations": 0}) + # mine a block, confirmations should change: + self.nodes[0].generate(1) + self.sync_all() + assert_array_result(self.nodes[0].listtransactions(), + {"txid": txid}, + {"category": "send", "amount": Decimal("-0.1"), "confirmations": 1}) + assert_array_result(self.nodes[1].listtransactions(), + {"txid": txid}, + {"category": "receive", "amount": Decimal("0.1"), "confirmations": 1}) + + # send-to-self: + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2) + assert_array_result(self.nodes[0].listtransactions(), + {"txid": txid, "category": "send"}, + {"amount": Decimal("-0.2")}) + assert_array_result(self.nodes[0].listtransactions(), + {"txid": txid, "category": "receive"}, + {"amount": Decimal("0.2")}) + + # sendmany from node1: twice to self, twice to node2: + send_to = {self.nodes[0].getnewaddress(): 0.11, + self.nodes[1].getnewaddress(): 0.22, + self.nodes[0].getnewaddress(): 0.33, + self.nodes[1].getnewaddress(): 0.44} + txid = self.nodes[1].sendmany("", send_to) + self.sync_all() + assert_array_result(self.nodes[1].listtransactions(), + {"category": "send", "amount": Decimal("-0.11")}, + {"txid": txid}) + assert_array_result(self.nodes[0].listtransactions(), + {"category": "receive", "amount": Decimal("0.11")}, + {"txid": txid}) + assert_array_result(self.nodes[1].listtransactions(), + {"category": "send", "amount": Decimal("-0.22")}, + {"txid": txid}) + assert_array_result(self.nodes[1].listtransactions(), + {"category": "receive", "amount": Decimal("0.22")}, + {"txid": txid}) + assert_array_result(self.nodes[1].listtransactions(), + {"category": "send", "amount": Decimal("-0.33")}, + {"txid": txid}) + assert_array_result(self.nodes[0].listtransactions(), + {"category": "receive", "amount": Decimal("0.33")}, + {"txid": txid}) + assert_array_result(self.nodes[1].listtransactions(), + {"category": "send", "amount": Decimal("-0.44")}, + {"txid": txid}) + assert_array_result(self.nodes[1].listtransactions(), + {"category": "receive", "amount": Decimal("0.44")}, + {"txid": txid}) + + pubkey = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey'] + multisig = self.nodes[1].createmultisig(1, [pubkey]) + self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True) + txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1) + self.nodes[1].generate(1) + self.sync_all() + assert not [tx for tx in self.nodes[0].listtransactions(dummy="*", count=100, 
skip=0, include_watchonly=False) if "label" in tx and tx["label"] == "watchonly"] + txs = [tx for tx in self.nodes[0].listtransactions(dummy="*", count=100, skip=0, include_watchonly=True) if "label" in tx and tx['label'] == 'watchonly'] + assert_array_result(txs, {"category": "receive", "amount": Decimal("0.1")}, {"txid": txid}) + + self.run_rbf_opt_in_test() + + # Check that the opt-in-rbf flag works properly, for sent and received + # transactions. + def run_rbf_opt_in_test(self): + # Check whether a transaction signals opt-in RBF itself + def is_opt_in(node, txid): + rawtx = node.getrawtransaction(txid, 1) + for x in rawtx["vin"]: + if x["sequence"] < 0xfffffffe: + return True + return False + + # Find an unconfirmed output matching a certain txid + def get_unconfirmed_utxo_entry(node, txid_to_match): + utxo = node.listunspent(0, 0) + for i in utxo: + if i["txid"] == txid_to_match: + return i + return None + + # 1. Chain a few transactions that don't opt-in. + txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1) + assert(not is_opt_in(self.nodes[0], txid_1)) + assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"}) + sync_mempools(self.nodes) + assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"}) + + # Tx2 will build off txid_1, still not opting in to RBF. + utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1) + assert_equal(utxo_to_use["safe"], True) + utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1) + utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1) + assert_equal(utxo_to_use["safe"], False) + + # Create tx2 using createrawtransaction + inputs = [{"txid": utxo_to_use["txid"], "vout": utxo_to_use["vout"]}] + outputs = {self.nodes[0].getnewaddress(): 0.999} + tx2 = self.nodes[1].createrawtransaction(inputs, outputs) + tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"] + txid_2 = self.nodes[1].sendrawtransaction(tx2_signed) + + # ...and check the result + assert(not is_opt_in(self.nodes[1], txid_2)) + assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"}) + sync_mempools(self.nodes) + assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"}) + + # Tx3 will opt-in to RBF + utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2) + inputs = [{"txid": txid_2, "vout": utxo_to_use["vout"]}] + outputs = {self.nodes[1].getnewaddress(): 0.998} + tx3 = self.nodes[0].createrawtransaction(inputs, outputs) + tx3_modified = tx_from_hex(tx3) + tx3_modified.vin[0].nSequence = 0 + tx3 = bytes_to_hex_str(tx3_modified.serialize()) + tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex'] + txid_3 = self.nodes[0].sendrawtransaction(tx3_signed) + + assert(is_opt_in(self.nodes[0], txid_3)) + assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"}) + sync_mempools(self.nodes) + assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"}) + + # Tx4 will chain off tx3. Doesn't signal itself, but depends on one + # that does. 
+ utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3) + inputs = [{"txid": txid_3, "vout": utxo_to_use["vout"]}] + outputs = {self.nodes[0].getnewaddress(): 0.997} + tx4 = self.nodes[1].createrawtransaction(inputs, outputs) + tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"] + txid_4 = self.nodes[1].sendrawtransaction(tx4_signed) + + assert(not is_opt_in(self.nodes[1], txid_4)) + assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"}) + sync_mempools(self.nodes) + assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"}) + + # Replace tx3, and check that tx4 becomes unknown + tx3_b = tx3_modified + tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee + tx3_b = bytes_to_hex_str(tx3_b.serialize()) + tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex'] + txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True) + assert(is_opt_in(self.nodes[0], txid_3b)) + + assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"}) + sync_mempools(self.nodes) + assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"}) + + # Check gettransaction as well: + for n in self.nodes[0:2]: + assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no") + assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no") + assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes") + assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes") + assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown") + + # After mining a transaction, it's no longer BIP125-replaceable + self.nodes[0].generate(1) + assert(txid_3b not in self.nodes[0].getrawmempool()) + assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no") + assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown") + +if __name__ == '__main__': + ListTransactionsTest().main() diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py new file mode 100755 index 000000000..189bc2d50 --- /dev/null +++ b/test/functional/wallet_multiwallet.py @@ -0,0 +1,305 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test multiwallet. 
+ +Verify that a bitcoind node can load multiple wallet files +""" +import os +import shutil + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.test_node import ErrorMatch +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, +) + + +class MultiWalletTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + self.supports_cli = True + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + node = self.nodes[0] + + data_dir = lambda *p: os.path.join(node.datadir, 'regtest', *p) + wallet_dir = lambda *p: data_dir('wallets', *p) + wallet = lambda name: node.get_wallet_rpc(name) + + def wallet_file(name): + if os.path.isdir(wallet_dir(name)): + return wallet_dir(name, "wallet.dat") + return wallet_dir(name) + + # check wallet.dat is created + self.stop_nodes() + assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True) + + # create symlink to verify wallet directory path can be referenced + # through symlink + if os.name != 'nt': + os.mkdir(wallet_dir('w7')) + os.symlink('w7', wallet_dir('w7_symlink')) + + # rename wallet.dat to make sure plain wallet file paths (as opposed to + # directory paths) can be loaded + os.rename(wallet_dir("wallet.dat"), wallet_dir("w8")) + + # create another dummy wallet for use in testing backups later + self.start_node(0, []) + self.stop_nodes() + empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat') + os.rename(wallet_dir("wallet.dat"), empty_wallet) + + # restart node with a mix of wallet names: + # w1, w2, w3 - to verify new wallets created when non-existing paths specified + # w - to verify wallet name matching works when one wallet path is prefix of another + # sub/w5 - to verify relative wallet path is created correctly + # extern/w6 - to verify absolute wallet path is created correctly + # w7_symlink - to verify symlinked wallet path is initialized correctly + # w8 - to verify existing wallet file is loaded correctly + # '' - to verify default wallet file is created correctly + wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', ''] + if os.name == 'nt': + wallet_names.remove('w7_symlink') + extra_args = ['-wallet={}'.format(n) for n in wallet_names] + self.start_node(0, extra_args) + assert_equal(set(node.listwallets()), set(wallet_names)) + + # check that all requested wallets were created + self.stop_node(0) + for wallet_name in wallet_names: + assert_equal(os.path.isfile(wallet_file(wallet_name)), True) + + # should not initialize if wallet path can't be created + exp_stderr = "boost::filesystem::create_directory:" + self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) + + self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist') + self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir()) + self.nodes[0].assert_start_raises_init_error(['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir()) + + # should not initialize if there are duplicate wallets + self.nodes[0].assert_start_raises_init_error(['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. 
Duplicate -wallet filename specified.') + + # should not initialize if one wallet is a copy of another + shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy')) + exp_stderr = "BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)" + self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) + + # should not initialize if wallet file is a symlink + if os.name != 'nt': + os.symlink('w8', wallet_dir('w8_symlink')) + self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], 'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX) + + # should not initialize if the specified walletdir does not exist + self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist') + # should not initialize if the specified walletdir is not a directory + not_a_dir = wallet_dir('notadir') + open(not_a_dir, 'a', encoding="utf8").close() + self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory') + + self.log.info("Do not allow -zapwallettxes with multiwallet") + self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") + self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") + self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file") + + self.log.info("Do not allow -salvagewallet with multiwallet") + self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file") + self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file") + + self.log.info("Do not allow -upgradewallet with multiwallet") + self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file") + self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file") + + # if wallets/ doesn't exist, datadir should be the default wallet dir + wallet_dir2 = data_dir('walletdir') + os.rename(wallet_dir(), wallet_dir2) + self.start_node(0, ['-wallet=w4', '-wallet=w5']) + assert_equal(set(node.listwallets()), {"w4", "w5"}) + w5 = wallet("w5") + w5.generate(1) + + # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded + os.rename(wallet_dir2, wallet_dir()) + self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()]) + assert_equal(set(node.listwallets()), {"w4", "w5"}) + w5 = wallet("w5") + w5_info = w5.getwalletinfo() + assert_equal(w5_info['immature_balance'], 50) + + competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir') + os.mkdir(competing_wallet_dir) + self.restart_node(0, ['-walletdir=' + competing_wallet_dir]) + exp_stderr = "Error: Error initializing wallet database environment \"\S+competing_walletdir\"!" 
+ self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX) + + self.restart_node(0, extra_args) + + wallets = [wallet(w) for w in wallet_names] + wallet_bad = wallet("bad") + + # check wallet names and balances + wallets[0].generate(1) + for wallet_name, wallet in zip(wallet_names, wallets): + info = wallet.getwalletinfo() + assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0) + assert_equal(info['walletname'], wallet_name) + + # accessing invalid wallet fails + assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo) + + # accessing wallet RPC without using wallet endpoint fails + assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) + + w1, w2, w3, w4, *_ = wallets + w1.generate(101) + assert_equal(w1.getbalance(), 100) + assert_equal(w2.getbalance(), 0) + assert_equal(w3.getbalance(), 0) + assert_equal(w4.getbalance(), 0) + + w1.sendtoaddress(w2.getnewaddress(), 1) + w1.sendtoaddress(w3.getnewaddress(), 2) + w1.sendtoaddress(w4.getnewaddress(), 3) + w1.generate(1) + assert_equal(w2.getbalance(), 1) + assert_equal(w3.getbalance(), 2) + assert_equal(w4.getbalance(), 3) + + batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()]) + assert_equal(batch[0]["result"]["chain"], "regtest") + assert_equal(batch[1]["result"]["walletname"], "w1") + + self.log.info('Check for per-wallet settxfee call') + assert_equal(w1.getwalletinfo()['paytxfee'], 0) + assert_equal(w2.getwalletinfo()['paytxfee'], 0) + w2.settxfee(4.0) + assert_equal(w1.getwalletinfo()['paytxfee'], 0) + assert_equal(w2.getwalletinfo()['paytxfee'], 4.0) + + self.log.info("Test dynamic wallet loading") + + self.restart_node(0, ['-nowallet']) + assert_equal(node.listwallets(), []) + assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo) + + self.log.info("Load first wallet") + loadwallet_name = node.loadwallet(wallet_names[0]) + assert_equal(loadwallet_name['name'], wallet_names[0]) + assert_equal(node.listwallets(), wallet_names[0:1]) + node.getwalletinfo() + w1 = node.get_wallet_rpc(wallet_names[0]) + w1.getwalletinfo() + + self.log.info("Load second wallet") + loadwallet_name = node.loadwallet(wallet_names[1]) + assert_equal(loadwallet_name['name'], wallet_names[1]) + assert_equal(node.listwallets(), wallet_names[0:2]) + assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo) + w2 = node.get_wallet_rpc(wallet_names[1]) + w2.getwalletinfo() + + self.log.info("Load remaining wallets") + for wallet_name in wallet_names[2:]: + loadwallet_name = self.nodes[0].loadwallet(wallet_name) + assert_equal(loadwallet_name['name'], wallet_name) + + assert_equal(set(self.nodes[0].listwallets()), set(wallet_names)) + + # Fail to load if wallet doesn't exist + assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets') + + # Fail to load duplicate wallets + assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. 
Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0]) + + # Fail to load if one wallet is a copy of another + assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy') + + # Fail to load if wallet file is a symlink + if os.name != 'nt': + assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink') + + # Fail to load if a directory is specified that doesn't contain a wallet + os.mkdir(wallet_dir('empty_wallet_dir')) + assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir') + + self.log.info("Test dynamic wallet creation.") + + # Fail to create a wallet if it already exists. + assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2') + + # Successfully create a wallet with a new name + loadwallet_name = self.nodes[0].createwallet('w9') + assert_equal(loadwallet_name['name'], 'w9') + w9 = node.get_wallet_rpc('w9') + assert_equal(w9.getwalletinfo()['walletname'], 'w9') + + assert 'w9' in self.nodes[0].listwallets() + + # Successfully create a wallet using a full path + new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir') + new_wallet_name = os.path.join(new_wallet_dir, 'w10') + loadwallet_name = self.nodes[0].createwallet(new_wallet_name) + assert_equal(loadwallet_name['name'], new_wallet_name) + w10 = node.get_wallet_rpc(new_wallet_name) + assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name) + + assert new_wallet_name in self.nodes[0].listwallets() + + self.log.info("Test dynamic wallet unloading") + + # Test `unloadwallet` errors + assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet) + assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy") + assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet) + assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2"), + + # Successfully unload the specified wallet name + self.nodes[0].unloadwallet("w1") + assert 'w1' not in self.nodes[0].listwallets() + + # Successfully unload the wallet referenced by the request endpoint + w2.unloadwallet() + assert 'w2' not in self.nodes[0].listwallets() + + # Successfully unload all wallets + for wallet_name in self.nodes[0].listwallets(): + self.nodes[0].unloadwallet(wallet_name) + assert_equal(self.nodes[0].listwallets(), []) + assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo) + + # Successfully load a previously unloaded wallet + self.nodes[0].loadwallet('w1') + assert_equal(self.nodes[0].listwallets(), ['w1']) + assert_equal(w1.getwalletinfo()['walletname'], 'w1') + + # Test backing up and restoring wallets + self.log.info("Test wallet backup") + self.restart_node(0, ['-nowallet']) + for wallet_name in wallet_names: + self.nodes[0].loadwallet(wallet_name) + for wallet_name in wallet_names: + rpc = self.nodes[0].get_wallet_rpc(wallet_name) + addr = rpc.getnewaddress() + backup = os.path.join(self.options.tmpdir, 'backup.dat') + rpc.backupwallet(backup) + self.nodes[0].unloadwallet(wallet_name) + shutil.copyfile(empty_wallet, wallet_file(wallet_name)) + self.nodes[0].loadwallet(wallet_name) + 
assert_equal(rpc.getaddressinfo(addr)['ismine'], False) + self.nodes[0].unloadwallet(wallet_name) + shutil.copyfile(backup, wallet_file(wallet_name)) + self.nodes[0].loadwallet(wallet_name) + assert_equal(rpc.getaddressinfo(addr)['ismine'], True) + + +if __name__ == '__main__': + MultiWalletTest().main() diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py new file mode 100755 index 000000000..00bf58d70 --- /dev/null +++ b/test/functional/wallet_resendwallettransactions.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test resendwallettransactions RPC.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_equal, assert_raises_rpc_error + +class ResendWalletTransactionsTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [['--walletbroadcast=false']] + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled. + assert_raises_rpc_error(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions) + + # Should return an empty array if there aren't unconfirmed wallet transactions. + self.stop_node(0) + self.start_node(0, extra_args=[]) + assert_equal(self.nodes[0].resendwallettransactions(), []) + + # Should return an array with the unconfirmed wallet transaction. + txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1) + assert_equal(self.nodes[0].resendwallettransactions(), [txid]) + +if __name__ == '__main__': + ResendWalletTransactionsTest().main() diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py new file mode 100755 index 000000000..d78c105c1 --- /dev/null +++ b/test/functional/wallet_txn_clone.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs.""" + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + connect_nodes, + disconnect_nodes, + sync_blocks, +) + +class TxnMallTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def add_options(self, parser): + parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true", + help="Test double-spend of 1-confirmed transaction") + parser.add_argument("--segwit", dest="segwit", default=False, action="store_true", + help="Test behaviour with SegWit txn (which should fail") + + def setup_network(self): + # Start with split network: + super(TxnMallTest, self).setup_network() + disconnect_nodes(self.nodes[1], 2) + disconnect_nodes(self.nodes[2], 1) + + def run_test(self): + if self.options.segwit: + output_type = "p2sh-segwit" + else: + output_type = "legacy" + + # All nodes should start with 1,250 BTC: + starting_balance = 1250 + for i in range(4): + assert_equal(self.nodes[i].getbalance(), starting_balance) + self.nodes[i].getnewaddress() # bug workaround, coins generated assigned to first getnewaddress! + + self.nodes[0].settxfee(.001) + + node0_address1 = self.nodes[0].getnewaddress(address_type=output_type) + node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219) + node0_tx1 = self.nodes[0].gettransaction(node0_txid1) + + node0_address2 = self.nodes[0].getnewaddress(address_type=output_type) + node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29) + node0_tx2 = self.nodes[0].gettransaction(node0_txid2) + + assert_equal(self.nodes[0].getbalance(), + starting_balance + node0_tx1["fee"] + node0_tx2["fee"]) + + # Coins are sent to node1_address + node1_address = self.nodes[1].getnewaddress() + + # Send tx1, and another transaction tx2 that won't be cloned + txid1 = self.nodes[0].sendtoaddress(node1_address, 40) + txid2 = self.nodes[0].sendtoaddress(node1_address, 20) + + # Construct a clone of tx1, to be malleated + rawtx1 = self.nodes[0].getrawtransaction(txid1, 1) + clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"]}] + clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"], + rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]} + clone_locktime = rawtx1["locktime"] + clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime) + + # createrawtransaction randomizes the order of its outputs, so swap them if necessary. + # output 0 is at version+#inputs+input+sigstub+sequence+#outputs + # 40 BTC serialized is 00286bee00000000 + pos0 = 2 * (4 + 1 + 36 + 1 + 4 + 1) + hex40 = "00286bee00000000" + output_len = 16 + 2 + 2 * int("0x" + clone_raw[pos0 + 16:pos0 + 16 + 2], 0) + if (rawtx1["vout"][0]["value"] == 40 and clone_raw[pos0:pos0 + 16] != hex40 or rawtx1["vout"][0]["value"] != 40 and clone_raw[pos0:pos0 + 16] == hex40): + output0 = clone_raw[pos0:pos0 + output_len] + output1 = clone_raw[pos0 + output_len:pos0 + 2 * output_len] + clone_raw = clone_raw[:pos0] + output1 + output0 + clone_raw[pos0 + 2 * output_len:] + + # Use a different signature hash type to sign. This creates an equivalent but malleated clone. 
+ # Don't send the clone anywhere yet + tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_raw, None, "ALL|ANYONECANPAY") + assert_equal(tx1_clone["complete"], True) + + # Have node0 mine a block, if requested: + if (self.options.mine_block): + self.nodes[0].generate(1) + sync_blocks(self.nodes[0:2]) + + tx1 = self.nodes[0].gettransaction(txid1) + tx2 = self.nodes[0].gettransaction(txid2) + + # Node0's balance should be starting balance, plus 50BTC for another + # matured block, minus tx1 and tx2 amounts, and minus transaction fees: + expected = starting_balance + node0_tx1["fee"] + node0_tx2["fee"] + if self.options.mine_block: + expected += 50 + expected += tx1["amount"] + tx1["fee"] + expected += tx2["amount"] + tx2["fee"] + assert_equal(self.nodes[0].getbalance(), expected) + + if self.options.mine_block: + assert_equal(tx1["confirmations"], 1) + assert_equal(tx2["confirmations"], 1) + else: + assert_equal(tx1["confirmations"], 0) + assert_equal(tx2["confirmations"], 0) + + # Send clone and its parent to miner + self.nodes[2].sendrawtransaction(node0_tx1["hex"]) + txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"]) + if self.options.segwit: + assert_equal(txid1, txid1_clone) + return + + # ... mine a block... + self.nodes[2].generate(1) + + # Reconnect the split network, and sync chain: + connect_nodes(self.nodes[1], 2) + self.nodes[2].sendrawtransaction(node0_tx2["hex"]) + self.nodes[2].sendrawtransaction(tx2["hex"]) + self.nodes[2].generate(1) # Mine another block to make sure we sync + sync_blocks(self.nodes) + + # Re-fetch transaction info: + tx1 = self.nodes[0].gettransaction(txid1) + tx1_clone = self.nodes[0].gettransaction(txid1_clone) + tx2 = self.nodes[0].gettransaction(txid2) + + # Verify expected confirmations + assert_equal(tx1["confirmations"], -2) + assert_equal(tx1_clone["confirmations"], 2) + assert_equal(tx2["confirmations"], 1) + + # Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured, + # less possible orphaned matured subsidy + expected += 100 + if (self.options.mine_block): + expected -= 50 + assert_equal(self.nodes[0].getbalance(), expected) + +if __name__ == '__main__': + TxnMallTest().main() diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py new file mode 100755 index 000000000..f114d5ab6 --- /dev/null +++ b/test/functional/wallet_txn_doublespend.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+"""Test the wallet accounts properly when there is a double-spend conflict.""" +from decimal import Decimal + +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + connect_nodes, + disconnect_nodes, + find_output, + sync_blocks, +) + +class TxnMallTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 4 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def add_options(self, parser): + parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true", + help="Test double-spend of 1-confirmed transaction") + + def setup_network(self): + # Start with split network: + super().setup_network() + disconnect_nodes(self.nodes[1], 2) + disconnect_nodes(self.nodes[2], 1) + + def run_test(self): + # All nodes should start with 1,250 BTC: + starting_balance = 1250 + for i in range(4): + assert_equal(self.nodes[i].getbalance(), starting_balance) + self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress! + + # Assign coins to foo and bar addresses: + node0_address_foo = self.nodes[0].getnewaddress() + fund_foo_txid = self.nodes[0].sendtoaddress(node0_address_foo, 1219) + fund_foo_tx = self.nodes[0].gettransaction(fund_foo_txid) + + node0_address_bar = self.nodes[0].getnewaddress() + fund_bar_txid = self.nodes[0].sendtoaddress(node0_address_bar, 29) + fund_bar_tx = self.nodes[0].gettransaction(fund_bar_txid) + + assert_equal(self.nodes[0].getbalance(), + starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]) + + # Coins are sent to node1_address + node1_address = self.nodes[1].getnewaddress() + + # First: use raw transaction API to send 1240 BTC to node1_address, + # but don't broadcast: + doublespend_fee = Decimal('-.02') + rawtx_input_0 = {} + rawtx_input_0["txid"] = fund_foo_txid + rawtx_input_0["vout"] = find_output(self.nodes[0], fund_foo_txid, 1219) + rawtx_input_1 = {} + rawtx_input_1["txid"] = fund_bar_txid + rawtx_input_1["vout"] = find_output(self.nodes[0], fund_bar_txid, 29) + inputs = [rawtx_input_0, rawtx_input_1] + change_address = self.nodes[0].getnewaddress() + outputs = {} + outputs[node1_address] = 1240 + outputs[change_address] = 1248 - 1240 + doublespend_fee + rawtx = self.nodes[0].createrawtransaction(inputs, outputs) + doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx) + assert_equal(doublespend["complete"], True) + + # Create two spends using 1 50 BTC coin each + txid1 = self.nodes[0].sendtoaddress(node1_address, 40) + txid2 = self.nodes[0].sendtoaddress(node1_address, 20) + + # Have node0 mine a block: + if (self.options.mine_block): + self.nodes[0].generate(1) + sync_blocks(self.nodes[0:2]) + + tx1 = self.nodes[0].gettransaction(txid1) + tx2 = self.nodes[0].gettransaction(txid2) + + # Node0's balance should be starting balance, plus 50BTC for another + # matured block, minus 40, minus 20, and minus transaction fees: + expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"] + if self.options.mine_block: + expected += 50 + expected += tx1["amount"] + tx1["fee"] + expected += tx2["amount"] + tx2["fee"] + assert_equal(self.nodes[0].getbalance(), expected) + + if self.options.mine_block: + assert_equal(tx1["confirmations"], 1) + assert_equal(tx2["confirmations"], 1) + # Node1's balance should be both transaction amounts: + assert_equal(self.nodes[1].getbalance(), starting_balance - tx1["amount"] - tx2["amount"]) + else: + assert_equal(tx1["confirmations"], 0) + 
assert_equal(tx2["confirmations"], 0) + + # Now give doublespend and its parents to miner: + self.nodes[2].sendrawtransaction(fund_foo_tx["hex"]) + self.nodes[2].sendrawtransaction(fund_bar_tx["hex"]) + doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"]) + # ... mine a block... + self.nodes[2].generate(1) + + # Reconnect the split network, and sync chain: + connect_nodes(self.nodes[1], 2) + self.nodes[2].generate(1) # Mine another block to make sure we sync + sync_blocks(self.nodes) + assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2) + + # Re-fetch transaction info: + tx1 = self.nodes[0].gettransaction(txid1) + tx2 = self.nodes[0].gettransaction(txid2) + + # Both transactions should be conflicted + assert_equal(tx1["confirmations"], -2) + assert_equal(tx2["confirmations"], -2) + + # Node0's total balance should be starting balance, plus 100BTC for + # two more matured blocks, minus 1240 for the double-spend, plus fees (which are + # negative): + expected = starting_balance + 100 - 1240 + fund_foo_tx["fee"] + fund_bar_tx["fee"] + doublespend_fee + assert_equal(self.nodes[0].getbalance(), expected) + + # Node1's balance should be its initial balance (1250 for 25 block rewards) plus the doublespend: + assert_equal(self.nodes[1].getbalance(), 1250 + 1240) + +if __name__ == '__main__': + TxnMallTest().main() diff --git a/test/functional/wallet_zapwallettxes.py b/test/functional/wallet_zapwallettxes.py new file mode 100755 index 000000000..adebff360 --- /dev/null +++ b/test/functional/wallet_zapwallettxes.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 +# Copyright (c) 2014-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test the zapwallettxes functionality. + +- start two bitcoind nodes +- create two transactions on node 0 - one is confirmed and one is unconfirmed. +- restart node 0 and verify that both the confirmed and the unconfirmed + transactions are still available. +- restart node 0 with zapwallettxes and persistmempool, and verify that both + the confirmed and the unconfirmed transactions are still available. +- restart node 0 with just zapwallettxes and verify that the confirmed + transactions are still available, but that the unconfirmed transaction has + been zapped. +""" +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + wait_until, +) + +class ZapWalletTXesTest (BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + + def skip_test_if_missing_module(self): + self.skip_if_no_wallet() + + def run_test(self): + self.log.info("Mining blocks...") + self.nodes[0].generate(1) + self.sync_all() + self.nodes[1].generate(100) + self.sync_all() + + # This transaction will be confirmed + txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10) + + self.nodes[0].generate(1) + self.sync_all() + + # This transaction will not be confirmed + txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20) + + # Confirmed and unconfirmed transactions are now in the wallet. + assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) + assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) + + # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet. 
+ self.stop_node(0) + self.start_node(0) + + assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) + assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) + + # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed + # transaction is zapped from the wallet, but is re-added when the mempool is reloaded. + self.stop_node(0) + self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"]) + + wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3) + self.nodes[0].syncwithvalidationinterfacequeue() # Flush mempool to wallet + + assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) + assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2) + + # Stop node0 and restart with zapwallettxes, but not persistmempool. + # The unconfirmed transaction is zapped and is no longer in the wallet. + self.stop_node(0) + self.start_node(0, ["-zapwallettxes=2"]) + + # tx1 is still be available because it was confirmed + assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1) + + # This will raise an exception because the unconfirmed transaction has been zapped + assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2) + +if __name__ == '__main__': + ZapWalletTXesTest().main() diff --git a/test/lint/README.md b/test/lint/README.md new file mode 100644 index 000000000..15974a359 --- /dev/null +++ b/test/lint/README.md @@ -0,0 +1,29 @@ +This folder contains lint scripts. + +check-doc.py +============ +Check for missing documentation of command line options. + +commit-script-check.sh +====================== +Verification of [scripted diffs](/doc/developer-notes.md#scripted-diffs). + +git-subtree-check.sh +==================== +Run this script from the root of the repository to verify that a subtree matches the contents of +the commit it claims to have been updated to. + +To use, make sure that you have fetched the upstream repository branch in which the subtree is +maintained: +* for `src/secp256k1`: https://github.com/bitcoin-core/secp256k1.git (branch master) +* for `src/leveldb`: https://github.com/bitcoin-core/leveldb.git (branch bitcoin-fork) +* for `src/univalue`: https://github.com/bitcoin-core/univalue.git (branch master) +* for `src/crypto/ctaes`: https://github.com/bitcoin-core/ctaes.git (branch master) + +Usage: `git-subtree-check.sh DIR (COMMIT)` + +`COMMIT` may be omitted, in which case `HEAD` is used. + +lint-all.sh +=========== +Calls other scripts with the `lint-` prefix. diff --git a/test/lint/check-doc.py b/test/lint/check-doc.py new file mode 100755 index 000000000..b0d9f8795 --- /dev/null +++ b/test/lint/check-doc.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +''' +This checks if all command line args are documented. +Return value is 0 to indicate no error. 
+ +Author: @MarcoFalke +''' + +from subprocess import check_output +import re +import sys + +FOLDER_GREP = 'src' +FOLDER_TEST = 'src/test/' +REGEX_ARG = '(?:ForceSet|SoftSet|Get|Is)(?:Bool)?Args?(?:Set)?\("(-[^"]+)"' +REGEX_DOC = 'AddArg\("(-[^"=]+?)(?:=|")' +CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/{}'.format(FOLDER_GREP) +CMD_GREP_ARGS = r"git grep --perl-regexp '{}' -- {} ':(exclude){}'".format(REGEX_ARG, CMD_ROOT_DIR, FOLDER_TEST) +CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR) +# list unsupported, deprecated and duplicate args as they need no documentation +SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb']) + + +def main(): + used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True, encoding='utf8') + docd = check_output(CMD_GREP_DOCS, shell=True, universal_newlines=True, encoding='utf8') + + args_used = set(re.findall(re.compile(REGEX_ARG), used)) + args_docd = set(re.findall(re.compile(REGEX_DOC), docd)).union(SET_DOC_OPTIONAL) + args_need_doc = args_used.difference(args_docd) + args_unknown = args_docd.difference(args_used) + + print("Args used : {}".format(len(args_used))) + print("Args documented : {}".format(len(args_docd))) + print("Args undocumented: {}".format(len(args_need_doc))) + print(args_need_doc) + print("Args unknown : {}".format(len(args_unknown))) + print(args_unknown) + + sys.exit(len(args_need_doc)) + + +if __name__ == "__main__": + main() diff --git a/test/lint/check-rpc-mappings.py b/test/lint/check-rpc-mappings.py new file mode 100755 index 000000000..137cc82b5 --- /dev/null +++ b/test/lint/check-rpc-mappings.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 +# Copyright (c) 2017-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
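+
+# The dispatch-table and vRPCConvertParams rows parsed below look roughly
+# like the following (illustrative rows, not actual source lines):
+#
+#   { "blockchain", "getblockhash", &getblockhash, {"height"} },  // dispatch table
+#   { "getblockhash", 0, "height" },                              // vRPCConvertParams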
+"""Check RPC argument consistency.""" + +from collections import defaultdict +import os +import re +import sys + +# Source files (relative to root) to scan for dispatch tables +SOURCES = [ + "src/rpc/server.cpp", + "src/rpc/blockchain.cpp", + "src/rpc/mining.cpp", + "src/rpc/misc.cpp", + "src/rpc/net.cpp", + "src/rpc/rawtransaction.cpp", + "src/wallet/rpcwallet.cpp", +] +# Source file (relative to root) containing conversion mapping +SOURCE_CLIENT = 'src/rpc/client.cpp' +# Argument names that should be ignored in consistency checks +IGNORE_DUMMY_ARGS = {'dummy', 'arg0', 'arg1', 'arg2', 'arg3', 'arg4', 'arg5', 'arg6', 'arg7', 'arg8', 'arg9'} + +class RPCCommand: + def __init__(self, name, args): + self.name = name + self.args = args + +class RPCArgument: + def __init__(self, names, idx): + self.names = names + self.idx = idx + self.convert = False + +def parse_string(s): + assert s[0] == '"' + assert s[-1] == '"' + return s[1:-1] + +def process_commands(fname): + """Find and parse dispatch table in implementation file `fname`.""" + cmds = [] + in_rpcs = False + with open(fname, "r", encoding="utf8") as f: + for line in f: + line = line.rstrip() + if not in_rpcs: + if re.match("static const CRPCCommand .*\[\] =", line): + in_rpcs = True + else: + if line.startswith('};'): + in_rpcs = False + elif '{' in line and '"' in line: + m = re.search('{ *("[^"]*"), *("[^"]*"), *&([^,]*), *{([^}]*)} *},', line) + assert m, 'No match to table expression: %s' % line + name = parse_string(m.group(2)) + args_str = m.group(4).strip() + if args_str: + args = [RPCArgument(parse_string(x.strip()).split('|'), idx) for idx, x in enumerate(args_str.split(','))] + else: + args = [] + cmds.append(RPCCommand(name, args)) + assert not in_rpcs and cmds, "Something went wrong with parsing the C++ file: update the regexps" + return cmds + +def process_mapping(fname): + """Find and parse conversion table in implementation file `fname`.""" + cmds = [] + in_rpcs = False + with open(fname, "r", encoding="utf8") as f: + for line in f: + line = line.rstrip() + if not in_rpcs: + if line == 'static const CRPCConvertParam vRPCConvertParams[] =': + in_rpcs = True + else: + if line.startswith('};'): + in_rpcs = False + elif '{' in line and '"' in line: + m = re.search('{ *("[^"]*"), *([0-9]+) *, *("[^"]*") *},', line) + assert m, 'No match to table expression: %s' % line + name = parse_string(m.group(1)) + idx = int(m.group(2)) + argname = parse_string(m.group(3)) + cmds.append((name, idx, argname)) + assert not in_rpcs and cmds + return cmds + +def main(): + if len(sys.argv) != 2: + print('Usage: {} ROOT-DIR'.format(sys.argv[0]), file=sys.stderr) + sys.exit(1) + + root = sys.argv[1] + + # Get all commands from dispatch tables + cmds = [] + for fname in SOURCES: + cmds += process_commands(os.path.join(root, fname)) + + cmds_by_name = {} + for cmd in cmds: + cmds_by_name[cmd.name] = cmd + + # Get current convert mapping for client + client = SOURCE_CLIENT + mapping = set(process_mapping(os.path.join(root, client))) + + print('* Checking consistency between dispatch tables and vRPCConvertParams') + + # Check mapping consistency + errors = 0 + for (cmdname, argidx, argname) in mapping: + try: + rargnames = cmds_by_name[cmdname].args[argidx].names + except IndexError: + print('ERROR: %s argument %i (named %s in vRPCConvertParams) is not defined in dispatch table' % (cmdname, argidx, argname)) + errors += 1 + continue + if argname not in rargnames: + print('ERROR: %s argument %i is named %s in vRPCConvertParams but %s in dispatch 
table' % (cmdname, argidx, argname, rargnames), file=sys.stderr) + errors += 1 + + # Check for conflicts in vRPCConvertParams conversion + # All aliases for an argument must either be present in the + # conversion table, or not. Anything in between means an oversight + # and some aliases won't work. + for cmd in cmds: + for arg in cmd.args: + convert = [((cmd.name, arg.idx, argname) in mapping) for argname in arg.names] + if any(convert) != all(convert): + print('ERROR: %s argument %s has conflicts in vRPCConvertParams conversion specifier %s' % (cmd.name, arg.names, convert)) + errors += 1 + arg.convert = all(convert) + + # Check for conversion difference by argument name. + # It is preferable for API consistency that arguments with the same name + # have the same conversion, so bin by argument name. + all_methods_by_argname = defaultdict(list) + converts_by_argname = defaultdict(list) + for cmd in cmds: + for arg in cmd.args: + for argname in arg.names: + all_methods_by_argname[argname].append(cmd.name) + converts_by_argname[argname].append(arg.convert) + + for argname, convert in converts_by_argname.items(): + if all(convert) != any(convert): + if argname in IGNORE_DUMMY_ARGS: + # these are testing or dummy, don't warn for them + continue + print('WARNING: conversion mismatch for argument named %s (%s)' % + (argname, list(zip(all_methods_by_argname[argname], converts_by_argname[argname])))) + + sys.exit(errors > 0) + + +if __name__ == '__main__': + main() diff --git a/test/lint/commit-script-check.sh b/test/lint/commit-script-check.sh new file mode 100755 index 000000000..f1327469f --- /dev/null +++ b/test/lint/commit-script-check.sh @@ -0,0 +1,47 @@ +#!/bin/sh +# Copyright (c) 2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +# This simple script checks for commits beginning with: scripted-diff: +# If found, looks for a script between the lines -BEGIN VERIFY SCRIPT- and +# -END VERIFY SCRIPT-. If no ending is found, it reads until the end of the +# commit message. + +# The resulting script should exactly transform the previous commit into the current +# one. Any remaining diff signals an error. + +export LC_ALL=C +if test "x$1" = "x"; then + echo "Usage: $0 ..." 
+ exit 1 +fi + +RET=0 +PREV_BRANCH=`git name-rev --name-only HEAD` +PREV_HEAD=`git rev-parse HEAD` +for i in `git rev-list --reverse $1`; do + if git rev-list -n 1 --pretty="%s" $i | grep -q "^scripted-diff:"; then + git checkout --quiet $i^ || exit + SCRIPT="`git rev-list --format=%b -n1 $i | sed '/^-BEGIN VERIFY SCRIPT-$/,/^-END VERIFY SCRIPT-$/{//!b};d'`" + if test "x$SCRIPT" = "x"; then + echo "Error: missing script for: $i" + echo "Failed" + RET=1 + else + echo "Running script for: $i" + echo "$SCRIPT" + eval "$SCRIPT" + git --no-pager diff --exit-code $i && echo "OK" || (echo "Failed"; false) || RET=1 + fi + git reset --quiet --hard HEAD + else + if git rev-list "--format=%b" -n1 $i | grep -q '^-\(BEGIN\|END\)[ a-zA-Z]*-$'; then + echo "Error: script block marker but no scripted-diff in title" + echo "Failed" + RET=1 + fi + fi +done +git checkout --quiet $PREV_BRANCH 2>/dev/null || git checkout --quiet $PREV_HEAD +exit $RET diff --git a/test/lint/git-subtree-check.sh b/test/lint/git-subtree-check.sh new file mode 100755 index 000000000..85e8b841b --- /dev/null +++ b/test/lint/git-subtree-check.sh @@ -0,0 +1,95 @@ +#!/bin/sh +# Copyright (c) 2015 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +export LC_ALL=C +DIR="$1" +COMMIT="$2" +if [ -z "$COMMIT" ]; then + COMMIT=HEAD +fi + +# Taken from git-subtree (Copyright (C) 2009 Avery Pennarun ) +find_latest_squash() +{ + dir="$1" + sq= + main= + sub= + git log --grep="^git-subtree-dir: $dir/*\$" \ + --pretty=format:'START %H%n%s%n%n%b%nEND%n' "$COMMIT" | + while read a b _; do + case "$a" in + START) sq="$b" ;; + git-subtree-mainline:) main="$b" ;; + git-subtree-split:) sub="$b" ;; + END) + if [ -n "$sub" ]; then + if [ -n "$main" ]; then + # a rejoin commit? + # Pretend its sub was a squash. + sq="$sub" + fi + echo "$sq" "$sub" + break + fi + sq= + main= + sub= + ;; + esac + done +} + +# find latest subtree update +latest_squash="$(find_latest_squash "$DIR")" +if [ -z "$latest_squash" ]; then + echo "ERROR: $DIR is not a subtree" >&2 + exit 2 +fi +set $latest_squash +old=$1 +rev=$2 + +# get the tree in the current commit +tree_actual=$(git ls-tree -d "$COMMIT" "$DIR" | head -n 1) +if [ -z "$tree_actual" ]; then + echo "FAIL: subtree directory $DIR not found in $COMMIT" >&2 + exit 1 +fi +set $tree_actual +tree_actual_type=$2 +tree_actual_tree=$3 +echo "$DIR in $COMMIT currently refers to $tree_actual_type $tree_actual_tree" +if [ "d$tree_actual_type" != "dtree" ]; then + echo "FAIL: subtree directory $DIR is not a tree in $COMMIT" >&2 + exit 1 +fi + +# get the tree at the time of the last subtree update +tree_commit=$(git show -s --format="%T" $old) +echo "$DIR in $COMMIT was last updated in commit $old (tree $tree_commit)" + +# ... and compare the actual tree with it +if [ "$tree_actual_tree" != "$tree_commit" ]; then + git diff $tree_commit $tree_actual_tree >&2 + echo "FAIL: subtree directory was touched without subtree merge" >&2 + exit 1 +fi + +# get the tree in the subtree commit referred to +if [ "d$(git cat-file -t $rev 2>/dev/null)" != dcommit ]; then + echo "subtree commit $rev unavailable: cannot compare" >&2 + exit +fi +tree_subtree=$(git show -s --format="%T" $rev) +echo "$DIR in $COMMIT was last updated to upstream commit $rev (tree $tree_subtree)" + +# ... 
and compare the actual tree with it +if [ "$tree_actual_tree" != "$tree_subtree" ]; then + echo "FAIL: subtree update commit differs from upstream tree!" >&2 + exit 1 +fi + +echo "GOOD" diff --git a/test/lint/lint-all.sh b/test/lint/lint-all.sh new file mode 100755 index 000000000..7c4f96cb3 --- /dev/null +++ b/test/lint/lint-all.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# This script runs all contrib/devtools/lint-*.sh files, and fails if any exit +# with a non-zero status code. + +# This script is intentionally locale dependent by not setting "export LC_ALL=C" +# in order to allow for the executed lint scripts to opt in or opt out of locale +# dependence themselves. + +set -u + +SCRIPTDIR=$(dirname "${BASH_SOURCE[0]}") +LINTALL=$(basename "${BASH_SOURCE[0]}") + +for f in "${SCRIPTDIR}"/lint-*.sh; do + if [ "$(basename "$f")" != "$LINTALL" ]; then + if ! "$f"; then + echo "^---- failure generated from $f" + exit 1 + fi + fi +done diff --git a/test/lint/lint-assertions.sh b/test/lint/lint-assertions.sh new file mode 100755 index 000000000..5bbcae79e --- /dev/null +++ b/test/lint/lint-assertions.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check for assertions with obvious side effects. + +export LC_ALL=C + +EXIT_CODE=0 + +# PRE31-C (SEI CERT C Coding Standard): +# "Assertions should not contain assignments, increment, or decrement operators." +OUTPUT=$(git grep -E '[^_]assert\(.*(\+\+|\-\-|[^=!<>]=[^=!<>]).*\);' -- "*.cpp" "*.h") +if [[ ${OUTPUT} != "" ]]; then + echo "Assertions should not have side effects:" + echo + echo "${OUTPUT}" + EXIT_CODE=1 +fi + +exit ${EXIT_CODE} diff --git a/test/lint/lint-circular-dependencies.sh b/test/lint/lint-circular-dependencies.sh new file mode 100755 index 000000000..3972baed1 --- /dev/null +++ b/test/lint/lint-circular-dependencies.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+# +# Check for circular dependencies + +export LC_ALL=C + +EXPECTED_CIRCULAR_DEPENDENCIES=( + "chainparamsbase -> util -> chainparamsbase" + "checkpoints -> validation -> checkpoints" + "index/txindex -> validation -> index/txindex" + "policy/fees -> txmempool -> policy/fees" + "policy/policy -> validation -> policy/policy" + "qt/addresstablemodel -> qt/walletmodel -> qt/addresstablemodel" + "qt/bantablemodel -> qt/clientmodel -> qt/bantablemodel" + "qt/bitcoingui -> qt/utilitydialog -> qt/bitcoingui" + "qt/bitcoingui -> qt/walletframe -> qt/bitcoingui" + "qt/bitcoingui -> qt/walletview -> qt/bitcoingui" + "qt/clientmodel -> qt/peertablemodel -> qt/clientmodel" + "qt/paymentserver -> qt/walletmodel -> qt/paymentserver" + "qt/recentrequeststablemodel -> qt/walletmodel -> qt/recentrequeststablemodel" + "qt/sendcoinsdialog -> qt/walletmodel -> qt/sendcoinsdialog" + "qt/transactiontablemodel -> qt/walletmodel -> qt/transactiontablemodel" + "qt/walletmodel -> qt/walletmodeltransaction -> qt/walletmodel" + "txmempool -> validation -> txmempool" + "validation -> validationinterface -> validation" + "wallet/coincontrol -> wallet/wallet -> wallet/coincontrol" + "wallet/fees -> wallet/wallet -> wallet/fees" + "wallet/rpcwallet -> wallet/wallet -> wallet/rpcwallet" + "wallet/wallet -> wallet/walletdb -> wallet/wallet" + "policy/fees -> policy/policy -> validation -> policy/fees" + "policy/rbf -> txmempool -> validation -> policy/rbf" + "qt/addressbookpage -> qt/bitcoingui -> qt/walletview -> qt/addressbookpage" + "qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/guiutil" + "txmempool -> validation -> validationinterface -> txmempool" + "qt/addressbookpage -> qt/bitcoingui -> qt/walletview -> qt/receivecoinsdialog -> qt/addressbookpage" + "qt/addressbookpage -> qt/bitcoingui -> qt/walletview -> qt/signverifymessagedialog -> qt/addressbookpage" + "qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/intro -> qt/guiutil" + "qt/addressbookpage -> qt/bitcoingui -> qt/walletview -> qt/sendcoinsdialog -> qt/sendcoinsentry -> qt/addressbookpage" +) + +EXIT_CODE=0 + +CIRCULAR_DEPENDENCIES=() + +IFS=$'\n' +for CIRC in $(cd src && ../contrib/devtools/circular-dependencies.py {*,*/*,*/*/*}.{h,cpp} | sed -e 's/^Circular dependency: //'); do + CIRCULAR_DEPENDENCIES+=($CIRC) + IS_EXPECTED_CIRC=0 + for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do + if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then + IS_EXPECTED_CIRC=1 + break + fi + done + if [[ ${IS_EXPECTED_CIRC} == 0 ]]; then + echo "A new circular dependency in the form of \"${CIRC}\" appears to have been introduced." + echo + EXIT_CODE=1 + fi +done + +for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do + IS_PRESENT_EXPECTED_CIRC=0 + for CIRC in "${CIRCULAR_DEPENDENCIES[@]}"; do + if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then + IS_PRESENT_EXPECTED_CIRC=1 + break + fi + done + if [[ ${IS_PRESENT_EXPECTED_CIRC} == 0 ]]; then + echo "Good job! The circular dependency \"${EXPECTED_CIRC}\" is no longer present." + echo "Please remove it from EXPECTED_CIRCULAR_DEPENDENCIES in $0" + echo "to make sure this circular dependency is not accidentally reintroduced." 
+ echo + EXIT_CODE=1 + fi +done + +exit ${EXIT_CODE} diff --git a/test/lint/lint-filenames.sh b/test/lint/lint-filenames.sh new file mode 100755 index 000000000..5391e43d9 --- /dev/null +++ b/test/lint/lint-filenames.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Make sure only lowercase alphanumerics (a-z0-9), underscores (_), +# hyphens (-) and dots (.) are used in source code filenames. + +export LC_ALL=C + +EXIT_CODE=0 +OUTPUT=$(git ls-files --full-name -- "*.[cC][pP][pP]" "*.[hH]" "*.[pP][yY]" "*.[sS][hH]" | \ + grep -vE '^[a-z0-9_./-]+$' | \ + grep -vE '^src/(secp256k1|univalue)/') + +if [[ ${OUTPUT} != "" ]]; then + echo "Use only lowercase alphanumerics (a-z0-9), underscores (_), hyphens (-) and dots (.)" + echo "in source code filenames:" + echo + echo "${OUTPUT}" + EXIT_CODE=1 +fi +exit ${EXIT_CODE} diff --git a/test/lint/lint-format-strings.py b/test/lint/lint-format-strings.py new file mode 100755 index 000000000..2fb35fd8c --- /dev/null +++ b/test/lint/lint-format-strings.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python3 +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Lint format strings: This program checks that the number of arguments passed +# to a variadic format string function matches the number of format specifiers +# in the format string. + +import argparse +import re +import sys + +FALSE_POSITIVES = [ + ("src/dbwrapper.cpp", "vsnprintf(p, limit - p, format, backup_ap)"), + ("src/index/base.cpp", "FatalError(const char* fmt, const Args&... args)"), + ("src/netbase.cpp", "LogConnectFailure(bool manual_connection, const char* fmt, const Args&... args)"), + ("src/util.cpp", "strprintf(_(COPYRIGHT_HOLDERS), _(COPYRIGHT_HOLDERS_SUBSTITUTION))"), + ("src/util.cpp", "strprintf(COPYRIGHT_HOLDERS, COPYRIGHT_HOLDERS_SUBSTITUTION)"), + ("src/wallet/wallet.h", "WalletLogPrintf(std::string fmt, Params... parameters)"), + ("src/wallet/wallet.h", "LogPrintf((\"%s \" + fmt).c_str(), GetDisplayName(), parameters...)"), + ("src/logging.h", "LogPrintf(const char* fmt, const Args&... args)"), +] + + +def parse_function_calls(function_name, source_code): + """Return an array with all calls to function function_name in string source_code. + Preprocessor directives and C++ style comments ("//") in source_code are removed. + + >>> len(parse_function_calls("foo", "foo();bar();foo();bar();")) + 2 + >>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[0].startswith("foo(1);") + True + >>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[1].startswith("foo(2);") + True + >>> len(parse_function_calls("foo", "foo();bar();// foo();bar();")) + 1 + >>> len(parse_function_calls("foo", "#define FOO foo();")) + 0 + """ + assert(type(function_name) is str and type(source_code) is str and function_name) + lines = [re.sub("// .*", " ", line).strip() + for line in source_code.split("\n") + if not line.strip().startswith("#")] + return re.findall(r"[^a-zA-Z_](?=({}\(.*).*)".format(function_name), " " + " ".join(lines)) + + +def normalize(s): + """Return a normalized version of string s with newlines, tabs and C style comments ("/* ... */") + replaced with spaces. Multiple spaces are replaced with a single space. 
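+
+    For instance, tabs and C style comments collapse to single spaces:
+
+    >>> normalize("a  b\tc /* d */ e")
+    'a b c e'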
+ + >>> normalize(" /* nothing */ foo\tfoo /* bar */ foo ") + 'foo foo foo' + """ + assert(type(s) is str) + s = s.replace("\n", " ") + s = s.replace("\t", " ") + s = re.sub("/\*.*?\*/", " ", s) + s = re.sub(" {2,}", " ", s) + return s.strip() + + +ESCAPE_MAP = { + r"\n": "[escaped-newline]", + r"\t": "[escaped-tab]", + r'\"': "[escaped-quote]", +} + + +def escape(s): + """Return the escaped version of string s with "\\\"", "\\n" and "\\t" escaped as + "[escaped-backslash]", "[escaped-newline]" and "[escaped-tab]". + + >>> unescape(escape("foo")) == "foo" + True + >>> escape(r'foo \\t foo \\n foo \\\\ foo \\ foo \\"bar\\"') + 'foo [escaped-tab] foo [escaped-newline] foo \\\\\\\\ foo \\\\ foo [escaped-quote]bar[escaped-quote]' + """ + assert(type(s) is str) + for raw_value, escaped_value in ESCAPE_MAP.items(): + s = s.replace(raw_value, escaped_value) + return s + + +def unescape(s): + """Return the unescaped version of escaped string s. + Reverses the replacements made in function escape(s). + + >>> unescape(escape("bar")) + 'bar' + >>> unescape("foo [escaped-tab] foo [escaped-newline] foo \\\\\\\\ foo \\\\ foo [escaped-quote]bar[escaped-quote]") + 'foo \\\\t foo \\\\n foo \\\\\\\\ foo \\\\ foo \\\\"bar\\\\"' + """ + assert(type(s) is str) + for raw_value, escaped_value in ESCAPE_MAP.items(): + s = s.replace(escaped_value, raw_value) + return s + + +def parse_function_call_and_arguments(function_name, function_call): + """Split string function_call into an array of strings consisting of: + * the string function_call followed by "(" + * the function call argument #1 + * ... + * the function call argument #n + * a trailing ");" + + The strings returned are in escaped form. See escape(...). + + >>> parse_function_call_and_arguments("foo", 'foo("%s", "foo");') + ['foo(', '"%s",', ' "foo"', ')'] + >>> parse_function_call_and_arguments("foo", 'foo("%s", "foo");') + ['foo(', '"%s",', ' "foo"', ')'] + >>> parse_function_call_and_arguments("foo", 'foo("%s %s", "foo", "bar");') + ['foo(', '"%s %s",', ' "foo",', ' "bar"', ')'] + >>> parse_function_call_and_arguments("fooprintf", 'fooprintf("%050d", i);') + ['fooprintf(', '"%050d",', ' i', ')'] + >>> parse_function_call_and_arguments("foo", 'foo(bar(foobar(barfoo("foo"))), foobar); barfoo') + ['foo(', 'bar(foobar(barfoo("foo"))),', ' foobar', ')'] + >>> parse_function_call_and_arguments("foo", "foo()") + ['foo(', '', ')'] + >>> parse_function_call_and_arguments("foo", "foo(123)") + ['foo(', '123', ')'] + >>> parse_function_call_and_arguments("foo", 'foo("foo")') + ['foo(', '"foo"', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", std::wstring_convert,wchar_t>().to_bytes(buf), err);') + ['strprintf(', '"%s (%d)",', ' std::wstring_convert,wchar_t>().to_bytes(buf),', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo().to_bytes(buf), err);') + ['strprintf(', '"%s (%d)",', ' foo().to_bytes(buf),', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo().to_bytes(buf), err);') + ['strprintf(', '"%s (%d)",', ' foo().to_bytes(buf),', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo << 1, err);') + ['strprintf(', '"%s (%d)",', ' foo << 1,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo() >> 1, err);') + ['strprintf(', '"%s (%d)",', ' foo() >> 1,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo < 1 ? 
bar : foobar, err);') + ['strprintf(', '"%s (%d)",', ' foo < 1 ? bar : foobar,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo < 1, err);') + ['strprintf(', '"%s (%d)",', ' foo < 1,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo > 1 ? bar : foobar, err);') + ['strprintf(', '"%s (%d)",', ' foo > 1 ? bar : foobar,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo > 1, err);') + ['strprintf(', '"%s (%d)",', ' foo > 1,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo <= 1, err);') + ['strprintf(', '"%s (%d)",', ' foo <= 1,', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo <= bar<1, 2>(1, 2), err);') + ['strprintf(', '"%s (%d)",', ' foo <= bar<1, 2>(1, 2),', ' err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo>foo<1,2>(1,2)?bar:foobar,err)'); + ['strprintf(', '"%s (%d)",', ' foo>foo<1,2>(1,2)?bar:foobar,', 'err', ')'] + >>> parse_function_call_and_arguments("strprintf", 'strprintf("%s (%d)", foo>foo<1,2>(1,2),err)'); + ['strprintf(', '"%s (%d)",', ' foo>foo<1,2>(1,2),', 'err', ')'] + """ + assert(type(function_name) is str and type(function_call) is str and function_name) + remaining = normalize(escape(function_call)) + expected_function_call = "{}(".format(function_name) + assert(remaining.startswith(expected_function_call)) + parts = [expected_function_call] + remaining = remaining[len(expected_function_call):] + open_parentheses = 1 + open_template_arguments = 0 + in_string = False + parts.append("") + for i, char in enumerate(remaining): + parts.append(parts.pop() + char) + if char == "\"": + in_string = not in_string + continue + if in_string: + continue + if char == "(": + open_parentheses += 1 + continue + if char == ")": + open_parentheses -= 1 + if open_parentheses > 1: + continue + if open_parentheses == 0: + parts.append(parts.pop()[:-1]) + parts.append(char) + break + prev_char = remaining[i - 1] if i - 1 >= 0 else None + next_char = remaining[i + 1] if i + 1 <= len(remaining) - 1 else None + if char == "<" and next_char not in [" ", "<", "="] and prev_char not in [" ", "<"]: + open_template_arguments += 1 + continue + if char == ">" and next_char not in [" ", ">", "="] and prev_char not in [" ", ">"] and open_template_arguments > 0: + open_template_arguments -= 1 + if open_template_arguments > 0: + continue + if char == ",": + parts.append("") + return parts + + +def parse_string_content(argument): + """Return the text within quotes in string argument. + + >>> parse_string_content('1 "foo %d bar" 2') + 'foo %d bar' + >>> parse_string_content('1 foobar 2') + '' + >>> parse_string_content('1 "bar" 2') + 'bar' + >>> parse_string_content('1 "foo" 2 "bar" 3') + 'foobar' + >>> parse_string_content('1 "foo" 2 " " "bar" 3') + 'foo bar' + >>> parse_string_content('""') + '' + >>> parse_string_content('') + '' + >>> parse_string_content('1 2 3') + '' + """ + assert(type(argument) is str) + string_content = "" + in_string = False + for char in normalize(escape(argument)): + if char == "\"": + in_string = not in_string + elif in_string: + string_content += char + return string_content + + +def count_format_specifiers(format_string): + """Return the number of format specifiers in string format_string. 
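+
+    A numeric width or precision inside a specifier does not add to the count:
+
+    >>> count_format_specifiers("foo %5.2f bar")
+    1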
+ + >>> count_format_specifiers("foo bar foo") + 0 + >>> count_format_specifiers("foo %d bar foo") + 1 + >>> count_format_specifiers("foo %d bar %i foo") + 2 + >>> count_format_specifiers("foo %d bar %i foo %% foo") + 2 + >>> count_format_specifiers("foo %d bar %i foo %% foo %d foo") + 3 + >>> count_format_specifiers("foo %d bar %i foo %% foo %*d foo") + 4 + """ + assert(type(format_string) is str) + n = 0 + in_specifier = False + for i, char in enumerate(format_string): + if format_string[i - 1:i + 1] == "%%" or format_string[i:i + 2] == "%%": + pass + elif char == "%": + in_specifier = True + n += 1 + elif char in "aAcdeEfFgGinopsuxX": + in_specifier = False + elif in_specifier and char == "*": + n += 1 + return n + + +def main(): + parser = argparse.ArgumentParser(description="This program checks that the number of arguments passed " + "to a variadic format string function matches the number of format " + "specifiers in the format string.") + parser.add_argument("--skip-arguments", type=int, help="number of arguments before the format string " + "argument (e.g. 1 in the case of fprintf)", default=0) + parser.add_argument("function_name", help="function name (e.g. fprintf)", default=None) + parser.add_argument("file", type=argparse.FileType("r", encoding="utf-8"), nargs="*", help="C++ source code file (e.g. foo.cpp)") + args = parser.parse_args() + + exit_code = 0 + for f in args.file: + for function_call_str in parse_function_calls(args.function_name, f.read()): + parts = parse_function_call_and_arguments(args.function_name, function_call_str) + relevant_function_call_str = unescape("".join(parts))[:512] + if (f.name, relevant_function_call_str) in FALSE_POSITIVES: + continue + if len(parts) < 3 + args.skip_arguments: + exit_code = 1 + print("{}: Could not parse function call string \"{}(...)\": {}".format(f.name, args.function_name, relevant_function_call_str)) + continue + argument_count = len(parts) - 3 - args.skip_arguments + format_str = parse_string_content(parts[1 + args.skip_arguments]) + format_specifier_count = count_format_specifiers(format_str) + if format_specifier_count != argument_count: + exit_code = 1 + print("{}: Expected {} argument(s) after format string but found {} argument(s): {}".format(f.name, format_specifier_count, argument_count, relevant_function_call_str)) + continue + sys.exit(exit_code) + + +if __name__ == "__main__": + main() diff --git a/test/lint/lint-format-strings.sh b/test/lint/lint-format-strings.sh new file mode 100755 index 000000000..2c443abf6 --- /dev/null +++ b/test/lint/lint-format-strings.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Lint format strings: This program checks that the number of arguments passed +# to a variadic format string function matches the number of format specifiers +# in the format string. + +export LC_ALL=C + +FUNCTION_NAMES_AND_NUMBER_OF_LEADING_ARGUMENTS=( + FatalError,0 + fprintf,1 + LogConnectFailure,1 + LogPrint,1 + LogPrintf,0 + printf,0 + snprintf,2 + sprintf,1 + strprintf,0 + vfprintf,1 + vprintf,1 + vsnprintf,1 + vsprintf,1 + WalletLogPrintf,0 +) + +EXIT_CODE=0 +if ! 
python3 -m doctest test/lint/lint-format-strings.py; then + EXIT_CODE=1 +fi +for S in "${FUNCTION_NAMES_AND_NUMBER_OF_LEADING_ARGUMENTS[@]}"; do + IFS="," read -r FUNCTION_NAME SKIP_ARGUMENTS <<< "${S}" + for MATCHING_FILE in $(git grep --full-name -l "${FUNCTION_NAME}" -- "*.c" "*.cpp" "*.h" | sort | grep -vE "^src/(leveldb|secp256k1|tinyformat|univalue)"); do + MATCHING_FILES+=("${MATCHING_FILE}") + done + if ! test/lint/lint-format-strings.py --skip-arguments "${SKIP_ARGUMENTS}" "${FUNCTION_NAME}" "${MATCHING_FILES[@]}"; then + EXIT_CODE=1 + fi +done +exit ${EXIT_CODE} diff --git a/test/lint/lint-include-guards.sh b/test/lint/lint-include-guards.sh new file mode 100755 index 000000000..464969794 --- /dev/null +++ b/test/lint/lint-include-guards.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check include guards. + +export LC_ALL=C +HEADER_ID_PREFIX="BITCOIN_" +HEADER_ID_SUFFIX="_H" + +REGEXP_EXCLUDE_FILES_WITH_PREFIX="src/(crypto/ctaes/|leveldb/|secp256k1/|tinyformat.h|univalue/)" + +EXIT_CODE=0 +for HEADER_FILE in $(git ls-files -- "*.h" | grep -vE "^${REGEXP_EXCLUDE_FILES_WITH_PREFIX}") +do + HEADER_ID_BASE=$(cut -f2- -d/ <<< "${HEADER_FILE}" | sed "s/\.h$//g" | tr / _ | tr "[:lower:]" "[:upper:]") + HEADER_ID="${HEADER_ID_PREFIX}${HEADER_ID_BASE}${HEADER_ID_SUFFIX}" + if [[ $(grep -cE "^#(ifndef|define) ${HEADER_ID}" "${HEADER_FILE}") != 2 ]]; then + echo "${HEADER_FILE} seems to be missing the expected include guard:" + echo " #ifndef ${HEADER_ID}" + echo " #define ${HEADER_ID}" + echo " ..." + echo " #endif // ${HEADER_ID}" + echo + EXIT_CODE=1 + fi +done +exit ${EXIT_CODE} diff --git a/test/lint/lint-includes.sh b/test/lint/lint-includes.sh new file mode 100755 index 000000000..f6d0fd382 --- /dev/null +++ b/test/lint/lint-includes.sh @@ -0,0 +1,112 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check for duplicate includes. +# Guard against accidental introduction of new Boost dependencies. +# Check includes: Check for duplicate includes. Enforce bracket syntax includes. 
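+#
+# Example (illustrative): '#include <util.h>' uses the accepted bracket
+# syntax, while '#include "util.h"' would be flagged by the quote-syntax
+# check below.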
+ +export LC_ALL=C +IGNORE_REGEXP="/(leveldb|secp256k1|univalue)/" + +filter_suffix() { + git ls-files | grep -E "^src/.*\.${1}"'$' | grep -Ev "${IGNORE_REGEXP}" +} + +EXIT_CODE=0 + +for HEADER_FILE in $(filter_suffix h); do + DUPLICATE_INCLUDES_IN_HEADER_FILE=$(grep -E "^#include " < "${HEADER_FILE}" | sort | uniq -d) + if [[ ${DUPLICATE_INCLUDES_IN_HEADER_FILE} != "" ]]; then + echo "Duplicate include(s) in ${HEADER_FILE}:" + echo "${DUPLICATE_INCLUDES_IN_HEADER_FILE}" + echo + EXIT_CODE=1 + fi +done + +for CPP_FILE in $(filter_suffix cpp); do + DUPLICATE_INCLUDES_IN_CPP_FILE=$(grep -E "^#include " < "${CPP_FILE}" | sort | uniq -d) + if [[ ${DUPLICATE_INCLUDES_IN_CPP_FILE} != "" ]]; then + echo "Duplicate include(s) in ${CPP_FILE}:" + echo "${DUPLICATE_INCLUDES_IN_CPP_FILE}" + echo + EXIT_CODE=1 + fi +done + +INCLUDED_CPP_FILES=$(git grep -E "^#include [<\"][^>\"]+\.cpp[>\"]" -- "*.cpp" "*.h") +if [[ ${INCLUDED_CPP_FILES} != "" ]]; then + echo "The following files #include .cpp files:" + echo "${INCLUDED_CPP_FILES}" + echo + EXIT_CODE=1 +fi + +EXPECTED_BOOST_INCLUDES=( + boost/algorithm/string.hpp + boost/algorithm/string/classification.hpp + boost/algorithm/string/replace.hpp + boost/algorithm/string/split.hpp + boost/bind.hpp + boost/chrono/chrono.hpp + boost/date_time/posix_time/posix_time.hpp + boost/filesystem.hpp + boost/filesystem/fstream.hpp + boost/multi_index/hashed_index.hpp + boost/multi_index/ordered_index.hpp + boost/multi_index/sequenced_index.hpp + boost/multi_index_container.hpp + boost/optional.hpp + boost/preprocessor/cat.hpp + boost/preprocessor/stringize.hpp + boost/signals2/connection.hpp + boost/signals2/last_value.hpp + boost/signals2/signal.hpp + boost/test/unit_test.hpp + boost/thread.hpp + boost/thread/condition_variable.hpp + boost/thread/mutex.hpp + boost/thread/thread.hpp + boost/variant.hpp + boost/variant/apply_visitor.hpp + boost/variant/static_visitor.hpp +) + +for BOOST_INCLUDE in $(git grep '^#include ' | sort -u); do + IS_EXPECTED_INCLUDE=0 + for EXPECTED_BOOST_INCLUDE in "${EXPECTED_BOOST_INCLUDES[@]}"; do + if [[ "${BOOST_INCLUDE}" == "${EXPECTED_BOOST_INCLUDE}" ]]; then + IS_EXPECTED_INCLUDE=1 + break + fi + done + if [[ ${IS_EXPECTED_INCLUDE} == 0 ]]; then + EXIT_CODE=1 + echo "A new Boost dependency in the form of \"${BOOST_INCLUDE}\" appears to have been introduced:" + git grep "${BOOST_INCLUDE}" -- "*.cpp" "*.h" + echo + fi +done + +for EXPECTED_BOOST_INCLUDE in "${EXPECTED_BOOST_INCLUDES[@]}"; do + if ! git grep -q "^#include <${EXPECTED_BOOST_INCLUDE}>" -- "*.cpp" "*.h"; then + echo "Good job! The Boost dependency \"${EXPECTED_BOOST_INCLUDE}\" is no longer used." + echo "Please remove it from EXPECTED_BOOST_INCLUDES in $0" + echo "to make sure this dependency is not accidentally reintroduced." 
+ echo + EXIT_CODE=1 + fi +done + +QUOTE_SYNTAX_INCLUDES=$(git grep '^#include "' -- "*.cpp" "*.h" | grep -Ev "${IGNORE_REGEXP}") +if [[ ${QUOTE_SYNTAX_INCLUDES} != "" ]]; then + echo "Please use bracket syntax includes (\"#include \") instead of quote syntax includes:" + echo "${QUOTE_SYNTAX_INCLUDES}" + echo + EXIT_CODE=1 +fi + +exit ${EXIT_CODE} diff --git a/test/lint/lint-locale-dependence.sh b/test/lint/lint-locale-dependence.sh new file mode 100755 index 000000000..cbee437c9 --- /dev/null +++ b/test/lint/lint-locale-dependence.sh @@ -0,0 +1,228 @@ +#!/usr/bin/env bash + +export LC_ALL=C +KNOWN_VIOLATIONS=( + "src/base58.cpp:.*isspace" + "src/bitcoin-tx.cpp.*stoul" + "src/bitcoin-tx.cpp.*trim_right" + "src/bitcoin-tx.cpp:.*atoi" + "src/core_read.cpp.*is_digit" + "src/dbwrapper.cpp.*stoul" + "src/dbwrapper.cpp:.*vsnprintf" + "src/httprpc.cpp.*trim" + "src/init.cpp:.*atoi" + "src/qt/rpcconsole.cpp:.*atoi" + "src/qt/rpcconsole.cpp:.*isdigit" + "src/rest.cpp:.*strtol" + "src/test/dbwrapper_tests.cpp:.*snprintf" + "src/test/getarg_tests.cpp.*split" + "src/torcontrol.cpp:.*atoi" + "src/torcontrol.cpp:.*strtol" + "src/uint256.cpp:.*isspace" + "src/uint256.cpp:.*tolower" + "src/util.cpp:.*atoi" + "src/util.cpp:.*fprintf" + "src/util.cpp:.*tolower" + "src/utilmoneystr.cpp:.*isdigit" + "src/utilmoneystr.cpp:.*isspace" + "src/utilstrencodings.cpp:.*atoi" + "src/utilstrencodings.cpp:.*isspace" + "src/utilstrencodings.cpp:.*strtol" + "src/utilstrencodings.cpp:.*strtoll" + "src/utilstrencodings.cpp:.*strtoul" + "src/utilstrencodings.cpp:.*strtoull" + "src/utilstrencodings.h:.*atoi" +) + +REGEXP_IGNORE_EXTERNAL_DEPENDENCIES="^src/(crypto/ctaes/|leveldb/|secp256k1/|tinyformat.h|univalue/)" + +LOCALE_DEPENDENT_FUNCTIONS=( + alphasort # LC_COLLATE (via strcoll) + asctime # LC_TIME (directly) + asprintf # (via vasprintf) + atof # LC_NUMERIC (via strtod) + atoi # LC_NUMERIC (via strtol) + atol # LC_NUMERIC (via strtol) + atoll # (via strtoll) + atoq + btowc # LC_CTYPE (directly) + ctime # (via asctime or localtime) + dprintf # (via vdprintf) + fgetwc + fgetws + fold_case # boost::locale::fold_case + fprintf # (via vfprintf) + fputwc + fputws + fscanf # (via __vfscanf) + fwprintf # (via __vfwprintf) + getdate # via __getdate_r => isspace // __localtime_r + getwc + getwchar + is_digit # boost::algorithm::is_digit + is_space # boost::algorithm::is_space + isalnum # LC_CTYPE + isalpha # LC_CTYPE + isblank # LC_CTYPE + iscntrl # LC_CTYPE + isctype # LC_CTYPE + isdigit # LC_CTYPE + isgraph # LC_CTYPE + islower # LC_CTYPE + isprint # LC_CTYPE + ispunct # LC_CTYPE + isspace # LC_CTYPE + isupper # LC_CTYPE + iswalnum # LC_CTYPE + iswalpha # LC_CTYPE + iswblank # LC_CTYPE + iswcntrl # LC_CTYPE + iswctype # LC_CTYPE + iswdigit # LC_CTYPE + iswgraph # LC_CTYPE + iswlower # LC_CTYPE + iswprint # LC_CTYPE + iswpunct # LC_CTYPE + iswspace # LC_CTYPE + iswupper # LC_CTYPE + iswxdigit # LC_CTYPE + isxdigit # LC_CTYPE + localeconv # LC_NUMERIC + LC_MONETARY + mblen # LC_CTYPE + mbrlen + mbrtowc + mbsinit + mbsnrtowcs + mbsrtowcs + mbstowcs # LC_CTYPE + mbtowc # LC_CTYPE + mktime + normalize # boost::locale::normalize +# printf # LC_NUMERIC + putwc + putwchar + scanf # LC_NUMERIC + setlocale + snprintf + sprintf + sscanf + stod + stof + stoi + stol + stold + stoll + stoul + stoull + strcasecmp + strcasestr + strcoll # LC_COLLATE +# strerror + strfmon + strftime # LC_TIME + strncasecmp + strptime + strtod # LC_NUMERIC + strtof + strtoimax + strtol # LC_NUMERIC + strtold + strtoll + strtoq + strtoul # LC_NUMERIC + strtoull + 
strtoumax + strtouq + strxfrm # LC_COLLATE + swprintf + to_lower # boost::locale::to_lower + to_title # boost::locale::to_title + to_upper # boost::locale::to_upper + tolower # LC_CTYPE + toupper # LC_CTYPE + towctrans + towlower # LC_CTYPE + towupper # LC_CTYPE + trim # boost::algorithm::trim + trim_left # boost::algorithm::trim_left + trim_right # boost::algorithm::trim_right + ungetwc + vasprintf + vdprintf + versionsort + vfprintf + vfscanf + vfwprintf + vprintf + vscanf + vsnprintf + vsprintf + vsscanf + vswprintf + vwprintf + wcrtomb + wcscasecmp + wcscoll # LC_COLLATE + wcsftime # LC_TIME + wcsncasecmp + wcsnrtombs + wcsrtombs + wcstod # LC_NUMERIC + wcstof + wcstoimax + wcstol # LC_NUMERIC + wcstold + wcstoll + wcstombs # LC_CTYPE + wcstoul # LC_NUMERIC + wcstoull + wcstoumax + wcswidth + wcsxfrm # LC_COLLATE + wctob + wctomb # LC_CTYPE + wctrans + wctype + wcwidth + wprintf +) + +function join_array { + local IFS="$1" + shift + echo "$*" +} + +REGEXP_IGNORE_KNOWN_VIOLATIONS=$(join_array "|" "${KNOWN_VIOLATIONS[@]}") + +# Invoke "git grep" only once in order to minimize run-time +REGEXP_LOCALE_DEPENDENT_FUNCTIONS=$(join_array "|" "${LOCALE_DEPENDENT_FUNCTIONS[@]}") +GIT_GREP_OUTPUT=$(git grep -E "[^a-zA-Z0-9_\`'\"<>](${REGEXP_LOCALE_DEPENDENT_FUNCTIONS}(_r|_s)?)[^a-zA-Z0-9_\`'\"<>]" -- "*.cpp" "*.h") + +EXIT_CODE=0 +for LOCALE_DEPENDENT_FUNCTION in "${LOCALE_DEPENDENT_FUNCTIONS[@]}"; do + MATCHES=$(grep -E "[^a-zA-Z0-9_\`'\"<>]${LOCALE_DEPENDENT_FUNCTION}(_r|_s)?[^a-zA-Z0-9_\`'\"<>]" <<< "${GIT_GREP_OUTPUT}" | \ + grep -vE "\.(c|cpp|h):\s*(//|\*|/\*|\").*${LOCALE_DEPENDENT_FUNCTION}" | \ + grep -vE 'fprintf\(.*(stdout|stderr)') + if [[ ${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES} != "" ]]; then + MATCHES=$(grep -vE "${REGEXP_IGNORE_EXTERNAL_DEPENDENCIES}" <<< "${MATCHES}") + fi + if [[ ${REGEXP_IGNORE_KNOWN_VIOLATIONS} != "" ]]; then + MATCHES=$(grep -vE "${REGEXP_IGNORE_KNOWN_VIOLATIONS}" <<< "${MATCHES}") + fi + if [[ ${MATCHES} != "" ]]; then + echo "The locale dependent function ${LOCALE_DEPENDENT_FUNCTION}(...) appears to be used:" + echo "${MATCHES}" + echo + EXIT_CODE=1 + fi +done +if [[ ${EXIT_CODE} != 0 ]]; then + echo "Unnecessary locale dependence can cause bugs that are very" + echo "tricky to isolate and fix. Please avoid using locale dependent" + echo "functions if possible." + echo + echo "Advice not applicable in this specific case? Add an exception" + echo "by updating the ignore list in $0" +fi +exit ${EXIT_CODE} diff --git a/test/lint/lint-logs.sh b/test/lint/lint-logs.sh new file mode 100755 index 000000000..1afd4cfc1 --- /dev/null +++ b/test/lint/lint-logs.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check that all logs are terminated with '\n' +# +# Some logs are continued over multiple lines. They should be explicitly +# commented with \* Continued *\ +# +# There are some instances of LogPrintf() in comments. 
Those can be +# ignored + +export LC_ALL=C +UNTERMINATED_LOGS=$(git grep --extended-regexp "LogPrintf?\(" -- "*.cpp" | \ + grep -v '\\n"' | \ + grep -v "/\* Continued \*/" | \ + grep -v "LogPrint()" | \ + grep -v "LogPrintf()") +if [[ ${UNTERMINATED_LOGS} != "" ]]; then + echo "All calls to LogPrintf() and LogPrint() should be terminated with \\n" + echo + echo "${UNTERMINATED_LOGS}" + exit 1 +fi diff --git a/test/lint/lint-python-shebang.sh b/test/lint/lint-python-shebang.sh new file mode 100755 index 000000000..4ff87f0bf --- /dev/null +++ b/test/lint/lint-python-shebang.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +# Shebang must use python3 (not python or python2) + +export LC_ALL=C +EXIT_CODE=0 +for PYTHON_FILE in $(git ls-files -- "*.py"); do + if [[ $(head -c 2 "${PYTHON_FILE}") == "#!" && + $(head -n 1 "${PYTHON_FILE}") != "#!/usr/bin/env python3" ]]; then + echo "Missing shebang \"#!/usr/bin/env python3\" in ${PYTHON_FILE} (do not use python or python2)" + EXIT_CODE=1 + fi +done +exit ${EXIT_CODE} diff --git a/test/lint/lint-python-utf8-encoding.sh b/test/lint/lint-python-utf8-encoding.sh new file mode 100755 index 000000000..d03c20205 --- /dev/null +++ b/test/lint/lint-python-utf8-encoding.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Make sure we explicitly open all text files using UTF-8 (or ASCII) encoding to +# avoid potential issues on the BSDs where the locale is not always set. + +export LC_ALL=C +EXIT_CODE=0 +OUTPUT=$(git grep " open(" -- "*.py" | grep -vE "encoding=.(ascii|utf8|utf-8)." | grep -vE "open\([^,]*, ['\"][^'\"]*b[^'\"]*['\"]") +if [[ ${OUTPUT} != "" ]]; then + echo "Python's open(...) seems to be used to open text files without explicitly" + echo "specifying encoding=\"utf8\":" + echo + echo "${OUTPUT}" + EXIT_CODE=1 +fi +OUTPUT=$(git grep "check_output(" -- "*.py" | grep "universal_newlines=True" | grep -vE "encoding=.(ascii|utf8|utf-8).") +if [[ ${OUTPUT} != "" ]]; then + echo "Python's check_output(...) seems to be used to get program outputs without explicitly" + echo "specifying encoding=\"utf8\":" + echo + echo "${OUTPUT}" + EXIT_CODE=1 +fi +exit ${EXIT_CODE} diff --git a/test/lint/lint-python.sh b/test/lint/lint-python.sh new file mode 100755 index 000000000..4f4542ff0 --- /dev/null +++ b/test/lint/lint-python.sh @@ -0,0 +1,90 @@ +#!/bin/sh +# +# Copyright (c) 2017 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check for specified flake8 warnings in python files. 
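+#
+# Findings are reported in the usual flake8 format, e.g. (illustrative):
+#   foo.py:12:1: E401 multiple imports on one line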
+ +export LC_ALL=C + +# E101 indentation contains mixed spaces and tabs +# E112 expected an indented block +# E113 unexpected indentation +# E115 expected an indented block (comment) +# E116 unexpected indentation (comment) +# E125 continuation line with same indent as next logical line +# E129 visually indented line with same indent as next logical line +# E131 continuation line unaligned for hanging indent +# E133 closing bracket is missing indentation +# E223 tab before operator +# E224 tab after operator +# E242 tab after ',' +# E266 too many leading '#' for block comment +# E271 multiple spaces after keyword +# E272 multiple spaces before keyword +# E273 tab after keyword +# E274 tab before keyword +# E275 missing whitespace after keyword +# E304 blank lines found after function decorator +# E306 expected 1 blank line before a nested definition +# E401 multiple imports on one line +# E402 module level import not at top of file +# F403 'from foo_module import *' used; unable to detect undefined names +# F405 foo_function may be undefined, or defined from star imports: bar_module +# E502 the backslash is redundant between brackets +# E701 multiple statements on one line (colon) +# E702 multiple statements on one line (semicolon) +# E703 statement ends with a semicolon +# E714 test for object identity should be "is not" +# E721 do not compare types, use "isinstance()" +# E741 do not use variables named "l", "O", or "I" +# E742 do not define classes named "l", "O", or "I" +# E743 do not define functions named "l", "O", or "I" +# E901 SyntaxError: invalid syntax +# E902 TokenError: EOF in multi-line string +# F401 module imported but unused +# F402 import module from line N shadowed by loop variable +# F404 future import(s) name after other statements +# F406 "from module import *" only allowed at module level +# F407 an undefined __future__ feature name was imported +# F601 dictionary key name repeated with different values +# F602 dictionary key variable name repeated with different values +# F621 too many expressions in an assignment with star-unpacking +# F622 two or more starred expressions in an assignment (a, *b, *c = d) +# F631 assertion test is a tuple, which are always True +# F701 a break statement outside of a while or for loop +# F702 a continue statement outside of a while or for loop +# F703 a continue statement in a finally block in a loop +# F704 a yield or yield from statement outside of a function +# F705 a return statement with arguments inside a generator +# F706 a return statement outside of a function/method +# F707 an except: block as not the last exception handler +# F811 redefinition of unused name from line N +# F812 list comprehension redefines 'foo' from line N +# F821 undefined name 'Foo' +# F822 undefined name name in __all__ +# F823 local variable name … referenced before assignment +# F831 duplicate argument name in function definition +# F841 local variable 'foo' is assigned to but never used +# W191 indentation contains tabs +# W291 trailing whitespace +# W292 no newline at end of file +# W293 blank line contains whitespace +# W504 line break after binary operator +# W601 .has_key() is deprecated, use "in" +# W602 deprecated form of raising exception +# W603 "<>" is deprecated, use "!=" +# W604 backticks are deprecated, use "repr()" +# W605 invalid escape sequence "x" +# W606 'async' and 'await' are reserved keywords starting with Python 3.7 + +if ! command -v flake8 > /dev/null; then + echo "Skipping Python linting since flake8 is not installed. 
Install by running \"pip3 install flake8\"" + exit 0 +elif PYTHONWARNINGS="ignore" flake8 --version | grep -q "Python 2"; then + echo "Skipping Python linting since flake8 is running under Python 2. Install the Python 3 version of flake8 by running \"pip3 install flake8\"" + exit 0 +fi + +PYTHONWARNINGS="ignore" flake8 --ignore=B,C,E,F,I,N,W --select=E101,E112,E113,E115,E116,E125,E129,E131,E133,E223,E224,E242,E266,E271,E272,E273,E274,E275,E304,E306,E401,E402,E502,E701,E702,E703,E714,E721,E741,E742,E743,E901,E902,F401,F402,F403,F404,F405,F406,F407,F601,F602,F621,F622,F631,F701,F702,F703,F704,F705,F706,F707,F811,F812,F821,F822,F823,F831,F841,W191,W291,W292,W293,W504,W601,W602,W603,W604,W605,W606 . diff --git a/test/lint/lint-qt.sh b/test/lint/lint-qt.sh new file mode 100755 index 000000000..2e77682aa --- /dev/null +++ b/test/lint/lint-qt.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check for SIGNAL/SLOT connect style, removed since Qt4 support drop. + +export LC_ALL=C + +EXIT_CODE=0 + +OUTPUT=$(git grep -E '(SIGNAL|, ?SLOT)\(' -- src/qt) +if [[ ${OUTPUT} != "" ]]; then + echo "Use Qt5 connect style in:" + echo "$OUTPUT" + EXIT_CODE=1 +fi + +exit ${EXIT_CODE} diff --git a/test/lint/lint-shell-locale.sh b/test/lint/lint-shell-locale.sh new file mode 100755 index 000000000..084dc93f7 --- /dev/null +++ b/test/lint/lint-shell-locale.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Make sure all shell scripts: +# a.) explicitly opt out of locale dependence using +# "export LC_ALL=C" or "export LC_ALL=C.UTF-8", or +# b.) explicitly opt in to locale dependence using the annotation below. + +export LC_ALL=C + +EXIT_CODE=0 +for SHELL_SCRIPT in $(git ls-files -- "*.sh" | grep -vE "src/(secp256k1|univalue)/"); do + if grep -q "# This script is intentionally locale dependent by not setting \"export LC_ALL=C\"" "${SHELL_SCRIPT}"; then + continue + fi + FIRST_NON_COMMENT_LINE=$(grep -vE '^(#.*)?$' "${SHELL_SCRIPT}" | head -1) + if [[ ${FIRST_NON_COMMENT_LINE} != "export LC_ALL=C" && ${FIRST_NON_COMMENT_LINE} != "export LC_ALL=C.UTF-8" ]]; then + echo "Missing \"export LC_ALL=C\" (to avoid locale dependence) as first non-comment non-empty line in ${SHELL_SCRIPT}" + EXIT_CODE=1 + fi +done +exit ${EXIT_CODE} diff --git a/test/lint/lint-shell.sh b/test/lint/lint-shell.sh new file mode 100755 index 000000000..9af3c10ed --- /dev/null +++ b/test/lint/lint-shell.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Check for shellcheck warnings in shell scripts. + +export LC_ALL=C + +# The shellcheck binary segfault/coredumps in Travis with LC_ALL=C +# It does not do so in Ubuntu 14.04, 16.04, 18.04 in versions 0.3.3, 0.3.7, 0.4.6 +# respectively. So export LC_ALL=C is set as required by lint-shell-locale.sh +# but unset here in case of running in Travis. +if [ "$TRAVIS" = "true" ]; then + unset LC_ALL +fi + +if ! command -v shellcheck > /dev/null; then + echo "Skipping shell linting since shellcheck is not installed." 
+ exit 0 +fi + +# Disabled warnings: +# SC1087: Use braces when expanding arrays, e.g. ${array[idx]} (or ${var}[.. to quiet). +# SC1117: Backslash is literal in "\.". Prefer explicit escaping: "\\.". +# SC2001: See if you can use ${variable//search/replace} instead. +# SC2004: $/${} is unnecessary on arithmetic variables. +# SC2005: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. +# SC2006: Use $(..) instead of legacy `..`. +# SC2016: Expressions don't expand in single quotes, use double quotes for that. +# SC2028: echo won't expand escape sequences. Consider printf. +# SC2046: Quote this to prevent word splitting. +# SC2048: Use "$@" (with quotes) to prevent whitespace problems. +# SC2066: Since you double quoted this, it will not word split, and the loop will only run once. +# SC2086: Double quote to prevent globbing and word splitting. +# SC2116: Useless echo? Instead of 'cmd $(echo foo)', just use 'cmd foo'. +# SC2148: Tips depend on target shell and yours is unknown. Add a shebang. +# SC2162: read without -r will mangle backslashes. +# SC2166: Prefer [ p ] && [ q ] as [ p -a q ] is not well defined. +# SC2166: Prefer [ p ] || [ q ] as [ p -o q ] is not well defined. +# SC2181: Check exit code directly with e.g. 'if mycmd;', not indirectly with $?. +# SC2206: Quote to prevent word splitting, or split robustly with mapfile or read -a. +# SC2207: Prefer mapfile or read -a to split command output (or quote to avoid splitting). +# SC2230: which is non-standard. Use builtin 'command -v' instead. +shellcheck -e SC1087,SC1117,SC2001,SC2004,SC2005,SC2006,SC2016,SC2028,SC2046,SC2048,SC2066,SC2086,SC2116,SC2148,SC2162,SC2166,SC2181,SC2206,SC2207,SC2230 \ + $(git ls-files -- "*.sh" | grep -vE 'src/(secp256k1|univalue)/') diff --git a/test/lint/lint-spelling.ignore-words.txt b/test/lint/lint-spelling.ignore-words.txt new file mode 100644 index 000000000..9a49f3227 --- /dev/null +++ b/test/lint/lint-spelling.ignore-words.txt @@ -0,0 +1,6 @@ +cas +hights +mor +objext +unselect +useable diff --git a/test/lint/lint-spelling.sh b/test/lint/lint-spelling.sh new file mode 100755 index 000000000..ebeafd7d5 --- /dev/null +++ b/test/lint/lint-spelling.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# Warn in case of spelling errors. +# Note: Will exit successfully regardless of spelling errors. + +export LC_ALL=C + +EXIT_CODE=0 +IGNORE_WORDS_FILE=test/lint/lint-spelling.ignore-words.txt +if ! codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=${IGNORE_WORDS_FILE} $(git ls-files -- ":(exclude)build-aux/m4/" ":(exclude)contrib/seeds/*.txt" ":(exclude)depends/" ":(exclude)doc/release-notes/" ":(exclude)src/leveldb/" ":(exclude)src/qt/locale/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/"); then + echo "^ Warning: codespell identified likely spelling errors. Any false positives? Add them to the list of ignored words in ${IGNORE_WORDS_FILE}" + EXIT_CODE=1 +fi +exit ${EXIT_CODE} diff --git a/test/lint/lint-tests.sh b/test/lint/lint-tests.sh new file mode 100755 index 000000000..35d11023e --- /dev/null +++ b/test/lint/lint-tests.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash +# +# Copyright (c) 2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+#
+# Check the test suite naming conventions
+
+export LC_ALL=C
+EXIT_CODE=0
+
+NAMING_INCONSISTENCIES=$(git grep -E '^BOOST_FIXTURE_TEST_SUITE\(' -- \
+    "src/test/**.cpp" "src/wallet/test/**.cpp" | \
+    grep -vE '/(.*?)\.cpp:BOOST_FIXTURE_TEST_SUITE\(\1, .*\)$')
+if [[ ${NAMING_INCONSISTENCIES} != "" ]]; then
+    echo "The test suite in file src/test/foo_tests.cpp should be named"
+    echo "\"foo_tests\". Please make sure the following test suites follow"
+    echo "that convention:"
+    echo
+    echo "${NAMING_INCONSISTENCIES}"
+    EXIT_CODE=1
+fi
+
+TEST_SUITE_NAME_COLLISIONS=$(git grep -E '^BOOST_FIXTURE_TEST_SUITE\(' -- \
+    "src/test/**.cpp" "src/wallet/test/**.cpp" | cut -f2 -d'(' | cut -f1 -d, | \
+    sort | uniq -d)
+if [[ ${TEST_SUITE_NAME_COLLISIONS} != "" ]]; then
+    echo "Test suite names must be unique. The following test suite names"
+    echo "appear to be used more than once:"
+    echo
+    echo "${TEST_SUITE_NAME_COLLISIONS}"
+    EXIT_CODE=1
+fi
+
+exit ${EXIT_CODE}
diff --git a/test/lint/lint-whitespace.sh b/test/lint/lint-whitespace.sh
new file mode 100755
index 000000000..beb7ec42f
--- /dev/null
+++ b/test/lint/lint-whitespace.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+# Check for new lines in diff that introduce trailing whitespace.

+# We can't run this check unless we know the commit range for the PR.

+export LC_ALL=C
+while getopts "?" opt; do
+  case $opt in
+    ?)
+      echo "Usage: ./lint-whitespace.sh [N]"
+      echo "       TRAVIS_COMMIT_RANGE='' ./lint-whitespace.sh"
+      echo "       ./lint-whitespace.sh -?"
+      echo "Checks unstaged changes, the previous N commits, or a commit range."
+      echo "TRAVIS_COMMIT_RANGE='47ba2c3...ee50c9e' ./lint-whitespace.sh"
+      exit 0
+      ;;
+  esac
+done
+
+if [ -z "${TRAVIS_COMMIT_RANGE}" ]; then
+  if [ "$1" ]; then
+    TRAVIS_COMMIT_RANGE="HEAD~$1...HEAD"
+  else
+    TRAVIS_COMMIT_RANGE="HEAD"
+  fi
+fi
+
+showdiff() {
+  if ! git diff -U0 "${TRAVIS_COMMIT_RANGE}" -- "." ":(exclude)depends/patches/" ":(exclude)src/leveldb/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/"; then
+    echo "Failed to get a diff"
+    exit 1
+  fi
+}
+
+showcodediff() {
+  if ! git diff -U0 "${TRAVIS_COMMIT_RANGE}" -- *.cpp *.h *.md *.py *.sh ":(exclude)src/leveldb/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/"; then
+    echo "Failed to get a diff"
+    exit 1
+  fi
+}
+
+RET=0
+
+# Check if trailing whitespace was found in the diff.
+if showdiff | grep -E -q '^\+.*\s+$'; then
+  echo "This diff appears to have added new lines with trailing whitespace."
+  echo "The following changes were suspected:"
+  FILENAME=""
+  SEEN=0
+  SEENLN=0
+  while read -r line; do
+    if [[ "$line" =~ ^diff ]]; then
+      FILENAME="$line"
+      SEEN=0
+    elif [[ "$line" =~ ^@@ ]]; then
+      LINENUMBER="$line"
+      SEENLN=0
+    else
+      if [ "$SEEN" -eq 0 ]; then
+        # The first time a file is seen with trailing whitespace, we print the
+        # filename (preceded by a newline).
+        echo
+        echo "$FILENAME"
+        SEEN=1
+      fi
+      if [ "$SEENLN" -eq 0 ]; then
+        echo "$LINENUMBER"
+        SEENLN=1
+      fi
+      echo "$line"
+    fi
+  done < <(showdiff | grep -E '^(diff --git |@@|\+.*\s+$)')
+  RET=1
+fi
+
+# Check if tab characters were found in the diff.
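+# For example (illustrative), an added diff line such as "+\tfoo();" (a line
+# indented with a tab rather than spaces) matches the perl expression below.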
+if showcodediff | perl -nle '$MATCH++ if m{^\+.*\t}; END{exit 1 unless $MATCH>0}' > /dev/null; then
+  echo "This diff appears to have added new lines with tab characters instead of spaces."
+  echo "The following changes were suspected:"
+  FILENAME=""
+  SEEN=0
+  SEENLN=0
+  while read -r line; do
+    if [[ "$line" =~ ^diff ]]; then
+      FILENAME="$line"
+      SEEN=0
+    elif [[ "$line" =~ ^@@ ]]; then
+      LINENUMBER="$line"
+      SEENLN=0
+    else
+      if [ "$SEEN" -eq 0 ]; then
+        # The first time a file is seen with a tab character, we print the
+        # filename (preceded by a newline).
+        echo
+        echo "$FILENAME"
+        SEEN=1
+      fi
+      if [ "$SEENLN" -eq 0 ]; then
+        echo "$LINENUMBER"
+        SEENLN=1
+      fi
+      echo "$line"
+    fi
+  done < <(showcodediff | perl -nle 'print if m{^(diff --git |@@|\+.*\t)}')
+  RET=1
+fi
+
+exit $RET
diff --git a/test/util/bitcoin-util-test.py b/test/util/bitcoin-util-test.py
index ef34955d9..92fef30e1 100755
--- a/test/util/bitcoin-util-test.py
+++ b/test/util/bitcoin-util-test.py
@@ -28,7 +28,7 @@ def main():
     config = configparser.ConfigParser()
     config.optionxform = str
-    config.readfp(open(os.path.join(os.path.dirname(__file__), "../config.ini")))
+    config.read_file(open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8"))
     env_conf = dict(config.items('environment'))
 
     parser = argparse.ArgumentParser(description=__doc__)
@@ -44,12 +44,12 @@ def main():
     # Add the format/level to the logger
     logging.basicConfig(format=formatter, level=level)
 
-    bctester(os.path.join(env_conf["SRCDIR"], "test/util/data"), "bitcoin-util-test.json", env_conf)
+    bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "bitcoin-util-test.json", env_conf)
 
 def bctester(testDir, input_basename, buildenv):
     """ Loads and parses the input file, runs all tests and reports results"""
-    input_filename = testDir + "/" + input_basename
-    raw_data = open(input_filename).read()
+    input_filename = os.path.join(testDir, input_basename)
+    raw_data = open(input_filename, encoding="utf8").read()
     input_data = json.loads(raw_data)
 
     failed_testcases = []
@@ -77,7 +77,7 @@ def bctest(testDir, testObj, buildenv):
     are not as expected. Error is caught by bctester() and reported.
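(Editorial aside.) The bitcoin-util-test.py changes above and below swap string-concatenated paths for `os.path.join()`, replace `ConfigParser.readfp()` with `read_file()` (the former has been deprecated since Python 3.2), and pass an explicit `encoding="utf8"` to every `open()`, since with `LC_ALL=C` in the environment Python may otherwise default to a restrictive locale encoding. A standalone illustration, not from the patch:

```python
import locale
import os

# Platform-correct joining instead of hard-coded "/" separators:
# prints "test/util/data" on POSIX, "test\util\data" on Windows.
print(os.path.join("test", "util", "data"))

# The default text encoding follows the locale, which is why the patch
# pins encoding="utf8" on every open() call:
print(locale.getpreferredencoding(False))
```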
""" # Get the exec names and arguments - execprog = buildenv["BUILDDIR"] + "/src/" + testObj['exec'] + buildenv["EXEEXT"] + execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"]) execargs = testObj['args'] execrun = [execprog] + execargs @@ -85,24 +85,28 @@ def bctest(testDir, testObj, buildenv): stdinCfg = None inputData = None if "input" in testObj: - filename = testDir + "/" + testObj['input'] - inputData = open(filename).read() + filename = os.path.join(testDir, testObj["input"]) + inputData = open(filename, encoding="utf8").read() stdinCfg = subprocess.PIPE # Read the expected output data (if there is any) outputFn = None outputData = None + outputType = None if "output_cmp" in testObj: outputFn = testObj['output_cmp'] outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare) try: - outputData = open(testDir + "/" + outputFn).read() + outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read() except: logging.error("Output file " + outputFn + " can not be opened") raise if not outputData: logging.error("Output data missing for " + outputFn) raise Exception + if not outputType: + logging.error("Output file %s does not have a file extension" % outputFn) + raise Exception # Run the test proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) diff --git a/test/util/data/bitcoin-util-test.json b/test/util/data/bitcoin-util-test.json index e5b7064e6..761923a81 100644 --- a/test/util/data/bitcoin-util-test.json +++ b/test/util/data/bitcoin-util-test.json @@ -1,65 +1,149 @@ [ - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "nversion=1"], "output_cmp": "blanktxv1.hex", "description": "Creates a blank v1 transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json","-create", "nversion=1"], "output_cmp": "blanktxv1.json", "description": "Creates a blank v1 transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", + "args": ["-"], + "input": "blanktxv2.hex", + "output_cmp": "blanktxv2.hex", + "description": "Creates a blank transaction when nothing is piped into bitcoin-tx" + }, + { "exec": "./bitcoin-tx", + "args": ["-json","-create"], + "output_cmp": "blanktxv2.json", + "description": "Creates a blank transaction (output in json)" + }, + { "exec": "./bitcoin-tx", + "args": ["-json","-"], + "input": "blanktxv2.hex", + "output_cmp": "blanktxv2.json", + "description": "Creates a blank transaction when nothing is piped into bitcoin-tx (output in json)" + }, + { "exec": "./bitcoin-tx", + "args": ["-create", "nversion=1foo"], + "return_code": 1, + "error_txt": "error: Invalid TX version requested", + "description": "Tests the check for invalid nversion value" + }, + { "exec": "./bitcoin-tx", "args": ["-", "delin=1"], "input": "tx394b54bb.hex", "output_cmp": "tt-delin1-out.hex", "description": "Deletes a single input from a transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-", "delin=1"], "input": "tx394b54bb.hex", "output_cmp": "tt-delin1-out.json", "description": "Deletes a single input from a transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-", "delin=31"], "input": "tx394b54bb.hex", "return_code": 1, "error_txt": "error: Invalid TX input index '31'", "description": "Attempts to delete an input with a bad index from a transaction. Expected to fail." 
}, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", + "args": ["-", "delin=1foo"], + "input": "tx394b54bb.hex", + "return_code": 1, + "error_txt": "error: Invalid TX input index", + "description": "Tests the check for an invalid input index with delin" + }, + { "exec": "./bitcoin-tx", "args": ["-", "delout=1"], "input": "tx394b54bb.hex", "output_cmp": "tt-delout1-out.hex", "description": "Deletes a single output from a transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-", "delout=1"], "input": "tx394b54bb.hex", "output_cmp": "tt-delout1-out.json", "description": "Deletes a single output from a transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-", "delout=2"], "input": "tx394b54bb.hex", "return_code": 1, "error_txt": "error: Invalid TX output index '2'", "description": "Attempts to delete an output with a bad index from a transaction. Expected to fail." }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", + "args": ["-", "delout=1foo"], + "input": "tx394b54bb.hex", + "return_code": 1, + "error_txt": "error: Invalid TX output index", + "description": "Tests the check for an invalid output index with delout" + }, + { "exec": "./bitcoin-tx", "args": ["-", "locktime=317000"], "input": "tx394b54bb.hex", "output_cmp": "tt-locktime317000-out.hex", "description": "Adds an nlocktime to a transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-", "locktime=317000"], "input": "tx394b54bb.hex", "output_cmp": "tt-locktime317000-out.json", "description": "Adds an nlocktime to a transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", + "args": ["-create", "locktime=317000foo"], + "return_code": 1, + "error_txt": "error: Invalid TX locktime requested", + "description": "Tests the check for invalid locktime value" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=Z897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0"], + "return_code": 1, + "error_txt": "error: invalid TX input txid", + "description": "Tests the check for an invalid txid invalid hex" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=5897de6bd6:0"], + "return_code": 1, + "error_txt": "error: invalid TX input txid", + "description": "Tests the check for an invalid txid valid hex but too short" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f12:0"], + "return_code": 1, + "error_txt": "error: invalid TX input txid", + "description": "Tests the check for an invalid txid valid hex but too long" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", + "replaceable=0foo"], + "return_code": 1, + "error_txt": "error: Invalid TX input index", + "description": "Tests the check for an invalid input index with replaceable" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0x"], + "return_code": 1, + "error_txt": "error: invalid TX input vout", + "description": "Tests the check for an invalid vout value when adding an input" + }, + { "exec": "./bitcoin-tx", "args": ["-create", "outaddr=1"], @@ -67,15 +151,15 @@ "error_txt": "error: TX output missing or too many separators", "description": "Malformed outaddr argument (no address specified). Expected to fail." 
}, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", - "outaddr=1:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1:garbage"], + "outaddr=1:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o:garbage"], "return_code": 1, "error_txt": "error: TX output missing or too many separators", "description": "Malformed outaddr argument (too many separators). Expected to fail." }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outpubkey=0"], @@ -83,7 +167,7 @@ "error_txt": "error: TX output missing or too many separators", "description": "Malformed outpubkey argument (no pubkey specified). Expected to fail." }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:W:non53nse"], @@ -91,101 +175,101 @@ "error_txt": "error: TX output missing or too many separators", "description": "Malformed outpubkey argument (too many separators). Expected to fail." }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", "in=bf829c6bcf84579331337659d31f89dfd138f7f7785802d5501c92333145ca7c:18", "in=22a6f904655d53ae2ff70e701a0bbd90aa3975c0f40bfc6cc996a9049e31cdfc:1", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", - "outaddr=4:ZyWqGihqeM2gwVJcyGdewP7JnPEviphmv8"], + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", + "outaddr=4:1P8yWvZW8jVihP1bzHeqfE4aoXNX8AVa46"], "output_cmp": "txcreate1.hex", "description": "Creates a new transaction with three inputs and two outputs" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", "in=bf829c6bcf84579331337659d31f89dfd138f7f7785802d5501c92333145ca7c:18", "in=22a6f904655d53ae2ff70e701a0bbd90aa3975c0f40bfc6cc996a9049e31cdfc:1", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", - "outaddr=4:ZyWqGihqeM2gwVJcyGdewP7JnPEviphmv8"], + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", + "outaddr=4:1P8yWvZW8jVihP1bzHeqfE4aoXNX8AVa46"], "output_cmp": "txcreate1.json", "description": "Creates a new transaction with three inputs and two outputs (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outscript=0:"], "output_cmp": "txcreate2.hex", "description": "Creates a new transaction with a single empty output script" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outscript=0:"], "output_cmp": "txcreate2.json", "description": "Creates a new transaction with a single empty output script (output in json)" }, - { "exec": "./bitzeny-tx", - "args": ["01000000000100000000000000000000000000"], + { "exec": "./bitcoin-tx", + "args": ["02000000000100000000000000000000000000"], "output_cmp": "txcreate2.hex", - "description": "Parses a transation with no inputs and a single output script" + "description": "Parses a transaction with no inputs and a single output script" }, - { "exec": "./bitzeny-tx", - "args": ["-json", "01000000000100000000000000000000000000"], + { "exec": "./bitcoin-tx", + "args": ["-json", "02000000000100000000000000000000000000"], "output_cmp": "txcreate2.json", - "description": "Parses a transation with no inputs and a single output script (output in json)" + "description": "Parses a transaction with no inputs and a single output script (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outscript=0:OP_DROP", 
"nversion=1"], "output_cmp": "txcreatescript1.hex", "description": "Create a new transaction with a single output script (OP_DROP)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outscript=0:OP_DROP", "nversion=1"], "output_cmp": "txcreatescript1.json", "description": "Create a new transaction with a single output script (OP_DROP) (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outscript=0:OP_DROP:S", "nversion=1"], "output_cmp": "txcreatescript2.hex", "description": "Create a new transaction with a single output script (OP_DROP) in a P2SH" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outscript=0:OP_DROP:S", "nversion=1"], "output_cmp": "txcreatescript2.json", "description": "Create a new transaction with a single output script (OP_DROP) in a P2SH (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outscript=0:OP_DROP:W", "nversion=1"], "output_cmp": "txcreatescript3.hex", "description": "Create a new transaction with a single output script (OP_DROP) in a P2WSH" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outscript=0:OP_DROP:W", "nversion=1"], "output_cmp": "txcreatescript3.json", "description": "Create a new transaction with a single output script (OP_DROP) in a P2WSH (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outscript=0:OP_DROP:WS", "nversion=1"], "output_cmp": "txcreatescript4.hex", "description": "Create a new transaction with a single output script (OP_DROP) in a P2WSH, wrapped in a P2SH" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outscript=0:OP_DROP:WS", "nversion=1"], "output_cmp": "txcreatescript4.json", "description": "Create a new transaction with a single output script (OP_DROP) in a P2SH, wrapped in a P2SH (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "nversion=1", "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", "sign=ALL", - "outaddr=0.001:ZjREr93FwgKB2rWwL8tM4AjbRHi6ZDzxHJ"], + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], "output_cmp": "txcreatesignv1.hex", "description": "Creates a new v1 transaction with a single input and a single output, and then signs the transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "nversion=1", @@ -193,54 +277,113 @@ "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", "sign=ALL", - "outaddr=0.001:ZjREr93FwgKB2rWwL8tM4AjbRHi6ZDzxHJ"], + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], "output_cmp": "txcreatesignv1.json", "description": "Creates a new v1 transaction with a single input and a single output, and then signs the transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", + 
"set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", + "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", + "sign=ALL", + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], + "output_cmp": "txcreatesignv2.hex", + "description": "Creates a new transaction with a single input and a single output, and then signs the transaction" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", + "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", + "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485\",\"vout\":\"0foo\",\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", + "sign=ALL", + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], + "return_code": 1, + "error_txt": "error: prevtxs internal object typecheck fail", + "description": "Tests the check for invalid vout index in prevtxs for sign" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", + "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", + "set=prevtxs:[{\"txid\":\"Zd49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59412\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", + "sign=ALL", + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], + "return_code": 1, + "error_txt": "error: txid must be hexadecimal string (not 'Zd49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59412')", + "description": "Tests the check for invalid txid due to invalid hex" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", + "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", + "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc594\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", + "sign=ALL", + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], + "return_code": 1, + "error_txt": "error: txid must be hexadecimal string (not '4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc594')", + "description": "Tests the check for invalid txid valid hex, but too short" + }, + { "exec": "./bitcoin-tx", + "args": + ["-create", + "in=4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485:0", + "set=privatekeys:[\"5HpHagT65TZzG1PH3CSu63k8DbpvD8s5ip4nEB3kEsreAnchuDf\"]", + "set=prevtxs:[{\"txid\":\"4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc5948512\",\"vout\":0,\"scriptPubKey\":\"76a91491b24bf9f5288532960ac687abb035127b1d28a588ac\"}]", + "sign=ALL", + "outaddr=0.001:193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7"], + "return_code": 1, + "error_txt": "error: txid must be hexadecimal string (not '4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc5948512')", + "description": "Tests the check for invalid txid valid hex, but too long" + }, + { "exec": "./bitcoin-tx", "args": ["-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397", "nversion=1"], "output_cmp": "txcreateoutpubkey1.hex", "description": "Creates a new transaction with a single pay-to-pubkey output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", 
"outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397", "nversion=1"], "output_cmp": "txcreateoutpubkey1.json", "description": "Creates a new transaction with a single pay-to-pubkey output (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:W", "nversion=1"], "output_cmp": "txcreateoutpubkey2.hex", "description": "Creates a new transaction with a single pay-to-witness-pubkey output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:W", "nversion=1"], "output_cmp": "txcreateoutpubkey2.json", "description": "Creates a new transaction with a single pay-to-witness-pubkey output (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:WS", "nversion=1"], "output_cmp": "txcreateoutpubkey3.hex", "description": "Creates a new transaction with a single pay-to-witness-pubkey, wrapped in P2SH output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outpubkey=0:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:WS", "nversion=1"], "output_cmp": "txcreateoutpubkey3.json", "description": "Creates a new transaction with a single pay-to-pub-key output, wrapped in P2SH (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outpubkey=0:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:WS", "nversion=1"], "return_code": 1, "error_txt": "error: Uncompressed pubkeys are not useable for SegWit outputs", "description": "Creates a new transaction with a single pay-to-pub-key output, wrapped in P2SH (output as json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", @@ -249,7 +392,7 @@ "error_txt": "error: invalid TX output data", "description": "Attempts to create a new transaction with one input and an output with malformed hex data. Expected to fail" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", @@ -258,69 +401,69 @@ "error_txt": "error: invalid TX output data", "description": "Attempts to create a new transaction with one input and an output with no value and malformed hex data. 
Expected to fail" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", "outdata=4:54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e"], "output_cmp": "txcreatedata1.hex", "description": "Creates a new transaction with one input, one address output and one data output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "nversion=1", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", "outdata=4:54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e"], "output_cmp": "txcreatedata1.json", "description": "Creates a new v1 transaction with one input, one address output and one data output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", "outdata=54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e"], "output_cmp": "txcreatedata2.hex", "description": "Creates a new transaction with one input, one address output and one data (zero value) output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1", + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o", "outdata=54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e"], "output_cmp": "txcreatedata2.json", "description": "Creates a new transaction with one input, one address output and one data (zero value) output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:4294967293", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1"], + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o"], "output_cmp": "txcreatedata_seq0.hex", "description": "Creates a new transaction with one input with sequence number and one address output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:4294967293", - "outaddr=0.18:ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1"], + "outaddr=0.18:13tuJJDR2RgArmgfv6JScSdreahzgc4T6o"], "output_cmp": "txcreatedata_seq0.json", "description": "Creates a new transaction with one input with sequence number and one address output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000", "in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0:1"], "output_cmp": 
"txcreatedata_seq1.hex", "description": "Adds a new input with sequence number to a transaction" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000", @@ -328,52 +471,52 @@ "output_cmp": "txcreatedata_seq1.json", "description": "Adds a new input with sequence number to a transaction (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485", "nversion=1"], "output_cmp": "txcreatemultisig1.hex", "description": "Creates a new transaction with a single 2-of-3 multisig output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485", "nversion=1"], "output_cmp": "txcreatemultisig1.json", "description": "Creates a new transaction with a single 2-of-3 multisig output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:S", "nversion=1"], "output_cmp": "txcreatemultisig2.hex", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2SH output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:S", "nversion=1"], "output_cmp": "txcreatemultisig2.json", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2SH output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:W", "nversion=1"], "output_cmp": "txcreatemultisig3.hex", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2WSH output" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:W", "nversion=1"], "output_cmp": "txcreatemultisig3.json", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2WSH output (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:WS", 
"nversion=1"], "output_cmp": "txcreatemultisig4.hex", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2WSH output, wrapped in P2SH" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:WS", "nversion=1"], "output_cmp": "txcreatemultisig4.json", "description": "Creates a new transaction with a single 2-of-3 multisig in a P2WSH output, wrapped in P2SH (output in json)" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:S"], "output_cmp": "txcreatemultisig5.json", "description": "Uncompressed pubkeys should work just fine for non-witness outputs" }, - { "exec": "./bitzeny-tx", + { "exec": "./bitcoin-tx", "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:WS"], "return_code": 1, "error_txt": "error: Uncompressed pubkeys are not useable for SegWit outputs", diff --git a/test/util/data/blanktxv1.json b/test/util/data/blanktxv1.json index 9fe2de649..3d5a1ccca 100644 --- a/test/util/data/blanktxv1.json +++ b/test/util/data/blanktxv1.json @@ -4,6 +4,7 @@ "version": 1, "size": 10, "vsize": 10, + "weight": 40, "locktime": 0, "vin": [ ], diff --git a/test/util/data/blanktxv2.json b/test/util/data/blanktxv2.json index e97626e42..8374a34ad 100644 --- a/test/util/data/blanktxv2.json +++ b/test/util/data/blanktxv2.json @@ -4,6 +4,7 @@ "version": 2, "size": 10, "vsize": 10, + "weight": 40, "locktime": 0, "vin": [ ], diff --git a/test/util/data/tt-delin1-out.json b/test/util/data/tt-delin1-out.json index 44bd8d344..9fc2ddc37 100644 --- a/test/util/data/tt-delin1-out.json +++ b/test/util/data/tt-delin1-out.json @@ -4,6 +4,7 @@ "version": 1, "size": 3040, "vsize": 3040, + "weight": 12160, "locktime": 0, "vin": [ { @@ -14,7 +15,7 @@ "hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1", "vout": 1, @@ -23,7 +24,7 @@ "hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed", "vout": 209, @@ -32,7 +33,7 @@ "hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": 
"a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df", "vout": 0, @@ -41,7 +42,7 @@ "hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae", "vout": 1, @@ -50,7 +51,7 @@ "hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3", "vout": 0, @@ -59,7 +60,7 @@ "hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485", "vout": 21, @@ -68,7 +69,7 @@ "hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282", "vout": 9, @@ -77,7 +78,7 @@ "hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88", "vout": 30, @@ -86,7 +87,7 @@ "hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766", "vout": 114, @@ -95,7 +96,7 @@ "hex": "47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02", "vout": 103, @@ -104,7 +105,7 @@ "hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816", "vout": 1, @@ -113,7 +114,7 @@ "hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19", "vout": 0, @@ -122,7 +123,7 @@ "hex": 
"49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920", "vout": 221, @@ -131,7 +132,7 @@ "hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150", "vout": 1, @@ -140,7 +141,7 @@ "hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55", "vout": 27, @@ -149,7 +150,7 @@ "hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057", "vout": 1095, @@ -158,7 +159,7 @@ "hex": "48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f", "vout": 37, @@ -167,7 +168,7 @@ "hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241", "vout": 20, @@ -176,7 +177,7 @@ "hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236", "vout": 242, @@ -197,10 +198,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZpVJ2UJtkopC2brMtQUXoCjCkpgm9VUA7Q" + "1E7SGgAZFCHDnVZLuRViX3gUmxpMfdvd2o" ] } - }, + }, { "value": 0.01000001, "n": 1, @@ -210,7 +211,7 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZmGNWRv1HAHD6aJ7c2JNFuMCa6gZ1QRVn9" + "1AtWkdmfmYkErU16d3KYykJUbEp9MAj9Sb" ] } } diff --git a/test/util/data/tt-delout1-out.json b/test/util/data/tt-delout1-out.json index 672a01aec..922d04890 100644 --- a/test/util/data/tt-delout1-out.json +++ b/test/util/data/tt-delout1-out.json @@ -4,6 +4,7 @@ "version": 1, "size": 3155, "vsize": 3155, + "weight": 12620, "locktime": 0, "vin": [ { @@ -14,7 +15,7 @@ "hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": 
"a72ec96bd0d022d1b0c2f9078cdd46b3725b8eecdd001e17b21e3ababad14ecb", "vout": 0, @@ -23,7 +24,7 @@ "hex": "493046022100a9b617843b68c284715d3e02fd120479cd0d96a6c43bf01e697fb0a460a21a3a022100ba0a12fbe8b993d4e7911fa3467615765dbe421ddf5c51b57a9c1ee19dcc00ba012103e633b4fa4ceb705c2da712390767199be8ef2448b3095dc01652e11b2b751505" }, "sequence": 4294967295 - }, + }, { "txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1", "vout": 1, @@ -32,7 +33,7 @@ "hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed", "vout": 209, @@ -41,7 +42,7 @@ "hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df", "vout": 0, @@ -50,7 +51,7 @@ "hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae", "vout": 1, @@ -59,7 +60,7 @@ "hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3", "vout": 0, @@ -68,7 +69,7 @@ "hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485", "vout": 21, @@ -77,7 +78,7 @@ "hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282", "vout": 9, @@ -86,7 +87,7 @@ "hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88", "vout": 30, @@ -95,7 +96,7 @@ "hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766", "vout": 114, @@ -104,7 +105,7 @@ "hex": 
"47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02", "vout": 103, @@ -113,7 +114,7 @@ "hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816", "vout": 1, @@ -122,7 +123,7 @@ "hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19", "vout": 0, @@ -131,7 +132,7 @@ "hex": "49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920", "vout": 221, @@ -140,7 +141,7 @@ "hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150", "vout": 1, @@ -149,7 +150,7 @@ "hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55", "vout": 27, @@ -158,7 +159,7 @@ "hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057", "vout": 1095, @@ -167,7 +168,7 @@ "hex": "48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f", "vout": 37, @@ -176,7 +177,7 @@ "hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241", "vout": 20, @@ -185,7 +186,7 @@ "hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, 
"sequence": 4294967295 - }, + }, { "txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236", "vout": 242, @@ -206,7 +207,7 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZpVJ2UJtkopC2brMtQUXoCjCkpgm9VUA7Q" + "1E7SGgAZFCHDnVZLuRViX3gUmxpMfdvd2o" ] } } diff --git a/test/util/data/tt-locktime317000-out.json b/test/util/data/tt-locktime317000-out.json index a1221e0a7..c97206f1e 100644 --- a/test/util/data/tt-locktime317000-out.json +++ b/test/util/data/tt-locktime317000-out.json @@ -4,6 +4,7 @@ "version": 1, "size": 3189, "vsize": 3189, + "weight": 12756, "locktime": 317000, "vin": [ { @@ -14,7 +15,7 @@ "hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "a72ec96bd0d022d1b0c2f9078cdd46b3725b8eecdd001e17b21e3ababad14ecb", "vout": 0, @@ -23,7 +24,7 @@ "hex": "493046022100a9b617843b68c284715d3e02fd120479cd0d96a6c43bf01e697fb0a460a21a3a022100ba0a12fbe8b993d4e7911fa3467615765dbe421ddf5c51b57a9c1ee19dcc00ba012103e633b4fa4ceb705c2da712390767199be8ef2448b3095dc01652e11b2b751505" }, "sequence": 4294967295 - }, + }, { "txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1", "vout": 1, @@ -32,7 +33,7 @@ "hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed", "vout": 209, @@ -41,7 +42,7 @@ "hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df", "vout": 0, @@ -50,7 +51,7 @@ "hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae", "vout": 1, @@ -59,7 +60,7 @@ "hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3", "vout": 0, @@ -68,7 +69,7 @@ "hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52" }, "sequence": 4294967295 - }, + }, { "txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485", "vout": 21, @@ -77,7 +78,7 @@ "hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282", 
"vout": 9, @@ -86,7 +87,7 @@ "hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88", "vout": 30, @@ -95,7 +96,7 @@ "hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766", "vout": 114, @@ -104,7 +105,7 @@ "hex": "47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02", "vout": 103, @@ -113,7 +114,7 @@ "hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816", "vout": 1, @@ -122,7 +123,7 @@ "hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19", "vout": 0, @@ -131,7 +132,7 @@ "hex": "49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34" }, "sequence": 4294967295 - }, + }, { "txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920", "vout": 221, @@ -140,7 +141,7 @@ "hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc" }, "sequence": 4294967295 - }, + }, { "txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150", "vout": 1, @@ -149,7 +150,7 @@ "hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd" }, "sequence": 4294967295 - }, + }, { "txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55", "vout": 27, @@ -158,7 +159,7 @@ "hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057", "vout": 1095, @@ -167,7 +168,7 @@ "hex": 
"48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f", "vout": 37, @@ -176,7 +177,7 @@ "hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241", "vout": 20, @@ -185,7 +186,7 @@ "hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c" }, "sequence": 4294967295 - }, + }, { "txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236", "vout": 242, @@ -206,10 +207,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZpVJ2UJtkopC2brMtQUXoCjCkpgm9VUA7Q" + "1E7SGgAZFCHDnVZLuRViX3gUmxpMfdvd2o" ] } - }, + }, { "value": 0.01000001, "n": 1, @@ -219,7 +220,7 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZmGNWRv1HAHD6aJ7c2JNFuMCa6gZ1QRVn9" + "1AtWkdmfmYkErU16d3KYykJUbEp9MAj9Sb" ] } } diff --git a/test/util/data/txcreate1.hex b/test/util/data/txcreate1.hex index e2981a51c..9ec6ee353 100644 --- a/test/util/data/txcreate1.hex +++ b/test/util/data/txcreate1.hex @@ -1 +1 @@ -01000000031f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff7cca453133921c50d5025878f7f738d1df891fd359763331935784cf6b9c82bf1200000000fffffffffccd319e04a996c96cfc0bf4c07539aa90bd0b1a700ef72fae535d6504f9a6220100000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d717000000001976a914f2d4db28cad6502226ee484ae24505c2885cb12d88ac00000000 +02000000031f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff7cca453133921c50d5025878f7f738d1df891fd359763331935784cf6b9c82bf1200000000fffffffffccd319e04a996c96cfc0bf4c07539aa90bd0b1a700ef72fae535d6504f9a6220100000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d717000000001976a914f2d4db28cad6502226ee484ae24505c2885cb12d88ac00000000 diff --git a/test/util/data/txcreate1.json b/test/util/data/txcreate1.json index c27795f26..ca9eacd54 100644 --- a/test/util/data/txcreate1.json +++ b/test/util/data/txcreate1.json @@ -1,9 +1,10 @@ { - "txid": "f70f0d6c71416ed538e37549f430ab3665fee2437a42f10238c1bd490e782231", - "hash": "f70f0d6c71416ed538e37549f430ab3665fee2437a42f10238c1bd490e782231", - "version": 1, + "txid": "fe7d174f42dce0cffa7a527e9bc8368956057619ec817648f6138b98f2533e8f", + "hash": "fe7d174f42dce0cffa7a527e9bc8368956057619ec817648f6138b98f2533e8f", + "version": 2, "size": 201, "vsize": 201, + "weight": 804, "locktime": 0, "vin": [ { @@ -14,7 +15,7 @@ "hex": "" }, "sequence": 4294967295 - }, + }, { "txid": "bf829c6bcf84579331337659d31f89dfd138f7f7785802d5501c92333145ca7c", "vout": 18, @@ -23,7 +24,7 @@ "hex": "" }, "sequence": 4294967295 - }, + }, { "txid": "22a6f904655d53ae2ff70e701a0bbd90aa3975c0f40bfc6cc996a9049e31cdfc", "vout": 1, @@ -44,10 +45,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1" + "13tuJJDR2RgArmgfv6JScSdreahzgc4T6o" ] } - }, + }, { "value": 4.00000000, "n": 1, @@ 
-57,10 +58,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZyWqGihqeM2gwVJcyGdewP7JnPEviphmv8" + "1P8yWvZW8jVihP1bzHeqfE4aoXNX8AVa46" ] } } ], - "hex": "01000000031f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff7cca453133921c50d5025878f7f738d1df891fd359763331935784cf6b9c82bf1200000000fffffffffccd319e04a996c96cfc0bf4c07539aa90bd0b1a700ef72fae535d6504f9a6220100000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d717000000001976a914f2d4db28cad6502226ee484ae24505c2885cb12d88ac00000000" + "hex": "02000000031f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff7cca453133921c50d5025878f7f738d1df891fd359763331935784cf6b9c82bf1200000000fffffffffccd319e04a996c96cfc0bf4c07539aa90bd0b1a700ef72fae535d6504f9a6220100000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d717000000001976a914f2d4db28cad6502226ee484ae24505c2885cb12d88ac00000000" } diff --git a/test/util/data/txcreate2.hex b/test/util/data/txcreate2.hex index 5243c2d02..38bb7b104 100644 --- a/test/util/data/txcreate2.hex +++ b/test/util/data/txcreate2.hex @@ -1 +1 @@ -01000000000100000000000000000000000000 +02000000000100000000000000000000000000 diff --git a/test/util/data/txcreate2.json b/test/util/data/txcreate2.json index a06537e01..ee9b9c3c1 100644 --- a/test/util/data/txcreate2.json +++ b/test/util/data/txcreate2.json @@ -1,9 +1,10 @@ { - "txid": "cf90229625e9eb10f6be8156bf6aa5ec2eca19a42b1e05c11f3029b560a32e13", - "hash": "cf90229625e9eb10f6be8156bf6aa5ec2eca19a42b1e05c11f3029b560a32e13", - "version": 1, + "txid": "0481afb29931341d0d7861d8a2f6f26456fa042abf54a23e96440ed7946e0715", + "hash": "0481afb29931341d0d7861d8a2f6f26456fa042abf54a23e96440ed7946e0715", + "version": 2, "size": 19, "vsize": 19, + "weight": 76, "locktime": 0, "vin": [ ], @@ -18,5 +19,5 @@ } } ], - "hex": "01000000000100000000000000000000000000" + "hex": "02000000000100000000000000000000000000" } diff --git a/test/util/data/txcreatedata1.hex b/test/util/data/txcreatedata1.hex index eccc7604e..cefd1a05a 100644 --- a/test/util/data/txcreatedata1.hex +++ b/test/util/data/txcreatedata1.hex @@ -1 +1 @@ -01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d71700000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000 +02000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0084d71700000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000 diff --git a/test/util/data/txcreatedata1.json b/test/util/data/txcreatedata1.json index 683d39ac8..39909c2e3 100644 --- a/test/util/data/txcreatedata1.json +++ b/test/util/data/txcreatedata1.json @@ -4,6 +4,7 @@ "version": 1, "size": 176, "vsize": 176, + "weight": 704, "locktime": 0, "vin": [ { @@ -26,10 +27,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1" + "13tuJJDR2RgArmgfv6JScSdreahzgc4T6o" ] } - }, + }, { "value": 4.00000000, "n": 1, diff --git a/test/util/data/txcreatedata2.hex b/test/util/data/txcreatedata2.hex index 3c7644c29..d69cf58ba 100644 --- a/test/util/data/txcreatedata2.hex +++ 
b/test/util/data/txcreatedata2.hex @@ -1 +1 @@ -01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0000000000000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000 +02000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0000000000000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000 diff --git a/test/util/data/txcreatedata2.json b/test/util/data/txcreatedata2.json index f0f323570..2958006e5 100644 --- a/test/util/data/txcreatedata2.json +++ b/test/util/data/txcreatedata2.json @@ -1,9 +1,10 @@ { - "txid": "4ed17118f5e932ba8c75c461787d171bc02a016d8557cb5bcf34cd416c27bb8b", - "hash": "4ed17118f5e932ba8c75c461787d171bc02a016d8557cb5bcf34cd416c27bb8b", - "version": 1, + "txid": "c14b007fa3a6c1e7765919c1d14c1cfc2b8642c3a5d3be4b1fa8c4ccfec98bb0", + "hash": "c14b007fa3a6c1e7765919c1d14c1cfc2b8642c3a5d3be4b1fa8c4ccfec98bb0", + "version": 2, "size": 176, "vsize": 176, + "weight": 704, "locktime": 0, "vin": [ { @@ -26,10 +27,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1" + "13tuJJDR2RgArmgfv6JScSdreahzgc4T6o" ] } - }, + }, { "value": 0.00000000, "n": 1, @@ -40,5 +41,5 @@ } } ], - "hex": "01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0000000000000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000" + "hex": "02000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000ffffffff0280a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac0000000000000000526a4c4f54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e00000000" } diff --git a/test/util/data/txcreatedata_seq0.hex b/test/util/data/txcreatedata_seq0.hex index db02b5e4a..54b89d238 100644 --- a/test/util/data/txcreatedata_seq0.hex +++ b/test/util/data/txcreatedata_seq0.hex @@ -1 +1 @@ -01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000 +02000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000 diff --git a/test/util/data/txcreatedata_seq0.json b/test/util/data/txcreatedata_seq0.json index a17f15b17..a6656b5ad 100644 --- a/test/util/data/txcreatedata_seq0.json +++ b/test/util/data/txcreatedata_seq0.json @@ -1,9 +1,10 @@ { - "txid": "71603ccb1cd76d73d76eb6cfd5f0b9df6d65d90d76860ee52cb461c4be7032e8", - "hash": "71603ccb1cd76d73d76eb6cfd5f0b9df6d65d90d76860ee52cb461c4be7032e8", - "version": 1, + "txid": "8df6ed527472542dd5e137c242a7c5a9f337ac34f7b257ae4af886aeaebb51b0", + "hash": "8df6ed527472542dd5e137c242a7c5a9f337ac34f7b257ae4af886aeaebb51b0", + "version": 2, "size": 85, "vsize": 85, + "weight": 340, "locktime": 0, "vin": [ { @@ -26,10 
+27,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1" + "13tuJJDR2RgArmgfv6JScSdreahzgc4T6o" ] } } ], - "hex": "01000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000" + "hex": "02000000011f5c38dfcf6f1a5f5a87c416076d392c87e6d41970d5ad5e477a02d66bde97580000000000fdffffff0180a81201000000001976a9141fc11f39be1729bf973a7ab6a615ca4729d6457488ac00000000" } diff --git a/test/util/data/txcreatedata_seq1.json b/test/util/data/txcreatedata_seq1.json index 7ba45d5ce..e5980427b 100644 --- a/test/util/data/txcreatedata_seq1.json +++ b/test/util/data/txcreatedata_seq1.json @@ -4,6 +4,7 @@ "version": 1, "size": 126, "vsize": 126, + "weight": 504, "locktime": 0, "vin": [ { @@ -14,7 +15,7 @@ "hex": "" }, "sequence": 4294967293 - }, + }, { "txid": "5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f", "vout": 0, @@ -35,7 +36,7 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZeGm46MkY3D96sygu5HFtbgadSaQKKrJV1" + "13tuJJDR2RgArmgfv6JScSdreahzgc4T6o" ] } } diff --git a/test/util/data/txcreatemultisig1.json b/test/util/data/txcreatemultisig1.json index 05c8ffefd..c32e755db 100644 --- a/test/util/data/txcreatemultisig1.json +++ b/test/util/data/txcreatemultisig1.json @@ -4,6 +4,7 @@ "version": 1, "size": 124, "vsize": 124, + "weight": 496, "locktime": 0, "vin": [ ], @@ -17,9 +18,9 @@ "reqSigs": 2, "type": "multisig", "addresses": [ - "ZrB7mqGRksZrQQjAZbLizNv3wwyyrwsz41", - "ZqukjwToduujeLGJchDaMSGS9CmneH4BX6", - "ZeimLikX796WYCkty2aykZmsRmHCtcH1ct" + "1FoG2386FG2tAJS9acMuiDsKy67aGg9MKz", + "1FXtz9KU8JNmQDyHdiEm5HDiALuP3zdHvV", + "14LuavcBbXZYJ6Tsz3cAUQj9SuQoL2xCQX" ] } } diff --git a/test/util/data/txcreatemultisig2.json b/test/util/data/txcreatemultisig2.json index 7d94ce739..f97d26589 100644 --- a/test/util/data/txcreatemultisig2.json +++ b/test/util/data/txcreatemultisig2.json @@ -4,6 +4,7 @@ "version": 1, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatemultisig3.json b/test/util/data/txcreatemultisig3.json index 06e093e22..b355d7b19 100644 --- a/test/util/data/txcreatemultisig3.json +++ b/test/util/data/txcreatemultisig3.json @@ -4,6 +4,7 @@ "version": 1, "size": 53, "vsize": 53, + "weight": 212, "locktime": 0, "vin": [ ], @@ -14,7 +15,11 @@ "scriptPubKey": { "asm": "0 e15a86a23178f433d514dbbce042e87d72662b8b5edcacfd2e37ab7a2d135f05", "hex": "0020e15a86a23178f433d514dbbce042e87d72662b8b5edcacfd2e37ab7a2d135f05", - "type": "witness_v0_scripthash" + "reqSigs": 1, + "type": "witness_v0_scripthash", + "addresses": [ + "bc1qu9dgdg330r6r84g5mw7wqshg04exv2uttmw2elfwx74h5tgntuzs44gyfg" + ] } } ], diff --git a/test/util/data/txcreatemultisig4.json b/test/util/data/txcreatemultisig4.json index 9a5d2f4a0..a00dbe3f5 100644 --- a/test/util/data/txcreatemultisig4.json +++ b/test/util/data/txcreatemultisig4.json @@ -4,6 +4,7 @@ "version": 1, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatemultisig5.json b/test/util/data/txcreatemultisig5.json index 112f49d92..ea07822dd 100644 --- a/test/util/data/txcreatemultisig5.json +++ b/test/util/data/txcreatemultisig5.json @@ -1,9 +1,10 @@ { - "txid": "b4b1e00b892c9da3fecdae06b3b9e65d4e96b5ab3134b2095a3c99c9b1c84e9e", - "hash": "b4b1e00b892c9da3fecdae06b3b9e65d4e96b5ab3134b2095a3c99c9b1c84e9e", - "version": 1, + "txid": "813cf75e1f08debd242ef7c8192b7d478fb651355209369499a0de779ba7eb2f", + 
"hash": "813cf75e1f08debd242ef7c8192b7d478fb651355209369499a0de779ba7eb2f", + "version": 2, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], @@ -22,5 +23,5 @@ } } ], - "hex": "01000000000100e1f5050000000017a914a4051c02398868af83f28f083208fae99a7692638700000000" + "hex": "02000000000100e1f5050000000017a914a4051c02398868af83f28f083208fae99a7692638700000000" } diff --git a/test/util/data/txcreateoutpubkey1.json b/test/util/data/txcreateoutpubkey1.json index ec9c3298a..32097b3eb 100644 --- a/test/util/data/txcreateoutpubkey1.json +++ b/test/util/data/txcreateoutpubkey1.json @@ -4,6 +4,7 @@ "version": 1, "size": 54, "vsize": 54, + "weight": 216, "locktime": 0, "vin": [ ], @@ -17,7 +18,7 @@ "reqSigs": 1, "type": "pubkey", "addresses": [ - "ZrB7mqGRksZrQQjAZbLizNv3wwyyrwsz41" + "1FoG2386FG2tAJS9acMuiDsKy67aGg9MKz" ] } } diff --git a/test/util/data/txcreateoutpubkey2.json b/test/util/data/txcreateoutpubkey2.json index 514472223..c0ee181ed 100644 --- a/test/util/data/txcreateoutpubkey2.json +++ b/test/util/data/txcreateoutpubkey2.json @@ -4,6 +4,7 @@ "version": 1, "size": 41, "vsize": 41, + "weight": 164, "locktime": 0, "vin": [ ], @@ -14,7 +15,11 @@ "scriptPubKey": { "asm": "0 a2516e770582864a6a56ed21a102044e388c62e3", "hex": "0014a2516e770582864a6a56ed21a102044e388c62e3", - "type": "witness_v0_keyhash" + "reqSigs": 1, + "type": "witness_v0_keyhash", + "addresses": [ + "bc1q5fgkuac9s2ry56jka5s6zqsyfcugcchry5cwu0" + ] } } ], diff --git a/test/util/data/txcreateoutpubkey3.json b/test/util/data/txcreateoutpubkey3.json index 0a5d489e1..4d904df3c 100644 --- a/test/util/data/txcreateoutpubkey3.json +++ b/test/util/data/txcreateoutpubkey3.json @@ -4,6 +4,7 @@ "version": 1, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatescript1.json b/test/util/data/txcreatescript1.json index 5072452fe..af1c4c35e 100644 --- a/test/util/data/txcreatescript1.json +++ b/test/util/data/txcreatescript1.json @@ -4,6 +4,7 @@ "version": 1, "size": 20, "vsize": 20, + "weight": 80, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatescript2.json b/test/util/data/txcreatescript2.json index 94b669ffb..32dd64457 100644 --- a/test/util/data/txcreatescript2.json +++ b/test/util/data/txcreatescript2.json @@ -4,6 +4,7 @@ "version": 1, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatescript3.json b/test/util/data/txcreatescript3.json index 980da2fb3..b9192d9a8 100644 --- a/test/util/data/txcreatescript3.json +++ b/test/util/data/txcreatescript3.json @@ -4,6 +4,7 @@ "version": 1, "size": 53, "vsize": 53, + "weight": 212, "locktime": 0, "vin": [ ], @@ -14,7 +15,11 @@ "scriptPubKey": { "asm": "0 0bfe935e70c321c7ca3afc75ce0d0ca2f98b5422e008bb31c00c6d7f1f1c0ad6", "hex": "00200bfe935e70c321c7ca3afc75ce0d0ca2f98b5422e008bb31c00c6d7f1f1c0ad6", - "type": "witness_v0_scripthash" + "reqSigs": 1, + "type": "witness_v0_scripthash", + "addresses": [ + "bc1qp0lfxhnscvsu0j36l36uurgv5tuck4pzuqytkvwqp3kh78cupttqyf705v" + ] } } ], diff --git a/test/util/data/txcreatescript4.json b/test/util/data/txcreatescript4.json index eecdf858b..2271ecfa0 100644 --- a/test/util/data/txcreatescript4.json +++ b/test/util/data/txcreatescript4.json @@ -4,6 +4,7 @@ "version": 1, "size": 42, "vsize": 42, + "weight": 168, "locktime": 0, "vin": [ ], diff --git a/test/util/data/txcreatesignv1.hex b/test/util/data/txcreatesignv1.hex index a46fcc88c..40039319b 100644 --- a/test/util/data/txcreatesignv1.hex +++ 
b/test/util/data/txcreatesignv1.hex @@ -1 +1 @@ -01000000018594c5bdcaec8f06b78b596f31cd292a294fd031e24eec716f43dac91ea7494d000000008b48304502210096a75056c9e2cc62b7214777b3d2a592cfda7092520126d4ebfcd6d590c99bd8022051bb746359cf98c0603f3004477eac68701132380db8facba19c89dc5ab5c5e201410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ffffffff01a0860100000000001976a9145834479edbbe0539b31ffd3a8f8ebadc2165ed0188ac00000000 +01000000018594c5bdcaec8f06b78b596f31cd292a294fd031e24eec716f43dac91ea7494d000000008a4730440220131432090a6af42da3e8335ff110831b41a44f4e9d18d88f5d50278380696c7202200fc2e48938f323ad13625890c0ea926c8a189c08b8efc38376b20c8a2188e96e01410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ffffffff01a0860100000000001976a9145834479edbbe0539b31ffd3a8f8ebadc2165ed0188ac00000000 diff --git a/test/util/data/txcreatesignv1.json b/test/util/data/txcreatesignv1.json index f5e1eaf01..7a06aa9ff 100644 --- a/test/util/data/txcreatesignv1.json +++ b/test/util/data/txcreatesignv1.json @@ -1,17 +1,18 @@ { - "txid": "977e7cd286cb72cd470d539ba6cb48400f8f387d97451d45cdb8819437a303af", - "hash": "977e7cd286cb72cd470d539ba6cb48400f8f387d97451d45cdb8819437a303af", + "txid": "ffc7e509ec3fd60a182eb712621d41a47dc7d4ff310a70826c2fb0e9afb3fa02", + "hash": "ffc7e509ec3fd60a182eb712621d41a47dc7d4ff310a70826c2fb0e9afb3fa02", "version": 1, - "size": 224, - "vsize": 224, + "size": 223, + "vsize": 223, + "weight": 892, "locktime": 0, "vin": [ { "txid": "4d49a71ec9da436f71ec4ee231d04f292a29cd316f598bb7068feccabdc59485", "vout": 0, "scriptSig": { - "asm": "304502210096a75056c9e2cc62b7214777b3d2a592cfda7092520126d4ebfcd6d590c99bd8022051bb746359cf98c0603f3004477eac68701132380db8facba19c89dc5ab5c5e2[ALL] 0479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8", - "hex": "48304502210096a75056c9e2cc62b7214777b3d2a592cfda7092520126d4ebfcd6d590c99bd8022051bb746359cf98c0603f3004477eac68701132380db8facba19c89dc5ab5c5e201410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8" + "asm": "30440220131432090a6af42da3e8335ff110831b41a44f4e9d18d88f5d50278380696c7202200fc2e48938f323ad13625890c0ea926c8a189c08b8efc38376b20c8a2188e96e[ALL] 0479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8", + "hex": "4730440220131432090a6af42da3e8335ff110831b41a44f4e9d18d88f5d50278380696c7202200fc2e48938f323ad13625890c0ea926c8a189c08b8efc38376b20c8a2188e96e01410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8" }, "sequence": 4294967295 } @@ -26,10 +27,10 @@ "reqSigs": 1, "type": "pubkeyhash", "addresses": [ - "ZjREr93FwgKB2rWwL8tM4AjbRHi6ZDzxHJ" + "193P6LtvS4nCnkDvM9uXn1gsSRqh4aDAz7" ] } } ], - "hex": "01000000018594c5bdcaec8f06b78b596f31cd292a294fd031e24eec716f43dac91ea7494d000000008b48304502210096a75056c9e2cc62b7214777b3d2a592cfda7092520126d4ebfcd6d590c99bd8022051bb746359cf98c0603f3004477eac68701132380db8facba19c89dc5ab5c5e201410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ffffffff01a0860100000000001976a9145834479edbbe0539b31ffd3a8f8ebadc2165ed0188ac00000000" + "hex": 
"01000000018594c5bdcaec8f06b78b596f31cd292a294fd031e24eec716f43dac91ea7494d000000008a4730440220131432090a6af42da3e8335ff110831b41a44f4e9d18d88f5d50278380696c7202200fc2e48938f323ad13625890c0ea926c8a189c08b8efc38376b20c8a2188e96e01410479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8ffffffff01a0860100000000001976a9145834479edbbe0539b31ffd3a8f8ebadc2165ed0188ac00000000" } diff --git a/test/util/rpcauth-test.py b/test/util/rpcauth-test.py new file mode 100755 index 000000000..46e9fbc73 --- /dev/null +++ b/test/util/rpcauth-test.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +# Copyright (c) 2015-2018 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test share/rpcauth/rpcauth.py +""" +import base64 +import configparser +import hmac +import importlib +import os +import sys +import unittest + +class TestRPCAuth(unittest.TestCase): + def setUp(self): + config = configparser.ConfigParser() + config_path = os.path.abspath( + os.path.join(os.sep, os.path.abspath(os.path.dirname(__file__)), + "../config.ini")) + with open(config_path, encoding="utf8") as config_file: + config.read_file(config_file) + sys.path.insert(0, os.path.dirname(config['environment']['RPCAUTH'])) + self.rpcauth = importlib.import_module('rpcauth') + + def test_generate_salt(self): + self.assertLessEqual(len(self.rpcauth.generate_salt()), 32) + self.assertGreaterEqual(len(self.rpcauth.generate_salt()), 16) + + def test_generate_password(self): + password = self.rpcauth.generate_password() + expected_password = base64.urlsafe_b64encode( + base64.urlsafe_b64decode(password)).decode('utf-8') + self.assertEqual(expected_password, password) + + def test_check_password_hmac(self): + salt = self.rpcauth.generate_salt() + password = self.rpcauth.generate_password() + password_hmac = self.rpcauth.password_to_hmac(salt, password) + + m = hmac.new(bytearray(salt, 'utf-8'), + bytearray(password, 'utf-8'), 'SHA256') + expected_password_hmac = m.hexdigest() + + self.assertEqual(expected_password_hmac, password_hmac) + +if __name__ == '__main__': + unittest.main()