Commit

Author: tecnovert
SHA1: 877cefb40f
Message: WIP
Date: 2023-08-06 11:29:29 +02:00

377 changed files with 32419 additions and 133006 deletions


@@ -3,10 +3,11 @@ container:
lint_task:
setup_script:
- pip install flake8 codespell
- pip install flake8
- pip install codespell
script:
- flake8 --version
- flake8 --ignore=E203,E501,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,.eggs,.tox,bin/install_certifi.py
- PYTHONWARNINGS="ignore" flake8 --ignore=E501,F841,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,messages_pb2.py,.eggs,.tox,bin/install_certifi.py
- codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py,*basicswap/static
test_task:
@@ -16,21 +17,25 @@ test_task:
- BIN_DIR: /tmp/cached_bin
- PARTICL_BINDIR: ${BIN_DIR}/particl
- BITCOIN_BINDIR: ${BIN_DIR}/bitcoin
- BITCOINCASH_BINDIR: ${BIN_DIR}/bitcoincash
- LITECOIN_BINDIR: ${BIN_DIR}/litecoin
- XMR_BINDIR: ${BIN_DIR}/monero
setup_script:
- apt-get update
- apt-get install -y python3-pip pkg-config
- apt-get install -y wget python3-pip gnupg unzip protobuf-compiler automake libtool pkg-config
- pip install tox pytest
- pip install .
- python3 setup.py install
- wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_v0.1.zip
- unzip -d coincurve-anonswap coincurve-anonswap.zip
- mv ./coincurve-anonswap/*/{.,}* ./coincurve-anonswap || true
- cd coincurve-anonswap
- python3 setup.py install --force
bins_cache:
folder: /tmp/cached_bin
reupload_on_changes: false
fingerprint_script:
- basicswap-prepare -v
populate_script:
- basicswap-prepare --bindir=/tmp/cached_bin --preparebinonly --withcoins=particl,bitcoin,bitcoincash,litecoin,monero
- basicswap-prepare --bindir=/tmp/cached_bin --preparebinonly --withcoins=particl,bitcoin,litecoin,monero
script:
- cd "${CIRRUS_WORKING_DIR}"
- export DATADIRS="${TEST_DIR}"
@@ -42,4 +47,3 @@ test_task:
- pytest tests/basicswap/test_other.py
- pytest tests/basicswap/test_run.py
- pytest tests/basicswap/test_reload.py
- pytest tests/basicswap/test_btc_xmr.py -k 'test_01_a or test_01_b or test_02_a or test_02_b'


@@ -1,3 +0,0 @@
{
"indent": 2
}


@@ -1,11 +0,0 @@
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
open-pull-requests-limit: 20
target-branch: "dev"


@@ -1,120 +0,0 @@
name: ci
on: [push, pull_request]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
BIN_DIR: /tmp/cached_bin
TEST_RELOAD_PATH: /tmp/test_basicswap
BSX_SELENIUM_DRIVER: firefox-ci
XMR_RPC_USER: xmr_user
XMR_RPC_PWD: xmr_pwd
jobs:
ci:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.12"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
if [ $(dpkg-query -W -f='${Status}' firefox 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
install -d -m 0755 /etc/apt/keyrings
wget -q https://packages.mozilla.org/apt/repo-signing-key.gpg -O- | sudo tee /etc/apt/keyrings/packages.mozilla.org.asc > /dev/null
echo "deb [signed-by=/etc/apt/keyrings/packages.mozilla.org.asc] https://packages.mozilla.org/apt mozilla main" | sudo tee -a /etc/apt/sources.list.d/mozilla.list > /dev/null
echo "Package: *" | sudo tee /etc/apt/preferences.d/mozilla
echo "Pin: origin packages.mozilla.org" | sudo tee -a /etc/apt/preferences.d/mozilla
echo "Pin-Priority: 1000" | sudo tee -a /etc/apt/preferences.d/mozilla
sudo apt-get update
sudo apt-get install -y firefox
fi
python -m pip install --upgrade pip
pip install python-gnupg
pip install -e .[dev]
pip install -r requirements.txt --require-hashes
- name: Install
run: |
pip install .
# Print the core versions to a file for caching
basicswap-prepare --version --withcoins=bitcoin | tail -n +2 > core_versions.txt
cat core_versions.txt
- name: Run flake8
run: |
flake8 --ignore=E203,E501,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,.eggs,.tox,bin/install_certifi.py
- name: Run codespell
run: |
codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py,*basicswap/static
- name: Run black
run: |
black --check --diff --exclude="contrib" .
- name: Run test_other
run: |
pytest tests/basicswap/test_other.py
- name: Cache coin cores
id: cache-cores
uses: actions/cache@v3
env:
cache-name: cache-cores
with:
path: /tmp/cached_bin
key: cores-${{ runner.os }}-${{ hashFiles('**/core_versions.txt') }}
- if: ${{ steps.cache-cores.outputs.cache-hit != 'true' }}
name: Running basicswap-prepare
run: |
basicswap-prepare --bindir="$BIN_DIR" --preparebinonly --withcoins=particl,bitcoin,monero
- name: Run test_prepare
run: |
export PYTHONPATH=$(pwd)
export TEST_BIN_PATH="$BIN_DIR"
export TEST_PATH=/tmp/test_prepare
pytest tests/basicswap/extended/test_prepare.py
- name: Run test_xmr
run: |
export PYTHONPATH=$(pwd)
export PARTICL_BINDIR="$BIN_DIR/particl"
export BITCOIN_BINDIR="$BIN_DIR/bitcoin"
export XMR_BINDIR="$BIN_DIR/monero"
pytest tests/basicswap/test_btc_xmr.py::TestBTC -k "test_003_api or test_02_a_leader_recover_a_lock_tx"
- name: Run test_encrypted_xmr_reload
run: |
export PYTHONPATH=$(pwd)
export TEST_PATH=${TEST_RELOAD_PATH}
mkdir -p ${TEST_PATH}/bin
cp -r $BIN_DIR/* ${TEST_PATH}/bin/
pytest tests/basicswap/extended/test_encrypted_xmr_reload.py
- name: Run selenium tests
run: |
export TEST_PATH=/tmp/test_persistent
mkdir -p ${TEST_PATH}/bin
cp -r $BIN_DIR/* ${TEST_PATH}/bin/
export PYTHONPATH=$(pwd)
python tests/basicswap/extended/test_xmr_persistent.py > /tmp/log.txt 2>&1 & TEST_NETWORK_PID=$!
echo "Starting test_xmr_persistent, PID $TEST_NETWORK_PID"
i=0
until curl -s -f -o /dev/null "http://localhost:12701/json/coins"
do
tail -n 1 /tmp/log.txt
sleep 2
((++i))
if [ $i -ge 60 ]; then
echo "Timed out waiting for test_xmr_persistent, PID $TEST_NETWORK_PID"
kill $TEST_NETWORK_PID
(exit 1) # Fail test
break
fi
done
echo "Running test_settings.py"
python tests/basicswap/selenium/test_settings.py
echo "Running test_swap_direction.py"
python tests/basicswap/selenium/test_swap_direction.py
kill $TEST_NETWORK_PID

.gitignore

@@ -1,6 +1,5 @@
old/
build/
venv/
*.pyc
__pycache__
/dist/
@@ -9,14 +8,4 @@ __pycache__
/*.eggs
.tox
.eggs
.ruff_cache
.pytest_cache
.vectorcode
*~
# geckodriver.log
*.log
docker/.env
# vscode dev container settings
compose-dev.yaml


@@ -1,40 +0,0 @@
repos:
# Common hooks
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-added-large-files
- id: check-merge-conflict
args: ["--assume-in-merge"]
- id: check-yaml
- id: detect-private-key
- id: end-of-file-fixer
- id: trailing-whitespace
args: ["--markdown-linebreak-ext=md"]
# Black - Python formatter
- repo: https://github.com/psf/black
rev: 25.1.0
hooks:
- id: black
exclude: (basicswap/contrib|basicswap/interface/contrib)/
# Flake8 - Lint Python
- repo: https://github.com/pycqa/flake8
rev: 7.3.0
hooks:
- id: flake8
args: ["--ignore=E203,E501,W503", "--exclude=basicswap/contrib,basicswap/interface/contrib,.eggs,.tox,bin/install_certifi.py"]
# ESLint - Lint Javascript and fix issues where possible
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v9.30.1
hooks:
- id: eslint
#args: ["--fix"]
# djLint - Lint HTML
#- repo: https://github.com/djlint/djlint
# rev: v1.36.4
# hooks:
# - id: djlint

.travis.yml

@@ -0,0 +1,60 @@
dist: bionic
os: linux
language: python
python: '3.7'
stages:
- lint
- test
env:
global:
- TEST_DIR=${HOME}/test_basicswap2
- TEST_RELOAD_PATH=~/test_basicswap1
- BIN_DIR=~/cached_bin
- PARTICL_BINDIR=${BIN_DIR}/particl
- BITCOIN_BINDIR=${BIN_DIR}/bitcoin
- LITECOIN_BINDIR=${BIN_DIR}/litecoin
- XMR_BINDIR=${BIN_DIR}/monero
cache:
directories:
- "$BIN_DIR"
before_install:
- sudo apt-get install -y wget python3-pip gnupg unzip protobuf-compiler automake libtool pkg-config
install:
- travis_retry pip install tox pytest
before_script:
- wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_v0.1.zip
- unzip -d coincurve-anonswap coincurve-anonswap.zip
- mv ./coincurve-anonswap/*/{.,}* ./coincurve-anonswap || true
- cd coincurve-anonswap
- python3 setup.py install --force
script:
- cd $TRAVIS_BUILD_DIR
- python3 setup.py install
- basicswap-prepare --bindir=${BIN_DIR} --preparebinonly --withcoins=particl,bitcoin,litecoin,monero
- export DATADIRS="${TEST_DIR}"
- mkdir -p "${DATADIRS}/bin"
- cp -r ${BIN_DIR} "${DATADIRS}/bin"
- mkdir -p "${TEST_RELOAD_PATH}/bin"
- cp -r ${BIN_DIR} "${TEST_RELOAD_PATH}/bin"
- # tox
- pytest tests/basicswap/test_xmr.py
- pytest tests/basicswap/test_xmr_reload.py
- pytest tests/basicswap/test_xmr_bids_offline.py
after_success:
- echo "End test"
jobs:
include:
- stage: lint
env:
cache: false
install:
- travis_retry pip install flake8==3.7.0
- travis_retry pip install codespell==1.15.0
before_script:
script:
- PYTHONWARNINGS="ignore" flake8 --ignore=E501,F841,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,messages_pb2.py,.eggs,.tox,bin/install_certifi.py
- codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py,*basicswap/static
after_success:
- echo "End lint"
- stage: test
env:


@@ -5,15 +5,30 @@ ENV LANG=C.UTF-8 \
DATADIRS="/coindata"
RUN apt-get update; \
apt-get install -y --no-install-recommends \
python3-pip libpython3-dev gnupg pkg-config gcc libc-dev gosu tzdata cmake ninja-build;
apt-get install -y wget python3-pip gnupg unzip make g++ autoconf automake libtool pkg-config gosu tzdata;
# Must install protoc directly as the latest packaged version is only 3.12
RUN wget -O protobuf_src.tar.gz https://github.com/protocolbuffers/protobuf/releases/download/v21.1/protobuf-python-4.21.1.tar.gz && \
tar xvf protobuf_src.tar.gz && \
cd protobuf-3.21.1 && \
./configure --prefix=/usr && \
make -j$(nproc) install && \
ldconfig
ARG COINCURVE_VERSION=v0.1
RUN wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_$COINCURVE_VERSION.zip && \
unzip coincurve-anonswap.zip && \
mv ./coincurve-anonswap_$COINCURVE_VERSION ./coincurve-anonswap && \
cd coincurve-anonswap && \
python3 setup.py install --force
# Install requirements first so as to skip in subsequent rebuilds
COPY ./requirements.txt requirements.txt
RUN pip3 install -r requirements.txt --require-hashes
RUN pip3 install -r requirements.txt
COPY . basicswap-master
RUN cd basicswap-master; \
protoc -I=basicswap --python_out=basicswap basicswap/messages.proto; \
pip3 install .;
RUN useradd -ms /bin/bash swap_user && \


@@ -1,5 +1,5 @@
The MIT License (MIT)
Copyright (c) 2019-2024 tecnovert
Copyright (c) 2019 tecnovert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

MANIFEST.in

@@ -0,0 +1,5 @@
include *.md LICENSE
recursive-include doc *
recursive-include basicswap/templates *
recursive-include basicswap/static *


@@ -15,31 +15,27 @@ Table of Contents
## About
**BasicSwap** is the world's most secure and decentralized DEX. It facilitates cross-chain atomic swaps by enabling peers to interact directly with each other within a free and open environment without central points of failure.
The BasicSwap DEX is a privacy-first and decentralized exchange which features cross-chain atomic swaps and a distributed order book.
This DEX is fully non-custodial and features a decentralized order book, letting you create or accept swap offers without any fees, counterparties, or the need for accounts.
[BasicSwap](https://academy.particl.io/en/latest/glossary.html#term-BasicSwap) is a cross-chain and privacy-centric DEX (decentralized exchange) that lets you trade cryptocurrencies with no third party involvement. Its distributed order book lets you make or take orders at no cost and trade within a free and open environment without central points of failure.
Built as a low-friction, highly secure solution to the frequent losses of funds on centralized exchanges (e.g., FTX, BitFinex, MtGox), **BasicSwap** aims to provide more reliable and secure cryptocurrency trading conditions for everyone.
This DEX protocol was built in direct response to the increasingly invasive demands and data mining practices of today's cryptocurrency exchanges. It strives to bring more decentralized and more private cryptocurrency trading conditions for all.
**BasicSwap** is currently in active development by the community. While it already offers some of the essential trading features you'd expect from an exchange, more features and quality-of-life improvements are being worked on with the goal to provide a smoother user experience.
BasicSwap is still in beta. This means that, while it already offers most of the vital trading features you'd expect to see on centralized exchanges, it is still in heavy development, and many more features will come about in the near future.
Check out our [roadmap](https://basicswapdex.com/roadmap) to get a better idea of what we've got planned for it!
## Features
* **True cross-chain support** — Swap cryptocurrencies that live on entirely different blockchain environments, like Bitcoin and Monero.
* **Decentralized order book** — Make or take swap offers on a completely decentralized order book system.
* **No third-party or middleman** — Trade crypto with no intermediaries, completely eliminating central points of failure.
* **Distributed order book** — Make or take limit orders on a completely distributed order book system.
* **No third-party or middleman** — Trade crypto with no intermediaries whatsoever.
* **No trading fees** — Only pay the typical cryptocurrency network fee.
* **Superior financial privacy** — Protect your financial information from unauthorized access with BasicSwap's privacy-conscious technology.
* **Full Monero support** — Swap Monero with a variety of other cryptocurrencies like Bitcoin or Particl. No wrapped assets or layer-2 involved.
* **Privacy from the ground up** — Every component of BasicSwap is built with a privacy-first commitment.
* **Full Monero support** — Swap Monero with a variety of other cryptocurrencies like Bitcoin or Particl. No wrapped assets or trickery involved.
* **User-friendly interface** — Enjoy all these features within a user-friendly and intuitive interface that handles all the complicated parts for you.
## Under the Hood
**BasicSwap** can be best understood as a decentralized version of the SWIFT messaging network: a decentralized messaging protocol that lets peers connect directly with each other to execute atomic swaps, without central points of failure, using the official core wallets (Bitcoin Core, Litecoin Core, etc.).
**BasicSwap** does not process, initiate, or execute swaps; it merely enables peers to communicate with each other and exchange the required information to simplify the process of using atomic swaps on the respective blockchains of the coins being swapped.
In essence, **BasicSwap** operates merely as a decentralized messaging protocol supplemented by a user-friendly interface.
BasicSwap is still in beta. This means that, while it already offers most of the vital trading features you'd expect to see on centralized exchanges, it is still in heavy development, and many more features will come about in the near future.
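As a rough illustration of this model, the sketch below shows how a swap client can drive an official core wallet over its JSON-RPC interface, which is the kind of call the messaging layer described above coordinates. It is a minimal, illustrative example only; the host, port, and credentials are placeholder assumptions for a local Bitcoin Core node, not BasicSwap's actual configuration.

```python
import base64
import json
import urllib.request


def call_core_rpc(method, params=None, host="127.0.0.1", port=8332,
                  user="rpc_user", password="rpc_pass"):
    # Build a JSON-RPC 1.0 request, the format Bitcoin Core and most
    # derived core wallets accept on their RPC port.
    payload = json.dumps({
        "jsonrpc": "1.0",
        "id": "bsx-example",
        "method": method,
        "params": params or [],
    }).encode("utf-8")
    request = urllib.request.Request(f"http://{host}:{port}/", data=payload)
    # Core wallets expect HTTP basic auth built from rpcuser/rpcpassword.
    auth = base64.b64encode(f"{user}:{password}".encode("utf-8")).decode("ascii")
    request.add_header("Authorization", "Basic " + auth)
    request.add_header("Content-Type", "application/json")
    with urllib.request.urlopen(request, timeout=120) as fp:
        return json.loads(fp.read())["result"]


# Example: query basic chain state, as a swap client might before building a swap tx.
# print(call_core_rpc("getblockchaininfo"))
```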
## Available Assets
@@ -64,12 +60,6 @@ BasicSwap is compatible with the following digital assets.
<td>XMR
</td>
</tr>
<tr>
<td>Bitcoin Cash
</td>
<td>BCH
</td>
</tr>
<tr>
<td>Dash
</td>
@@ -94,63 +84,47 @@ BasicSwap is compatible with the following digital assets.
<td>PIVX
</td>
</tr>
<tr>
<td>Decred
</td>
<td>DCR
</td>
</tr>
<tr>
<td>Wownero
</td>
<td>WOW
</td>
</tr>
<tr>
<td>Particl
</td>
<td>PART
</td>
</tr>
<tr>
<td>Dogecoin
</td>
<td>DOGE
</td>
</tr>
<tr>
<td>Namecoin
</td>
<td>NMC
</td>
</tr>
</table>
If you'd like to add a cryptocurrency to BasicSwap, refer to how other cryptocurrencies have been integrated into the DEX by following [this link](https://academy.particl.io/en/latest/basicswap-guides/basicswapguides_apply.html).
We plan on adding many other cryptocurrencies moving forward, including ETH and its ERC-20 tokens. However, due to the true cross-chain nature of the BasicSwap DEX protocol, each integration has to be done on a case-by-case basis.
If you'd like to add a cryptocurrency to BasicSwap, either [apply for a listing using our listing application form](https://forms.gle/9DsHoHTJVqSiMNHW9), or try coding the integration yourself by referencing how other cryptocurrencies have been added. Follow [this link](https://academy.particl.io/en/latest/basicswap-guides/basicswapguides_apply.html) for more information on how to integrate a coin yourself.
# Participate
### Chats
* **For support** Join the community on [#basicswap:matrix.org](https://matrix.to/#/#basicswap:matrix.org) using a Matrix client.
* **For developers** The chat [#particl-dev:matrix.org](https://app.element.io/#/room/#particl-dev:matrix.org) using [Element](https://element.io) (formerly Riot).
* **For community** The community chat [https://discord.me/particl](https://discord.me/particl) [![Discord](https://img.shields.io/discord/391967609660112925)](https://discord.me/particl).
[![Twitter Follow](https://img.shields.io/twitter/follow/BasicSwapDEX?label=follow%20us&style=social)](http://twitter.com/BasicSwapDEX)
[![Subreddit subscribers](https://img.shields.io/reddit/subreddit-subscribers/particl?style=social)](http://reddit.com/r/particl)
### Documentation, installation
Follow the guides on [Particl Academy](https://academy.particl.io) for tutorials on how BasicSwap works.
For non-developers, downloading and installing the binaries is the easiest way to get started. Following the guides on [Particl Academy](https://academy.particl.io), a reference written in straightforward language, is recommended.
* [Download BasicSwapDEX](https://github.com/basicswap/basicswap/tree/master/doc)
* [Download BasicSwapDEX](https://github.com/tecnovert/basicswap/tree/master/doc)
#### Community chat support
* [Matrix](https://matrix.to/#/#basicswap:matrix.org)
* [Discord](https://discord.me/particl) navigate to the #support channel
* [Telegram](https://t.me/particlhelp)
* [Element](https://app.element.io/#/room/#particlhelp:matrix.org)
# Tutorials
You can find a wide variety of tutorials and step-by-step guides about BasicSwap on the [Particl Academy](https://academy.particl.io) or on Particl's YouTube channel.
If you encounter an issue or try to accomplish something not mentioned in any of the tutorials included in the links above, please join the community chat support channel; you'll be sure to find help and support from current contributors there!
If you encounter an issue or try to accomplish something not mentioned in any of the tutorials included in the links above, please join the community chat support channels; you'll be sure to find help and support from our awesome community and open-source team there!
# License


@@ -1,3 +1,3 @@
name = "basicswap"
__version__ = "0.15.0"
__version__ = "0.11.66"


@@ -1,38 +1,28 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2019-2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import logging
import os
import random
import shlex
import socket
import socks
import subprocess
import sys
import threading
import time
import traceback
import shlex
import socks
import socket
import urllib
import logging
import threading
import traceback
import subprocess
from sockshandler import SocksiPyHandler
from .db import (
DBMethods,
)
from .rpc import (
callrpc,
)
from .util import (
TemporaryError,
)
from .util.logging import (
BSXLogger,
LogCategories as LC,
)
from .chainparams import (
Coins,
chainparams,
@@ -43,10 +33,11 @@ def getaddrinfo_tor(*args):
return [(socket.AF_INET, socket.SOCK_STREAM, 6, "", (args[0], args[1]))]
class BaseApp(DBMethods):
def __init__(self, data_dir, settings, chain, log_name="BasicSwap", **kwargs):
self.fp = None
class BaseApp:
def __init__(self, fp, data_dir, settings, chain, log_name='BasicSwap'):
self.log_name = log_name
self.fp = fp
self.is_running = True
self.fail_code = 0
self.mock_time_offset = 0
@@ -55,94 +46,41 @@ class BaseApp(DBMethods):
self.settings = settings
self.coin_clients = {}
self.coin_interfaces = {}
self.mxDB = threading.Lock()
self.debug = self.settings.get("debug", False)
self.mxDB = threading.RLock()
self.debug = self.settings.get('debug', False)
self.delay_event = threading.Event()
self.chainstate_delay_event = threading.Event()
self._network = None
self.prepareLogging()
self.log.info(f"Network: {self.chain}")
self.log.info('Network: {}'.format(self.chain))
self.use_tor_proxy = self.settings.get("use_tor", False)
self.tor_proxy_host = self.settings.get("tor_proxy_host", "127.0.0.1")
self.tor_proxy_port = self.settings.get("tor_proxy_port", 9050)
self.tor_control_password = self.settings.get("tor_control_password", None)
self.tor_control_port = self.settings.get("tor_control_port", 9051)
self.use_tor_proxy = self.settings.get('use_tor', False)
self.tor_proxy_host = self.settings.get('tor_proxy_host', '127.0.0.1')
self.tor_proxy_port = self.settings.get('tor_proxy_port', 9050)
self.tor_control_password = self.settings.get('tor_control_password', None)
self.tor_control_port = self.settings.get('tor_control_port', 9051)
self.default_socket = socket.socket
self.default_socket_timeout = socket.getdefaulttimeout()
self.default_socket_getaddrinfo = socket.getaddrinfo
self._force_db_upgrade = False
self._enabled_log_categories = set()
for category in self.settings.get("enabled_log_categories", []):
category = category.lower()
if category == "net":
self._enabled_log_categories.add(LC.NET)
else:
self.log.warning(
f'Unknown entry "{category}" in "enabled_log_categories"'
)
if len(self._enabled_log_categories) > 0:
self.log.info(
"Enabled logging categories: {}".format(
",".join(sorted([c.name for c in self._enabled_log_categories]))
)
)
super().__init__(
data_dir=data_dir,
settings=settings,
chain=chain,
log_name=log_name,
**kwargs,
)
def __del__(self):
if self.fp:
self.fp.close()
def stopRunning(self, with_code=0):
self.fail_code = with_code
# Wait for lock to shutdown gracefully.
if self.mxDB.acquire(timeout=5):
self.chainstate_delay_event.set()
with self.mxDB:
self.is_running = False
self.delay_event.set()
self.mxDB.release()
else:
# Waiting for lock timed out, stop anyway
self.chainstate_delay_event.set()
self.delay_event.set()
def openLogFile(self):
self.fp = open(os.path.join(self.data_dir, "basicswap.log"), "a")
def prepareLogging(self):
logging.setLoggerClass(BSXLogger)
self.log = logging.getLogger(self.log_name)
self.log.propagate = False
self.openLogFile()
# Remove any existing handlers
self.log.handlers = []
formatter = logging.Formatter(
"%(asctime)s %(levelname)s : %(message)s", "%Y-%m-%d %H:%M:%S"
)
stream_stdout = logging.StreamHandler(sys.stdout)
if self.log_name != "BasicSwap":
stream_stdout.setFormatter(
logging.Formatter(
"%(asctime)s %(name)s %(levelname)s : %(message)s",
"%Y-%m-%d %H:%M:%S",
)
)
formatter = logging.Formatter('%(asctime)s %(levelname)s : %(message)s')
stream_stdout = logging.StreamHandler()
if self.log_name != 'BasicSwap':
stream_stdout.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s : %(message)s'))
else:
stream_stdout.setFormatter(formatter)
self.log_formatter = formatter
stream_fp = logging.StreamHandler(self.fp)
stream_fp.setFormatter(formatter)
@@ -152,91 +90,67 @@ class BaseApp(DBMethods):
def getChainClientSettings(self, coin):
try:
return self.settings["chainclients"][chainparams[coin]["name"]]
return self.settings['chainclients'][chainparams[coin]['name']]
except Exception:
return {}
def setDaemonPID(self, name, pid) -> None:
if isinstance(name, Coins):
self.coin_clients[name]["pid"] = pid
self.coin_clients[name]['pid'] = pid
return
for c, v in self.coin_clients.items():
if v["name"] == name:
v["pid"] = pid
if v['name'] == name:
v['pid'] = pid
def getChainDatadirPath(self, coin) -> str:
datadir = self.coin_clients[coin]["datadir"]
testnet_name = (
""
if self.chain == "mainnet"
else chainparams[coin][self.chain].get("name", self.chain)
)
datadir = self.coin_clients[coin]['datadir']
testnet_name = '' if self.chain == 'mainnet' else chainparams[coin][self.chain].get('name', self.chain)
return os.path.join(datadir, testnet_name)
def getCoinIdFromName(self, coin_name: str):
for c, params in chainparams.items():
if coin_name.lower() == params["name"].lower():
if coin_name.lower() == params['name'].lower():
return c
raise ValueError(f"Unknown coin: {coin_name}")
raise ValueError('Unknown coin: {}'.format(coin_name))
def callrpc(self, method, params=[], wallet=None):
cc = self.coin_clients[Coins.PART]
return callrpc(
cc["rpcport"], cc["rpcauth"], method, params, wallet, cc["rpchost"]
)
return callrpc(cc['rpcport'], cc['rpcauth'], method, params, wallet, cc['rpchost'])
def callcoinrpc(self, coin, method, params=[], wallet=None):
cc = self.coin_clients[coin]
return callrpc(
cc["rpcport"], cc["rpcauth"], method, params, wallet, cc["rpchost"]
)
return callrpc(cc['rpcport'], cc['rpcauth'], method, params, wallet, cc['rpchost'])
def callcoincli(self, coin_type, params, wallet=None, timeout=None):
bindir = self.coin_clients[coin_type]["bindir"]
datadir = self.coin_clients[coin_type]["datadir"]
cli_bin: str = chainparams[coin_type].get(
"cli_binname", chainparams[coin_type]["name"] + "-cli"
)
command_cli = os.path.join(
bindir, cli_bin + (".exe" if os.name == "nt" else "")
)
args = [
command_cli,
]
if self.chain != "mainnet":
args.append("-" + self.chain)
args.append("-datadir=" + datadir)
bindir = self.coin_clients[coin_type]['bindir']
datadir = self.coin_clients[coin_type]['datadir']
command_cli = os.path.join(bindir, chainparams[coin_type]['name'] + '-cli' + ('.exe' if os.name == 'nt' else ''))
args = [command_cli, ]
if self.chain != 'mainnet':
args.append('-' + self.chain)
args.append('-datadir=' + datadir)
args += shlex.split(params)
p = subprocess.Popen(
args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate(timeout=timeout)
if len(out[1]) > 0:
raise ValueError("CLI error " + str(out[1]))
return out[0].decode("utf-8").strip()
raise ValueError('CLI error ' + str(out[1]))
return out[0].decode('utf-8').strip()
def is_transient_error(self, ex) -> bool:
if isinstance(ex, TemporaryError):
return True
str_error = str(ex).lower()
return "read timed out" in str_error or "no connection to daemon" in str_error
return 'read timed out' in str_error or 'no connection to daemon' in str_error
def setConnectionParameters(self, timeout=120):
opener = urllib.request.build_opener()
opener.addheaders = [("User-agent", "Mozilla/5.0")]
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib.request.install_opener(opener)
if self.use_tor_proxy:
socks.setdefaultproxy(
socks.PROXY_TYPE_SOCKS5,
self.tor_proxy_host,
self.tor_proxy_port,
rdns=True,
)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, self.tor_proxy_host, self.tor_proxy_port, rdns=True)
socket.socket = socks.socksocket
socket.getaddrinfo = (
getaddrinfo_tor # Without this accessing .onion links would fail
)
socket.getaddrinfo = getaddrinfo_tor # Without this accessing .onion links would fail
socket.setdefaulttimeout(timeout)
@@ -246,37 +160,23 @@ class BaseApp(DBMethods):
socket.getaddrinfo = self.default_socket_getaddrinfo
socket.setdefaulttimeout(self.default_socket_timeout)
def readURL(self, url: str, timeout: int = 120, headers={}) -> bytes:
def readURL(self, url: str, timeout: int = 120, headers=None) -> bytes:
open_handler = None
if self.use_tor_proxy:
open_handler = SocksiPyHandler(
socks.PROXY_TYPE_SOCKS5, self.tor_proxy_host, self.tor_proxy_port
)
opener = (
urllib.request.build_opener(open_handler)
if self.use_tor_proxy
else urllib.request.build_opener()
)
if headers is None:
opener.addheaders = [("User-agent", "Mozilla/5.0")]
open_handler = SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, self.tor_proxy_host, self.tor_proxy_port)
opener = urllib.request.build_opener(open_handler) if self.use_tor_proxy else urllib.request.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
request = urllib.request.Request(url, headers=headers)
return opener.open(request, timeout=timeout).read()
def logException(self, message: str) -> None:
def logException(self, message) -> None:
self.log.error(message)
if self.debug:
self.log.error(traceback.format_exc())
def logD(self, log_category: int, message: str) -> None:
if log_category not in self._enabled_log_categories:
return
self.log.debug("(" + LC(log_category).name + ") " + message)
def torControl(self, query):
try:
command = 'AUTHENTICATE "{}"\r\n{}\r\nQUIT\r\n'.format(
self.tor_control_password, query
).encode("utf-8")
command = 'AUTHENTICATE "{}"\r\n{}\r\nQUIT\r\n'.format(self.tor_control_password, query).encode('utf-8')
c = socket.create_connection((self.tor_proxy_host, self.tor_control_port))
c.send(command)
response = bytearray()
@@ -288,37 +188,12 @@ class BaseApp(DBMethods):
c.close()
return response
except Exception as e:
self.log.error(f"torControl {e}")
self.log.error(f'torControl {e}')
return
def getTime(self) -> int:
return int(time.time()) + self.mock_time_offset
def setMockTimeOffset(self, new_offset: int) -> None:
self.log.warning(f"Setting mocktime to {new_offset}")
self.log.warning(f'Setting mocktime to {new_offset}')
self.mock_time_offset = new_offset
def get_int_setting(self, name: str, default_v: int, min_v: int, max_v) -> int:
value: int = self.settings.get(name, default_v)
if value < min_v:
self.log.warning(f"Setting {name} to {min_v}")
value = min_v
if value > max_v:
self.log.warning(f"Setting {name} to {max_v}")
value = max_v
return value
def get_delay_event_seconds(self):
if self.min_delay_event == self.max_delay_event:
return self.min_delay_event
return random.randrange(self.min_delay_event, self.max_delay_event)
def get_short_delay_event_seconds(self):
if self.min_delay_event_short == self.max_delay_event_short:
return self.min_delay_event_short
return random.randrange(self.min_delay_event_short, self.max_delay_event_short)
def get_delay_retry_seconds(self):
if self.min_delay_retry == self.max_delay_retry:
return self.min_delay_retry
return random.randrange(self.min_delay_retry, self.max_delay_retry)

File diff suppressed because it is too large.


@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2021-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2021-2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
@@ -9,14 +8,12 @@
import struct
import hashlib
from enum import IntEnum, auto
from html import escape as html_escape
from .util.address import (
encodeAddress,
decodeAddress,
)
from .chainparams import (
chainparams,
Fiat,
)
@@ -36,16 +33,6 @@ class KeyTypes(IntEnum):
KAF = 6
class MessageNetworks(IntEnum):
SMSG = auto()
SIMPLEX = auto()
class MessageNetworkLinkTypes(IntEnum):
RECEIVED_ON = auto()
SENT_ON = auto()
class MessageTypes(IntEnum):
OFFER = auto()
BID = auto()
@@ -63,18 +50,12 @@ class MessageTypes(IntEnum):
ADS_BID_LF = auto()
ADS_BID_ACCEPT_FL = auto()
CONNECT_REQ = auto()
PORTAL_OFFER = auto()
PORTAL_SEND = auto()
class AddressTypes(IntEnum):
OFFER = auto()
BID = auto()
RECV_OFFER = auto()
SEND_OFFER = auto()
PORTAL_LOCAL = auto()
PORTAL = auto()
class SwapTypes(IntEnum):
@@ -83,25 +64,23 @@ class SwapTypes(IntEnum):
SELLER_FIRST_2MSG = auto()
BUYER_FIRST_2MSG = auto()
XMR_SWAP = auto()
XMR_BCH_SWAP = auto()
class OfferStates(IntEnum):
OFFER_SENT = 1
OFFER_RECEIVED = 2
OFFER_ABANDONED = 3
OFFER_EXPIRED = 4
class BidStates(IntEnum):
BID_SENT = 1
BID_RECEIVING = 2 # Partially received
BID_RECEIVING = 2 # Partially received
BID_RECEIVED = 3
BID_RECEIVING_ACC = 4 # Partially received accept message
BID_ACCEPTED = 5 # BidAcceptMessage received/sent
SWAP_INITIATED = 6 # Initiate txn validated
SWAP_PARTICIPATING = 7 # Participate txn validated
SWAP_COMPLETED = 8 # All swap txns spent
BID_RECEIVING_ACC = 4 # Partially received accept message
BID_ACCEPTED = 5 # BidAcceptMessage received/sent
SWAP_INITIATED = 6 # Initiate txn validated
SWAP_PARTICIPATING = 7 # Participate txn validated
SWAP_COMPLETED = 8 # All swap txns spent
XMR_SWAP_SCRIPT_COIN_LOCKED = 9
XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX = 10
XMR_SWAP_NOSCRIPT_COIN_LOCKED = 11
@@ -115,19 +94,15 @@ class BidStates(IntEnum):
XMR_SWAP_FAILED = 19
SWAP_DELAYING = 20
SWAP_TIMEDOUT = 21
BID_ABANDONED = 22 # Bid will no longer be processed
BID_ERROR = 23 # An error occurred
BID_ABANDONED = 22 # Bid will no longer be processed
BID_ERROR = 23 # An error occurred
BID_STALLED_FOR_TEST = 24
BID_REJECTED = 25
BID_STATE_UNKNOWN = 26
XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS = 27 # XmrBidLockTxSigsMessage
XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX = 28 # XmrBidLockSpendTxMessage
XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS = 27 # XmrBidLockTxSigsMessage
XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX = 28 # XmrBidLockSpendTxMessage
BID_REQUEST_SENT = 29
BID_REQUEST_ACCEPTED = 30
BID_EXPIRED = 31
BID_AACCEPT_DELAY = 32
BID_AACCEPT_FAIL = 33
CONNECT_REQ_SENT = 34
class TxStates(IntEnum):
@@ -159,8 +134,6 @@ class TxTypes(IntEnum):
ITX_PRE_FUNDED = auto()
BCH_MERCY = auto()
class ActionTypes(IntEnum):
ACCEPT_BID = auto()
@@ -207,11 +180,6 @@ class EventLogTypes(IntEnum):
PTX_PUBLISHED = auto()
PTX_REDEEM_PUBLISHED = auto()
PTX_REFUND_PUBLISHED = auto()
LOCK_TX_B_IN_MEMPOOL = auto()
BCH_MERCY_TX_PUBLISHED = auto()
BCH_MERCY_TX_FOUND = auto()
LOCK_TX_A_IN_MEMPOOL = auto()
LOCK_TX_A_CONFLICTS = auto()
class XmrSplitMsgTypes(IntEnum):
@@ -223,7 +191,6 @@ class DebugTypes(IntEnum):
NONE = 0
BID_STOP_AFTER_COIN_A_LOCK = auto()
BID_DONT_SPEND_COIN_A_LOCK_REFUND = auto()
BID_DONT_SPEND_COIN_A_LOCK_REFUND2 = auto() # continues
CREATE_INVALID_COIN_B_LOCK = auto()
BUYER_STOP_AFTER_ITX = auto()
MAKE_INVALID_PTX = auto()
@@ -231,13 +198,6 @@ class DebugTypes(IntEnum):
SKIP_LOCK_TX_REFUND = auto()
SEND_LOCKED_XMR = auto()
B_LOCK_TX_MISSED_SEND = auto()
DUPLICATE_ACTIONS = auto()
DONT_CONFIRM_PTX = auto()
OFFER_LOCK_2_VALUE_INC = auto()
BID_STOP_AFTER_COIN_B_LOCK = auto()
BID_DONT_SPEND_COIN_B_LOCK = auto()
WAIT_FOR_COIN_B_LOCK_BEFORE_REFUND = auto()
BID_DONT_SPEND_COIN_A_LOCK = auto()
class NotificationTypes(IntEnum):
@@ -245,12 +205,6 @@ class NotificationTypes(IntEnum):
OFFER_RECEIVED = auto()
BID_RECEIVED = auto()
BID_ACCEPTED = auto()
SWAP_COMPLETED = auto()
UPDATE_AVAILABLE = auto()
class ConnectionRequestTypes(IntEnum):
BID = 1
class AutomationOverrideOptions(IntEnum):
@@ -261,12 +215,12 @@ class AutomationOverrideOptions(IntEnum):
def strAutomationOverrideOption(option):
if option == AutomationOverrideOptions.DEFAULT:
return "Default"
return 'Default'
if option == AutomationOverrideOptions.ALWAYS_ACCEPT:
return "Always Accept"
return 'Always Accept'
if option == AutomationOverrideOptions.NEVER_ACCEPT:
return "Never Accept"
return "Unknown"
return 'Never Accept'
return 'Unknown'
class VisibilityOverrideOptions(IntEnum):
@@ -277,263 +231,240 @@ class VisibilityOverrideOptions(IntEnum):
def strVisibilityOverrideOption(option):
if option == VisibilityOverrideOptions.DEFAULT:
return "Default"
return 'Default'
if option == VisibilityOverrideOptions.HIDE:
return "Hide"
return 'Hide'
if option == VisibilityOverrideOptions.BLOCK:
return "Block"
return "Unknown"
return 'Block'
return 'Unknown'
def strOfferState(state):
if state == OfferStates.OFFER_SENT:
return "Sent"
return 'Sent'
if state == OfferStates.OFFER_RECEIVED:
return "Received"
return 'Received'
if state == OfferStates.OFFER_ABANDONED:
return "Abandoned"
if state == OfferStates.OFFER_EXPIRED:
return "Expired"
return "Unknown"
return 'Abandoned'
return 'Unknown'
def strBidState(state):
if state == BidStates.BID_SENT:
return "Sent"
return 'Sent'
if state == BidStates.BID_RECEIVING:
return "Receiving"
return 'Receiving'
if state == BidStates.BID_RECEIVING_ACC:
return "Receiving accept"
return 'Receiving accept'
if state == BidStates.BID_RECEIVED:
return "Received"
return 'Received'
if state == BidStates.BID_ACCEPTED:
return "Accepted"
return 'Accepted'
if state == BidStates.SWAP_INITIATED:
return "Initiated"
return 'Initiated'
if state == BidStates.SWAP_PARTICIPATING:
return "Participating"
return 'Participating'
if state == BidStates.SWAP_COMPLETED:
return "Completed"
return 'Completed'
if state == BidStates.SWAP_TIMEDOUT:
return "Timed-out"
return 'Timed-out'
if state == BidStates.BID_ABANDONED:
return "Abandoned"
return 'Abandoned'
if state == BidStates.BID_STALLED_FOR_TEST:
return "Stalled (debug)"
return 'Stalled (debug)'
if state == BidStates.BID_ERROR:
return "Error"
return 'Error'
if state == BidStates.BID_REJECTED:
return "Rejected"
return 'Rejected'
if state == BidStates.XMR_SWAP_SCRIPT_COIN_LOCKED:
return "Script coin locked"
return 'Script coin locked'
if state == BidStates.XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX:
return "Script coin spend tx valid"
return 'Script coin spend tx valid'
if state == BidStates.XMR_SWAP_NOSCRIPT_COIN_LOCKED:
return "Scriptless coin locked"
return 'Scriptless coin locked'
if state == BidStates.XMR_SWAP_LOCK_RELEASED:
return "Script coin lock released"
return 'Script coin lock released'
if state == BidStates.XMR_SWAP_SCRIPT_TX_REDEEMED:
return "Script tx redeemed"
return 'Script tx redeemed'
if state == BidStates.XMR_SWAP_SCRIPT_TX_PREREFUND:
return "Script pre-refund tx in chain"
return 'Script pre-refund tx in chain'
if state == BidStates.XMR_SWAP_NOSCRIPT_TX_REDEEMED:
return "Scriptless tx redeemed"
return 'Scriptless tx redeemed'
if state == BidStates.XMR_SWAP_NOSCRIPT_TX_RECOVERED:
return "Scriptless tx recovered"
return 'Scriptless tx recovered'
if state == BidStates.XMR_SWAP_FAILED_REFUNDED:
return "Failed, refunded"
return 'Failed, refunded'
if state == BidStates.XMR_SWAP_FAILED_SWIPED:
return "Failed, swiped"
return 'Failed, swiped'
if state == BidStates.XMR_SWAP_FAILED:
return "Failed"
return 'Failed'
if state == BidStates.SWAP_DELAYING:
return "Delaying"
return 'Delaying'
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS:
return "Exchanged script lock tx sigs msg"
return 'Exchanged script lock tx sigs msg'
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX:
return "Exchanged script lock spend tx msg"
return 'Exchanged script lock spend tx msg'
if state == BidStates.BID_REQUEST_SENT:
return "Request sent"
return 'Request sent'
if state == BidStates.BID_REQUEST_ACCEPTED:
return "Request accepted"
return 'Request accepted'
if state == BidStates.BID_STATE_UNKNOWN:
return "Unknown bid state"
if state == BidStates.BID_EXPIRED:
return "Expired"
if state == BidStates.BID_AACCEPT_DELAY:
return "Auto accept delay"
if state == BidStates.BID_AACCEPT_FAIL:
return "Auto accept failed"
if state == BidStates.CONNECT_REQ_SENT:
return "Connect request sent"
return "Unknown" + " " + str(state)
return 'Unknown bid state'
return 'Unknown' + ' ' + str(state)
def strTxState(state):
if state == TxStates.TX_NONE:
return "None"
return 'None'
if state == TxStates.TX_SENT:
return "Sent"
return 'Sent'
if state == TxStates.TX_CONFIRMED:
return "Confirmed"
return 'Confirmed'
if state == TxStates.TX_REDEEMED:
return "Redeemed"
return 'Redeemed'
if state == TxStates.TX_REFUNDED:
return "Refunded"
return 'Refunded'
if state == TxStates.TX_IN_MEMPOOL:
return "In Mempool"
return 'In Mempool'
if state == TxStates.TX_IN_CHAIN:
return "In Chain"
return "Unknown"
return 'In Chain'
return 'Unknown'
def strTxType(tx_type):
if tx_type == TxTypes.XMR_SWAP_A_LOCK:
return "Chain A Lock Tx"
return 'Chain A Lock Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_SPEND:
return "Chain A Lock Spend Tx"
return 'Chain A Lock Spend Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND:
return "Chain A Lock Refund Tx"
return 'Chain A Lock Refund Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND_SPEND:
return "Chain A Lock Refund Spend Tx"
return 'Chain A Lock Refund Spend Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND_SWIPE:
return "Chain A Lock Refund Swipe Tx"
return 'Chain A Lock Refund Swipe Tx'
if tx_type == TxTypes.XMR_SWAP_B_LOCK:
return "Chain B Lock Tx"
return 'Chain B Lock Tx'
if tx_type == TxTypes.ITX_PRE_FUNDED:
return "Funded mock initiate Tx"
if tx_type == TxTypes.BCH_MERCY:
return "BCH Mercy Tx"
return "Unknown"
return 'Funded mock initiate tx'
return 'Unknown'
def strAddressType(addr_type):
return {
AddressTypes.OFFER: "Offer",
AddressTypes.BID: "Bid",
AddressTypes.RECV_OFFER: "Offer recv",
AddressTypes.SEND_OFFER: "Offer send",
AddressTypes.PORTAL_LOCAL: "Portal (local)",
AddressTypes.PORTAL: "Portal",
}.get(addr_type, "Unknown")
if addr_type == AddressTypes.OFFER:
return 'Offer'
if addr_type == AddressTypes.BID:
return 'Bid'
if addr_type == AddressTypes.RECV_OFFER:
return 'Offer recv'
if addr_type == AddressTypes.SEND_OFFER:
return 'Offer send'
return 'Unknown'
def getLockName(lock_type):
if lock_type == TxLockTypes.SEQUENCE_LOCK_BLOCKS:
return "Sequence lock, blocks"
return 'Sequence lock, blocks'
if lock_type == TxLockTypes.SEQUENCE_LOCK_TIME:
return "Sequence lock, time"
return 'Sequence lock, time'
if lock_type == TxLockTypes.ABS_LOCK_BLOCKS:
return "blocks"
return 'blocks'
if lock_type == TxLockTypes.ABS_LOCK_TIME:
return "time"
return 'time'
def describeEventEntry(event_type, event_msg):
if event_type == EventLogTypes.FAILED_TX_B_LOCK_PUBLISH:
return "Failed to publish lock tx B"
return 'Failed to publish lock tx B'
if event_type == EventLogTypes.LOCK_TX_A_PUBLISHED:
return "Lock tx A published"
return 'Lock tx A published'
if event_type == EventLogTypes.LOCK_TX_B_PUBLISHED:
return "Lock tx B published"
return 'Lock tx B published'
if event_type == EventLogTypes.FAILED_TX_B_SPEND:
return "Failed to publish lock tx B spend: " + event_msg
if event_type == EventLogTypes.LOCK_TX_A_IN_MEMPOOL:
return "Lock tx A seen in mempool"
if event_type == EventLogTypes.LOCK_TX_A_CONFLICTS:
return "Lock tx A conflicting txn/s"
return 'Failed to publish lock tx B spend: ' + event_msg
if event_type == EventLogTypes.LOCK_TX_A_SEEN:
return "Lock tx A seen in chain"
return 'Lock tx A seen in chain'
if event_type == EventLogTypes.LOCK_TX_A_CONFIRMED:
return "Lock tx A confirmed in chain"
return 'Lock tx A confirmed in chain'
if event_type == EventLogTypes.LOCK_TX_B_SEEN:
return "Lock tx B seen in chain"
return 'Lock tx B seen in chain'
if event_type == EventLogTypes.LOCK_TX_B_CONFIRMED:
return "Lock tx B confirmed in chain"
if event_type == EventLogTypes.LOCK_TX_B_IN_MEMPOOL:
return "Lock tx B seen in mempool"
return 'Lock tx B confirmed in chain'
if event_type == EventLogTypes.DEBUG_TWEAK_APPLIED:
return "Debug tweak applied " + event_msg
return 'Debug tweak applied ' + event_msg
if event_type == EventLogTypes.FAILED_TX_B_REFUND:
return "Failed to publish lock tx B refund"
return 'Failed to publish lock tx B refund'
if event_type == EventLogTypes.LOCK_TX_B_INVALID:
return "Detected invalid lock Tx B"
return 'Detected invalid lock Tx B'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_TX_PUBLISHED:
return "Lock tx A refund tx published"
return 'Lock tx A refund tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SPEND_TX_PUBLISHED:
return "Lock tx A refund spend tx published"
return 'Lock tx A refund spend tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SWIPE_TX_PUBLISHED:
return "Lock tx A refund swipe tx published"
return 'Lock tx A refund swipe tx published'
if event_type == EventLogTypes.LOCK_TX_B_REFUND_TX_PUBLISHED:
return "Lock tx B refund tx published"
return 'Lock tx B refund tx published'
if event_type == EventLogTypes.LOCK_TX_A_SPEND_TX_PUBLISHED:
return "Lock tx A spend tx published"
return 'Lock tx A spend tx published'
if event_type == EventLogTypes.LOCK_TX_B_SPEND_TX_PUBLISHED:
return "Lock tx B spend tx published"
return 'Lock tx B spend tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_TX_SEEN:
return "Lock tx A refund tx seen in chain"
return 'Lock tx A refund tx seen in chain'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SPEND_TX_SEEN:
return "Lock tx A refund spend tx seen in chain"
return 'Lock tx A refund spend tx seen in chain'
if event_type == EventLogTypes.SYSTEM_WARNING:
return "Warning: " + event_msg
return 'Warning: ' + event_msg
if event_type == EventLogTypes.ERROR:
return "Error: " + event_msg
return 'Error: ' + event_msg
if event_type == EventLogTypes.AUTOMATION_CONSTRAINT:
return "Failed auto accepting"
return 'Failed auto accepting'
if event_type == EventLogTypes.AUTOMATION_ACCEPTING_BID:
return "Auto accepting"
return 'Auto accepting'
if event_type == EventLogTypes.ITX_PUBLISHED:
return "Initiate tx published"
return 'Initiate tx published'
if event_type == EventLogTypes.ITX_REDEEM_PUBLISHED:
return "Initiate tx redeem tx published"
return 'Initiate tx redeem tx published'
if event_type == EventLogTypes.ITX_REFUND_PUBLISHED:
return "Initiate tx refund tx published"
return 'Initiate tx refund tx published'
if event_type == EventLogTypes.PTX_PUBLISHED:
return "Participate tx published"
return 'Participate tx published'
if event_type == EventLogTypes.PTX_REDEEM_PUBLISHED:
return "Participate tx redeem tx published"
return 'Participate tx redeem tx published'
if event_type == EventLogTypes.PTX_REFUND_PUBLISHED:
return "Participate tx refund tx published"
if event_type == EventLogTypes.BCH_MERCY_TX_FOUND:
return "BCH mercy tx found"
if event_type == EventLogTypes.BCH_MERCY_TX_PUBLISHED:
return "Lock tx B mercy tx published"
return 'Participate tx refund tx published'
def getVoutByAddress(txjs, p2sh):
for o in txjs["vout"]:
for o in txjs['vout']:
try:
if "address" in o["scriptPubKey"] and o["scriptPubKey"]["address"] == p2sh:
return o["n"]
if p2sh in o["scriptPubKey"]["addresses"]:
return o["n"]
if 'address' in o['scriptPubKey'] and o['scriptPubKey']['address'] == p2sh:
return o['n']
if p2sh in o['scriptPubKey']['addresses']:
return o['n']
except Exception:
pass
raise ValueError("Address output not found in txn")
raise ValueError('Address output not found in txn')
def getVoutByScriptPubKey(txjs, scriptPubKey_hex: str) -> int:
for o in txjs["vout"]:
def getVoutByP2WSH(txjs, p2wsh_hex):
for o in txjs['vout']:
try:
if scriptPubKey_hex == o["scriptPubKey"]["hex"]:
return o["n"]
if p2wsh_hex == o['scriptPubKey']['hex']:
return o['n']
except Exception:
pass
raise ValueError("scriptPubKey output not found in txn")
raise ValueError('P2WSH output not found in txn')
def replaceAddrPrefix(addr, coin_type, chain_name, addr_type="pubkey_address"):
return encodeAddress(
bytes((chainparams[coin_type][chain_name][addr_type],))
+ decodeAddress(addr)[1:]
)
def replaceAddrPrefix(addr, coin_type, chain_name, addr_type='pubkey_address'):
return encodeAddress(bytes((chainparams[coin_type][chain_name][addr_type],)) + decodeAddress(addr)[1:])
def getOfferProofOfFundsHash(offer_msg, offer_addr):
# TODO: Hash must not include proof_of_funds sig if it exists in offer_msg
h = hashlib.sha256()
h.update(offer_addr.encode("utf-8"))
offer_bytes = offer_msg.to_bytes()
h.update(offer_addr.encode('utf-8'))
offer_bytes = offer_msg.SerializeToString()
h.update(offer_bytes)
return h.digest()
@@ -542,93 +473,33 @@ def getLastBidState(packed_states):
num_states = len(packed_states) // 12
if num_states < 2:
return BidStates.BID_STATE_UNKNOWN
return struct.unpack_from("<i", packed_states[(num_states - 2) * 12 :])[0]
return struct.unpack_from('<i', packed_states[(num_states - 2) * 12:])[0]
try:
num_states = len(packed_states) // 12
if num_states < 2:
return BidStates.BID_STATE_UNKNOWN
return struct.unpack_from("<i", packed_states[(num_states - 2) * 12 :])[0]
return struct.unpack_from('<i', packed_states[(num_states - 2) * 12:])[0]
except Exception:
return BidStates.BID_STATE_UNKNOWN
def strSwapType(swap_type) -> str:
def strSwapType(swap_type):
if swap_type == SwapTypes.SELLER_FIRST:
return "seller_first"
return 'seller_first'
if swap_type == SwapTypes.XMR_SWAP:
return "xmr_swap"
return 'xmr_swap'
return None
def strSwapDesc(swap_type) -> str:
def strSwapDesc(swap_type):
if swap_type == SwapTypes.SELLER_FIRST:
return "Secret Hash"
return 'Secret Hash'
if swap_type == SwapTypes.XMR_SWAP:
return "Adaptor Sig"
return 'Adaptor Sig'
return None
def fiatTicker(fiat_ind: int) -> str:
try:
return Fiat(fiat_ind).name
except Exception as e: # noqa: F841
raise ValueError(f"Unknown fiat ind {fiat_ind}")
def fiatFromTicker(ticker: str) -> int:
ticker_uc = ticker.upper()
for entry in Fiat:
if entry.name == ticker_uc:
return entry
raise ValueError(f"Unknown fiat {ticker}")
def get_api_key_setting(
settings, setting_name: str, default_value: str = "", escape: bool = False
):
setting_name_enc: str = setting_name + "_enc"
if setting_name_enc in settings:
rv = bytes.fromhex(settings[setting_name_enc]).decode("utf-8")
return html_escape(rv) if escape else rv
return settings.get(setting_name, default_value)
inactive_states = [
BidStates.SWAP_COMPLETED,
BidStates.BID_ERROR,
BidStates.BID_REJECTED,
BidStates.SWAP_TIMEDOUT,
BidStates.BID_ABANDONED,
BidStates.BID_EXPIRED,
]
def canAcceptBidState(state):
return state in (
BidStates.BID_RECEIVED,
BidStates.BID_AACCEPT_DELAY,
BidStates.BID_AACCEPT_FAIL,
)
def canExpireBidState(state):
return state in (
BidStates.BID_SENT,
BidStates.BID_RECEIVING,
BidStates.BID_RECEIVED,
BidStates.BID_AACCEPT_DELAY,
BidStates.BID_AACCEPT_FAIL,
BidStates.BID_REQUEST_SENT,
)
def canTimeoutBidState(state):
return state in (
BidStates.BID_ACCEPTED,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS,
BidStates.XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX,
)
inactive_states = [BidStates.SWAP_COMPLETED, BidStates.BID_ERROR, BidStates.BID_REJECTED, BidStates.SWAP_TIMEDOUT, BidStates.BID_ABANDONED]
def isActiveBidState(state):


@@ -1 +0,0 @@
name = "bin"

File diff suppressed because it is too large.


@@ -1,778 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import logging
import os
import shutil
import signal
import subprocess
import sys
import traceback
import basicswap.config as cfg
from basicswap import __version__
from basicswap.basicswap import BasicSwap
from basicswap.chainparams import chainparams, Coins, isKnownCoinName
from basicswap.network.simplex_chat import startSimplexClient
from basicswap.ui.util import getCoinName
from basicswap.util.daemon import Daemon
initial_logger = logging.getLogger()
initial_logger.level = logging.DEBUG
if not len(initial_logger.handlers):
initial_logger.addHandler(logging.StreamHandler(sys.stdout))
logger = initial_logger
swap_client = None
def signal_handler(sig, frame):
os.write(
sys.stdout.fileno(), f"Signal {sig} detected, ending program.\n".encode("utf-8")
)
if swap_client is not None and not swap_client.chainstate_delay_event.is_set():
try:
from basicswap.ui.page_amm import stop_amm_process, get_amm_status
amm_status = get_amm_status()
if amm_status == "running":
logger.info("Signal handler stopping AMM process...")
success, msg = stop_amm_process(swap_client)
if success:
logger.info(f"AMM signal shutdown: {msg}")
else:
logger.warning(f"AMM signal shutdown warning: {msg}")
except Exception as e:
logger.error(f"Error stopping AMM in signal handler: {e}")
swap_client.stopRunning()
def checkPARTZmqConfigBeforeStart(part_settings, swap_settings):
try:
datadir = part_settings.get("datadir")
if not datadir:
return
config_path = os.path.join(datadir, "particl.conf")
if not os.path.exists(config_path):
return
with open(config_path, "r") as f:
config_content = f.read()
zmq_host = swap_settings.get("zmqhost", "tcp://127.0.0.1")
zmq_port = swap_settings.get("zmqport", 14792)
expected_line = f"zmqpubhashwtx={zmq_host}:{zmq_port}"
if "zmqpubhashwtx=" not in config_content:
with open(config_path, "a") as f:
f.write(f"{expected_line}\n")
elif expected_line not in config_content:
lines = config_content.split("\n")
updated_lines = []
for line in lines:
if line.startswith("zmqpubhashwtx="):
updated_lines.append(expected_line)
else:
updated_lines.append(line)
with open(config_path, "w") as f:
f.write("\n".join(updated_lines))
except Exception as e:
logger.debug(f"Error checking PART ZMQ config: {e}")
def startDaemon(node_dir, bin_dir, daemon_bin, opts=[], extra_config={}):
daemon_bin = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
datadir_path = os.path.expanduser(node_dir)
coin_name = extra_config.get("coin_name", "")
# Rewrite litecoin.conf
# TODO: Remove
ltc_conf_path = os.path.join(datadir_path, "litecoin.conf")
if os.path.exists(ltc_conf_path):
needs_rewrite: bool = False
add_changetype: bool = True
with open(ltc_conf_path) as fp:
for line in fp:
line = line.strip()
if line.startswith("changetype="):
add_changetype = False
break
if line.endswith("=onion"):
needs_rewrite = True
break
if needs_rewrite:
logger.info("Rewriting litecoin.conf")
shutil.copyfile(ltc_conf_path, ltc_conf_path + ".last")
with (
open(ltc_conf_path + ".last") as fp_from,
open(ltc_conf_path, "w") as fp_to,
):
for line in fp_from:
if line.strip().endswith("=onion"):
fp_to.write(line.strip()[:-6] + "\n")
else:
fp_to.write(line)
if add_changetype:
fp_to.write("changetype=bech32\n")
add_changetype = False
if add_changetype:
logger.info("Adding changetype to litecoin.conf")
with open(ltc_conf_path, "a") as fp:
fp.write("changetype=bech32\n")
# Rewrite bitcoin.conf
# TODO: Remove
btc_conf_path = os.path.join(datadir_path, "bitcoin.conf")
if coin_name == "bitcoin" and os.path.exists(btc_conf_path):
add_changetype: bool = True
with open(btc_conf_path) as fp:
for line in fp:
line = line.strip()
if line.startswith("changetype="):
add_changetype = False
break
if add_changetype:
logger.info("Adding changetype to bitcoin.conf")
with open(btc_conf_path, "a") as fp:
fp.write("changetype=bech32\n")
args = [
daemon_bin,
]
add_datadir: bool = extra_config.get("add_datadir", True)
if add_datadir:
args.append("-datadir=" + datadir_path)
args += opts
logger.info(f"Starting node {daemon_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
opened_files = []
if extra_config.get("stdout_to_file", False):
stdout_dest = open(
os.path.join(
datadir_path, extra_config.get("stdout_filename", "core_stdout.log")
),
"w",
)
opened_files.append(stdout_dest)
stderr_dest = stdout_dest
else:
stdout_dest = subprocess.PIPE
stderr_dest = subprocess.PIPE
shell: bool = False
if extra_config.get("use_shell", False):
args = " ".join(args)
shell = True
return Daemon(
subprocess.Popen(
args,
shell=shell,
stdin=subprocess.PIPE,
stdout=stdout_dest,
stderr=stderr_dest,
cwd=datadir_path,
),
opened_files,
os.path.basename(daemon_bin),
)
def startXmrDaemon(node_dir, bin_dir, daemon_bin, opts=[]):
daemon_path = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
datadir_path = os.path.expanduser(node_dir)
config_filename = (
"wownerod.conf" if daemon_bin.startswith("wow") else "monerod.conf"
)
args = [
daemon_path,
"--non-interactive",
"--config-file=" + os.path.join(datadir_path, config_filename),
] + opts
logger.info(f"Starting node {daemon_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
# return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
file_stdout = open(os.path.join(datadir_path, "core_stdout.log"), "w")
file_stderr = open(os.path.join(datadir_path, "core_stderr.log"), "w")
return Daemon(
subprocess.Popen(
args,
stdin=subprocess.PIPE,
stdout=file_stdout,
stderr=file_stderr,
cwd=datadir_path,
),
[file_stdout, file_stderr],
os.path.basename(daemon_bin),
)
def startXmrWalletDaemon(node_dir, bin_dir, wallet_bin, opts=[]):
daemon_path = os.path.expanduser(os.path.join(bin_dir, wallet_bin))
args = [daemon_path, "--non-interactive"]
needs_rewrite: bool = False
config_to_remove = [
"daemon-address=",
"untrusted-daemon=",
"trusted-daemon=",
"proxy=",
]
data_dir = os.path.expanduser(node_dir)
wallet_config_filename = (
"wownero-wallet-rpc.conf"
if wallet_bin.startswith("wow")
else "monero_wallet.conf"
)
config_path = os.path.join(data_dir, wallet_config_filename)
if os.path.exists(config_path):
args += ["--config-file=" + config_path]
with open(config_path) as fp:
for line in fp:
if any(
line.startswith(config_line) for config_line in config_to_remove
):
logger.warning(
"Found old config in monero_wallet.conf: {}".format(
line.strip()
)
)
needs_rewrite = True
args += opts
if needs_rewrite:
logger.info("Rewriting wallet config")
shutil.copyfile(config_path, config_path + ".last")
with open(config_path + ".last") as fp_from, open(config_path, "w") as fp_to:
for line in fp_from:
if not any(
line.startswith(config_line) for config_line in config_to_remove
):
fp_to.write(line)
logger.info(f"Starting wallet daemon {wallet_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
# TODO: return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=data_dir)
wallet_stdout = open(os.path.join(data_dir, "wallet_stdout.log"), "w")
wallet_stderr = open(os.path.join(data_dir, "wallet_stderr.log"), "w")
return Daemon(
subprocess.Popen(
args,
stdin=subprocess.PIPE,
stdout=wallet_stdout,
stderr=wallet_stderr,
cwd=data_dir,
),
[wallet_stdout, wallet_stderr],
os.path.basename(wallet_bin),
)
def getCoreBinName(coin_id: int, coin_settings, default_name: str) -> str:
return coin_settings.get(
"core_binname", chainparams[coin_id].get("core_binname", default_name)
) + (".exe" if os.name == "nt" else "")
def getWalletBinName(coin_id: int, coin_settings, default_name: str) -> str:
return coin_settings.get(
"wallet_binname", chainparams[coin_id].get("wallet_binname", default_name)
) + (".exe" if os.name == "nt" else "")
def getCoreBinArgs(coin_id: int, coin_settings, prepare=False, use_tor_proxy=False):
extra_args = []
if "config_filename" in coin_settings:
extra_args.append("--conf=" + coin_settings["config_filename"])
if "port" in coin_settings and coin_id != Coins.BTC:
if prepare is False and use_tor_proxy:
if coin_id == Coins.BCH:
# Without this BCH (27.1) will bind to the default BTC port, even with proxy set
extra_args.append("--bind=127.0.0.1:" + str(int(coin_settings["port"])))
else:
extra_args.append("--port=" + str(int(coin_settings["port"])))
# BTC versions from v28 fail to start if the onionport is in use.
# As BCH may use port 8334, disable it here.
# When tor is enabled a bind option for the onionport will be added to bitcoin.conf.
# https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-28.0.md?plain=1#L84
if (
prepare is False
and use_tor_proxy is False
and coin_id in (Coins.BTC, Coins.NMC)
):
port: int = coin_settings.get("port", 8333)
extra_args.append(f"--bind=0.0.0.0:{port}")
return extra_args
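# Worked example (illustrative; the settings dict is hypothetical): with prepare and
# use_tor_proxy both left False, a BTC entry of {"port": 8333} produces only the
# explicit bind argument added for BTC/NMC above.
#   getCoreBinArgs(Coins.BTC, {"port": 8333})  # -> ["--bind=0.0.0.0:8333"]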
def mainLoop(daemons, update: bool = True):
while not swap_client.delay_event.wait(0.5):
if update:
swap_client.update()
else:
pass
for daemon in daemons:
if daemon.running is False:
continue
poll = daemon.handle.poll()
if poll is None:
pass # Process is running
else:
daemon.running = False
swap_client.log.error(
f"Process {daemon.handle.pid} for {daemon.name} terminated unexpectedly returning {poll}."
)
def runClient(
data_dir: str,
chain: str,
start_only_coins: bool,
log_prefix: str = "BasicSwap",
extra_opts=dict(),
) -> int:
global swap_client, logger
daemons = []
pids = []
settings_path = os.path.join(data_dir, cfg.CONFIG_FILENAME)
pids_path = os.path.join(data_dir, ".pids")
if os.getenv("WALLET_ENCRYPTION_PWD", "") != "":
if "decred" in start_only_coins:
# Workaround for dcrwallet requiring password for initial startup
logger.warning(
"Allowing set WALLET_ENCRYPTION_PWD var with --startonlycoin=decred."
)
else:
raise ValueError(
"Please unset the WALLET_ENCRYPTION_PWD environment variable."
)
if not os.path.exists(settings_path):
raise ValueError("Settings file not found: " + str(settings_path))
with open(settings_path) as fs:
settings = json.load(fs)
swap_client = BasicSwap(
data_dir, settings, chain, log_name=log_prefix, extra_opts=extra_opts
)
logger = swap_client.log
if os.path.exists(pids_path):
with open(pids_path) as fd:
for ln in fd:
# TODO: try close
logger.warning("Found pid for daemon {}".format(ln.strip()))
# Ensure daemons are stopped
swap_client.stopDaemons()
# Settings may have been modified
settings = swap_client.settings
try:
# Try start daemons
for network in settings.get("networks", []):
if network.get("enabled", True) is False:
continue
network_type: str = network.get("type", "unknown")
if network_type == "simplex":
simplex_dir = os.path.join(data_dir, "simplex")
log_level = "debug" if swap_client.debug else "info"
socks_proxy = None
if "socks_proxy_override" in network:
socks_proxy = network["socks_proxy_override"]
elif swap_client.use_tor_proxy:
socks_proxy = (
f"{swap_client.tor_proxy_host}:{swap_client.tor_proxy_port}"
)
daemons.append(
startSimplexClient(
network["client_path"],
simplex_dir,
network["server_address"],
network["ws_port"],
logger,
swap_client.delay_event,
socks_proxy=socks_proxy,
log_level=log_level,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started Simplex client {pid}")
for c, v in settings["chainclients"].items():
if len(start_only_coins) > 0 and c not in start_only_coins:
continue
if (
len(swap_client.with_coins_override) > 0
and c not in swap_client.with_coins_override
) or c in swap_client.without_coins_override:
if v.get("manage_daemon", False) or v.get(
"manage_wallet_daemon", False
):
logger.warning(
f"Not starting coin {c.capitalize()}, disabled by arguments."
)
continue
try:
coin_id = swap_client.getCoinIdFromName(c)
display_name = getCoinName(coin_id)
except Exception as e: # noqa: F841
logger.warning(f"Not starting unknown coin: {c}")
continue
if c in ("monero", "wownero"):
if v["manage_daemon"] is True:
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, c + "d")
daemons.append(startXmrDaemon(v["datadir"], v["bindir"], filename))
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
if v["manage_wallet_daemon"] is True:
swap_client.log.info(f"Starting {display_name} wallet daemon")
daemon_addr = "{}:{}".format(v["rpchost"], v["rpcport"])
trusted_daemon: bool = swap_client.getXMRTrustedDaemon(
coin_id, v["rpchost"]
)
opts = [
"--daemon-address",
daemon_addr,
]
proxy_log_str = ""
proxy_host, proxy_port = swap_client.getXMRWalletProxy(
coin_id, v["rpchost"]
)
if proxy_host:
proxy_log_str = " through proxy"
opts += [
"--proxy",
f"{proxy_host}:{proxy_port}",
"--daemon-ssl-allow-any-cert",
]
swap_client.log.info(
"daemon-address: {} ({}){}".format(
daemon_addr,
"trusted" if trusted_daemon else "untrusted",
proxy_log_str,
)
)
daemon_rpcuser = v.get("rpcuser", "")
daemon_rpcpass = v.get("rpcpassword", "")
if daemon_rpcuser != "":
opts.append("--daemon-login")
opts.append(daemon_rpcuser + ":" + daemon_rpcpass)
opts.append(
"--trusted-daemon" if trusted_daemon else "--untrusted-daemon"
)
filename: str = getWalletBinName(coin_id, v, c + "-wallet-rpc")
daemons.append(
startXmrWalletDaemon(v["datadir"], v["bindir"], filename, opts)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
continue # /monero
if c == "decred":
appdata = v["datadir"]
extra_opts = [
f'--appdata="{appdata}"',
]
use_shell: bool = True if os.name == "nt" else False
if v["manage_daemon"] is True:
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, "dcrd")
extra_config = {
"add_datadir": False,
"stdout_to_file": True,
"stdout_filename": "dcrd_stdout.log",
"use_shell": use_shell,
"coin_name": "decred",
}
daemons.append(
startDaemon(
appdata,
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
if v["manage_wallet_daemon"] is True:
swap_client.log.info(f"Starting {display_name} wallet daemon")
filename: str = getWalletBinName(coin_id, v, "dcrwallet")
wallet_pwd = v["wallet_pwd"]
if wallet_pwd == "":
# Only set when in startonlycoin mode
wallet_pwd = os.getenv("WALLET_ENCRYPTION_PWD", "")
if wallet_pwd != "":
extra_opts.append(f'--pass="{wallet_pwd}"')
extra_config = {
"add_datadir": False,
"stdout_to_file": True,
"stdout_filename": "dcrwallet_stdout.log",
"use_shell": use_shell,
"coin_name": "decred",
}
daemons.append(
startDaemon(
appdata,
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
continue # /decred
if v["manage_daemon"] is True:
if c == "particl" and swap_client._zmq_queue_enabled:
checkPARTZmqConfigBeforeStart(v, swap_client.settings)
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, c + "d")
extra_opts = getCoreBinArgs(
coin_id, v, use_tor_proxy=swap_client.use_tor_proxy
)
extra_config = {"coin_name": c}
daemons.append(
startDaemon(
v["datadir"],
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
pids.append((c, pid))
swap_client.setDaemonPID(c, pid)
swap_client.log.info(f"Started {filename} {pid}")
if len(pids) > 0:
with open(pids_path, "w") as fd:
for p in pids:
fd.write("{}:{}\n".format(*p))
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGHUP, signal_handler)
if len(start_only_coins) > 0:
logger.info(
f"Only running {start_only_coins}. Manually exit with Ctrl + c when ready."
)
mainLoop(daemons, update=False)
else:
swap_client.start()
logger.info("Exit with Ctrl + c.")
mainLoop(daemons)
except Exception as e: # noqa: F841
traceback.print_exc()
if swap_client.ws_server:
try:
swap_client.log.info("Stopping websocket server.")
swap_client.ws_server.shutdown_gracefully()
except Exception as e: # noqa: F841
traceback.print_exc()
swap_client.finalise()
closed_pids = []
for d in daemons:
swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}")
try:
d.handle.send_signal(
signal.CTRL_C_EVENT if os.name == "nt" else signal.SIGINT
)
except Exception as e:
swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}, error {e}")
for d in daemons:
try:
d.handle.wait(timeout=120)
for fp in [d.handle.stdout, d.handle.stderr, d.handle.stdin] + d.files:
if fp:
fp.close()
closed_pids.append(d.handle.pid)
except Exception as e:
swap_client.log.error(f"Error: {e}")
fail_code: int = swap_client.fail_code
del swap_client
if os.path.exists(pids_path):
with open(pids_path) as fd:
lines = fd.read().split("\n")
still_running = ""
for ln in lines:
try:
if int(ln.split(":")[1]) not in closed_pids:
still_running += ln + "\n"
except Exception:
pass
with open(pids_path, "w") as fd:
fd.write(still_running)
return fail_code
def printVersion():
logger.info(
f"Basicswap version: {__version__}",
)
def ensure_coin_valid(coin: str) -> None:
if isKnownCoinName(coin) is False:
raise ValueError(f"Unknown coin: {coin}")
def printHelp():
print("Usage: basicswap-run ")
print("\n--help, -h Print help.")
print("--version, -v Print version.")
print(
f"--datadir=PATH Path to basicswap data directory, default:{cfg.BASICSWAP_DATADIR}."
)
print("--mainnet Run in mainnet mode.")
print("--testnet Run in testnet mode.")
print("--regtest Run in regtest mode.")
print("--withcoin= Run only with coin/s.")
print("--withoutcoin= Run without coin/s.")
print(
"--startonlycoin Only start the provides coin daemon/s, use this if a chain requires extra processing."
)
print("--logprefix Specify log prefix.")
print(
"--forcedbupgrade Recheck database against schema regardless of version."
)
def main():
data_dir = None
chain = "mainnet"
start_only_coins = set()
log_prefix: str = "BasicSwap"
options = dict()
with_coins = set()
without_coins = set()
for v in sys.argv[1:]:
if len(v) < 2 or v[0] != "-":
logger.warning(f"Unknown argument {v}")
continue
s = v.split("=")
name = s[0].strip()
for i in range(2):
if name[0] == "-":
name = name[1:]
if name == "v" or name == "version":
printVersion()
return 0
if name == "h" or name == "help":
printHelp()
return 0
if name in ("mainnet", "testnet", "regtest"):
chain = name
continue
if name in ("withcoin", "withcoins"):
for coin in [s.strip().lower() for s in s[1].split(",")]:
ensure_coin_valid(coin)
with_coins.add(coin)
continue
if name in ("withoutcoin", "withoutcoins"):
for coin in [s.strip().lower() for s in s[1].split(",")]:
if coin == "particl":
raise ValueError("Particl is required.")
ensure_coin_valid(coin)
without_coins.add(coin)
continue
if name == "forcedbupgrade":
options["force_db_upgrade"] = True
continue
if len(s) == 2:
if name == "datadir":
data_dir = os.path.abspath(os.path.expanduser(s[1]))
continue
if name == "logprefix":
log_prefix = s[1]
continue
if name == "startonlycoin":
for coin in [s.lower() for s in s[1].split(",")]:
ensure_coin_valid(coin)
start_only_coins.add(coin)
continue
logger.warning(f"Unknown argument {v}")
if os.name == "nt":
logger.warning(
"Running on windows is discouraged and windows support may be discontinued in the future. Please consider using the WSL docker setup instead."
)
if data_dir is None:
data_dir = os.path.join(os.path.expanduser(cfg.BASICSWAP_DATADIR))
logger.info(f"Using datadir: {data_dir}")
logger.info(f"Chain: {chain}")
if not os.path.exists(data_dir):
os.makedirs(data_dir)
if len(with_coins) > 0:
with_coins.add("particl")
options["with_coins"] = with_coins
if len(without_coins) > 0:
options["without_coins"] = without_coins
logger.info(os.path.basename(sys.argv[0]) + ", version: " + __version__ + "\n\n")
fail_code = runClient(data_dir, chain, start_only_coins, log_prefix, options)
print("Done.")
return fail_code
if __name__ == "__main__":
main()

View File

@@ -1,577 +1,465 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2019-2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from enum import IntEnum
from .util import (
COIN,
make_int,
format_amount,
TemporaryError,
)
XMR_COIN = 10**12
WOW_COIN = 10**11
XMR_COIN = 10 ** 12
class Coins(IntEnum):
PART = 1
BTC = 2
LTC = 3
DCR = 4
# DCR = 4
NMC = 5
XMR = 6
PART_BLIND = 7
PART_ANON = 8
WOW = 9
# ZANO = 9
# NDAU = 10
PIVX = 11
DASH = 12
FIRO = 13
NAV = 14
LTC_MWEB = 15
# ZANO = 16
BCH = 17
DOGE = 18
class Fiat(IntEnum):
USD = -1
GBP = -2
EUR = -3
VEIL = 14
chainparams = {
Coins.PART: {
"name": "particl",
"ticker": "PART",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 2,
"decimal_places": 8,
"mainnet": {
"rpcport": 51735,
"pubkey_address": 0x38,
"script_address": 0x3C,
"key_prefix": 0x6C,
"stealth_key_prefix": 0x14,
"hrp": "pw",
"bip44": 44,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x696E82D1,
"ext_secret_key_prefix": 0x8F1DAEB8,
'name': 'particl',
'ticker': 'PART',
'message_magic': 'Bitcoin Signed Message:\n',
'blocks_target': 60 * 2,
'decimal_places': 8,
'mainnet': {
'rpcport': 51735,
'pubkey_address': 0x38,
'script_address': 0x3c,
'key_prefix': 0x6c,
'stealth_key_prefix': 0x14,
'hrp': 'pw',
'bip44': 44,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 51935,
"pubkey_address": 0x76,
"script_address": 0x7A,
"key_prefix": 0x2E,
"stealth_key_prefix": 0x15,
"hrp": "tpw",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0xE1427800,
"ext_secret_key_prefix": 0x04889478,
},
"regtest": {
"rpcport": 51936,
"pubkey_address": 0x76,
"script_address": 0x7A,
"key_prefix": 0x2E,
"stealth_key_prefix": 0x15,
"hrp": "rtpw",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0xE1427800,
"ext_secret_key_prefix": 0x04889478,
'testnet': {
'rpcport': 51935,
'pubkey_address': 0x76,
'script_address': 0x7a,
'key_prefix': 0x2e,
'stealth_key_prefix': 0x15,
'hrp': 'tpw',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
'regtest': {
'rpcport': 51936,
'pubkey_address': 0x76,
'script_address': 0x7a,
'key_prefix': 0x2e,
'stealth_key_prefix': 0x15,
'hrp': 'rtpw',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.BTC: {
"name": "bitcoin",
"ticker": "BTC",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"mainnet": {
"rpcport": 8332,
"pubkey_address": 0,
"script_address": 5,
"key_prefix": 128,
"hrp": "bc",
"bip44": 0,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x0488B21E,
"ext_secret_key_prefix": 0x0488ADE4,
'name': 'bitcoin',
'ticker': 'BTC',
'message_magic': 'Bitcoin Signed Message:\n',
'blocks_target': 60 * 10,
'decimal_places': 8,
'mainnet': {
'rpcport': 8332,
'pubkey_address': 0,
'script_address': 5,
'key_prefix': 128,
'hrp': 'bc',
'bip44': 0,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "tb",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bcrt",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
'testnet': {
'rpcport': 18332,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'tb',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet3',
},
'regtest': {
'rpcport': 18443,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'bcrt',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.LTC: {
"name": "litecoin",
"ticker": "LTC",
"message_magic": "Litecoin Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"mainnet": {
"rpcport": 9332,
"pubkey_address": 48,
"script_address": 5,
"script_address2": 50,
"key_prefix": 176,
"hrp": "ltc",
"bip44": 2,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'litecoin',
'ticker': 'LTC',
'message_magic': 'Litecoin Signed Message:\n',
'blocks_target': 60 * 1,
'decimal_places': 8,
'mainnet': {
'rpcport': 9332,
'pubkey_address': 48,
'script_address': 5,
'script_address2': 50,
'key_prefix': 176,
'hrp': 'ltc',
'bip44': 2,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 19332,
"pubkey_address": 111,
"script_address": 196,
"script_address2": 58,
"key_prefix": 239,
"hrp": "tltc",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 19443,
"pubkey_address": 111,
"script_address": 196,
"script_address2": 58,
"key_prefix": 239,
"hrp": "rltc",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.DOGE: {
"name": "dogecoin",
"ticker": "DOGE",
"message_magic": "Dogecoin Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"mainnet": {
"rpcport": 22555,
"pubkey_address": 30,
"script_address": 22,
"key_prefix": 158,
"hrp": "doge",
"bip44": 3,
"min_amount": 100000, # TODO increase above fee
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 44555,
"pubkey_address": 113,
"script_address": 196,
"key_prefix": 241,
"hrp": "tdge",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "rdge",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.DCR: {
"name": "decred",
"ticker": "DCR",
"message_magic": "Decred Signed Message:\n",
"blocks_target": 60 * 5,
"decimal_places": 8,
"has_multiwallet": False,
"mainnet": {
"rpcport": 9109,
"pubkey_address": 0x073F,
"script_address": 0x071A,
"key_prefix": 0x22DE,
"bip44": 42,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 19109,
"pubkey_address": 0x0F21,
"script_address": 0x0EFC,
"key_prefix": 0x230E,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
},
"regtest": { # simnet
"rpcport": 18656,
"pubkey_address": 0x0E91,
"script_address": 0x0E6C,
"key_prefix": 0x2307,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 19332,
'pubkey_address': 111,
'script_address': 196,
'script_address2': 58,
'key_prefix': 239,
'hrp': 'tltc',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet4',
},
'regtest': {
'rpcport': 19443,
'pubkey_address': 111,
'script_address': 196,
'script_address2': 58,
'key_prefix': 239,
'hrp': 'rltc',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.NMC: {
"name": "namecoin",
"ticker": "NMC",
"message_magic": "Namecoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"mainnet": {
"rpcport": 8336,
"pubkey_address": 52,
"script_address": 13,
"key_prefix": 180,
"hrp": "nc",
"bip44": 7,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x0488B21E, # base58Prefixes[EXT_PUBLIC_KEY]
"ext_secret_key_prefix": 0x0488ADE4,
'name': 'namecoin',
'ticker': 'NMC',
'message_magic': 'Namecoin Signed Message:\n',
'blocks_target': 60 * 10,
'decimal_places': 8,
'mainnet': {
'rpcport': 8336,
'pubkey_address': 52,
'script_address': 13,
'hrp': 'nc',
'bip44': 7,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 18336,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "tn",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "ncrt",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
'testnet': {
'rpcport': 18336,
'pubkey_address': 111,
'script_address': 196,
'hrp': 'tn',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet3',
},
'regtest': {
'rpcport': 18443,
'pubkey_address': 111,
'script_address': 196,
'hrp': 'ncrt',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.XMR: {
"name": "monero",
"ticker": "XMR",
"client": "xmr",
"decimal_places": 12,
"mainnet": {
"rpcport": 18081,
"walletrpcport": 18082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
'name': 'monero',
'ticker': 'XMR',
'client': 'xmr',
'decimal_places': 12,
'mainnet': {
'rpcport': 18081,
'walletrpcport': 18082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
},
"testnet": {
"rpcport": 28081,
"walletrpcport": 28082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
},
"regtest": {
"rpcport": 18081,
"walletrpcport": 18082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
},
},
Coins.WOW: {
"name": "wownero",
"ticker": "WOW",
"client": "wow",
"decimal_places": 11,
"mainnet": {
"rpcport": 34568,
"walletrpcport": 34572, # todo
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
},
"testnet": {
"rpcport": 44568,
"walletrpcport": 44572,
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
},
"regtest": {
"rpcport": 54568,
"walletrpcport": 54572,
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
'testnet': {
'rpcport': 28081,
'walletrpcport': 28082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
},
'regtest': {
'rpcport': 18081,
'walletrpcport': 18082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
}
},
Coins.PIVX: {
"name": "pivx",
"ticker": "PIVX",
"display_name": "PIVX",
"message_magic": "DarkNet Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"has_cltv": True,
"has_csv": False,
"has_segwit": False,
"mainnet": {
"rpcport": 51473,
"pubkey_address": 30,
"script_address": 13,
"key_prefix": 212,
"bip44": 119,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'pivx',
'ticker': 'PIVX',
'message_magic': 'DarkNet Signed Message:\n',
'blocks_target': 60 * 1,
'decimal_places': 8,
'has_csv': False,
'has_segwit': False,
'use_ticker_as_name': True,
'mainnet': {
'rpcport': 51473,
'pubkey_address': 30,
'script_address': 13,
'key_prefix': 212,
'bip44': 119,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 51475,
"pubkey_address": 139,
"script_address": 19,
"key_prefix": 239,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 51477,
"pubkey_address": 139,
"script_address": 19,
"key_prefix": 239,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 51475,
'pubkey_address': 139,
'script_address': 19,
'key_prefix': 239,
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet4',
},
'regtest': {
'rpcport': 51477,
'pubkey_address': 139,
'script_address': 19,
'key_prefix': 239,
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.DASH: {
"name": "dash",
"ticker": "DASH",
"message_magic": "DarkCoin Signed Message:\n",
"blocks_target": 60 * 2.5,
"decimal_places": 8,
"has_csv": True,
"has_segwit": False,
"mainnet": {
"rpcport": 9998,
"pubkey_address": 76,
"script_address": 16,
"key_prefix": 204,
"hrp": "",
"bip44": 5,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'dash',
'ticker': 'DASH',
'message_magic': 'DarkCoin Signed Message:\n',
'blocks_target': 60 * 2.5,
'decimal_places': 8,
'has_csv': True,
'has_segwit': False,
'mainnet': {
'rpcport': 9998,
'pubkey_address': 76,
'script_address': 16,
'key_prefix': 204,
'hrp': '',
'bip44': 5,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 19998,
"pubkey_address": 140,
"script_address": 19,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 18332,
"pubkey_address": 140,
"script_address": 19,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 19998,
'pubkey_address': 140,
'script_address': 19,
'key_prefix': 239,
'hrp': '',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
'regtest': {
'rpcport': 18332,
'pubkey_address': 140,
'script_address': 19,
'key_prefix': 239,
'hrp': '',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.FIRO: {
"name": "firo",
"ticker": "FIRO",
"message_magic": "Zcoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"has_cltv": False,
"has_csv": False,
"has_segwit": False,
"has_multiwallet": False,
"mainnet": {
"rpcport": 8888,
"pubkey_address": 82,
"script_address": 7,
"key_prefix": 210,
"hrp": "",
"bip44": 136,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'firo',
'ticker': 'FIRO',
'message_magic': 'Zcoin Signed Message:\n',
'blocks_target': 60 * 10,
'decimal_places': 8,
'has_csv': True,
'has_segwit': False,
'mainnet': {
'rpcport': 8888,
'pubkey_address': 82,
'script_address': 7,
'key_prefix': 210,
'hrp': '',
'bip44': 136,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 18888,
"pubkey_address": 65,
"script_address": 178,
"key_prefix": 185,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 28888,
"pubkey_address": 65,
"script_address": 178,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 18888,
'pubkey_address': 65,
'script_address': 178,
'key_prefix': 185,
'hrp': '',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
'regtest': {
'rpcport': 28888,
'pubkey_address': 65,
'script_address': 178,
'key_prefix': 239,
'hrp': '',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.NAV: {
"name": "navcoin",
"ticker": "NAV",
"message_magic": "Navcoin Signed Message:\n",
"blocks_target": 30,
"decimal_places": 8,
"has_csv": True,
"has_segwit": True,
"has_multiwallet": False,
"mainnet": {
"rpcport": 44444,
"pubkey_address": 53,
"script_address": 85,
"key_prefix": 150,
"hrp": "",
"bip44": 130,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
Coins.VEIL: {
'name': 'veil',
'ticker': 'VEIL',
'message_magic': 'Veil Signed Message:\n',
'blocks_target': 120,
'decimal_places': 8,
'has_csv': True,
'has_segwit': False,
'mainnet': {
'rpcport': 58812,
'pubkey_address': 70,
'script_address': 5,
'key_prefix': 128,
'hrp': 'bv',
'bip44': 698,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 44445,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 44446,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.BCH: {
"name": "bitcoincash",
"ticker": "BCH",
"display_name": "Bitcoin Cash",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 2,
"decimal_places": 8,
"has_cltv": True,
"has_csv": True,
"has_segwit": False,
"cli_binname": "bitcoin-cli",
"core_binname": "bitcoind",
"mainnet": {
"rpcport": 8332,
"pubkey_address": 0,
"script_address": 5,
"key_prefix": 128,
"hrp": "bitcoincash",
"bip44": 0,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bchtest",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bchreg",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 58813,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'tv',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
'regtest': {
'rpcport': 58823,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'tv',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
}
name_map = {}
ticker_map = {}
for c, params in chainparams.items():
name_map[params["name"].lower()] = c
ticker_map[params["ticker"].lower()] = c
ticker_map[params['ticker'].lower()] = c
def getCoinIdFromTicker(ticker: str) -> str:
def getCoinIdFromTicker(ticker):
try:
return ticker_map[ticker.lower()]
except Exception:
raise ValueError(f"Unknown coin {ticker}")
raise ValueError('Unknown coin')
def getCoinIdFromName(name: str) -> str:
try:
return name_map[name.lower()]
except Exception:
raise ValueError(f"Unknown coin {name}")
class CoinInterface:
def __init__(self, network):
self.setDefaults()
self._network = network
self._mx_wallet = threading.Lock()
def setDefaults(self):
self._unknown_wallet_seed = True
self._restore_height = None
def isKnownCoinName(name: str) -> bool:
return params["name"].lower() in name_map
def make_int(self, amount_in: int, r: int = 0) -> int:
return make_int(amount_in, self.exp(), r=r)
def format_amount(self, amount_in, conv_int=False, r=0):
amount_int = make_int(amount_in, self.exp(), r=r) if conv_int else amount_in
return format_amount(amount_int, self.exp())
def coin_name(self) -> str:
coin_chainparams = chainparams[self.coin_type()]
if coin_chainparams.get('use_ticker_as_name', False):
return coin_chainparams['ticker']
return coin_chainparams['name'].capitalize()
def ticker(self) -> str:
ticker = chainparams[self.coin_type()]['ticker']
if self._network == 'testnet':
ticker = 't' + ticker
elif self._network == 'regtest':
ticker = 'rt' + ticker
return ticker
def getExchangeTicker(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]['ticker']
def getExchangeName(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]['name']
def ticker_mainnet(self) -> str:
ticker = chainparams[self.coin_type()]['ticker']
return ticker
def min_amount(self) -> int:
return chainparams[self.coin_type()][self._network]['min_amount']
def max_amount(self) -> int:
return chainparams[self.coin_type()][self._network]['max_amount']
def setWalletSeedWarning(self, value: bool) -> None:
self._unknown_wallet_seed = value
def setWalletRestoreHeight(self, value: int) -> None:
self._restore_height = value
def knownWalletSeed(self) -> bool:
return not self._unknown_wallet_seed
def chainparams(self):
return chainparams[self.coin_type()]
def chainparams_network(self):
return chainparams[self.coin_type()][self._network]
def has_segwit(self) -> bool:
return chainparams[self.coin_type()].get('has_segwit', True)
def is_transient_error(self, ex) -> bool:
if isinstance(ex, TemporaryError):
return True
str_error: str = str(ex).lower()
if 'not enough unlocked money' in str_error:
return True
if 'no unlocked balance' in str_error:
return True
if 'transaction was rejected by daemon' in str_error:
return True
if 'invalid unlocked_balance' in str_error:
return True
if 'daemon is busy' in str_error:
return True
return False
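# Illustrative check (not in the original source): the matching above is done on the
# lower-cased exception text, so daemon-busy failures count as retryable.
#   CoinInterface("mainnet").is_transient_error(Exception("Daemon is busy"))  # -> True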

View File

@@ -1,51 +1,53 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2025 The Basicswap developers
# Copyright (c) 2019-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import os
CONFIG_FILENAME = "basicswap.json"
BASICSWAP_DATADIR = os.getenv("BASICSWAP_DATADIR", os.path.join("~", ".basicswap"))
CONFIG_FILENAME = 'basicswap.json'
BASICSWAP_DATADIR = os.getenv('BASICSWAP_DATADIR', '~/.basicswap')
DEFAULT_ALLOW_CORS = False
DEFAULT_RPC_POOL_ENABLED = True
DEFAULT_RPC_POOL_MAX_CONNECTIONS = 5
TEST_DATADIRS = os.path.expanduser(os.getenv("DATADIRS", "/tmp/basicswap"))
DEFAULT_TEST_BINDIR = os.path.expanduser(
os.getenv("DEFAULT_TEST_BINDIR", os.path.join("~", ".basicswap", "bin"))
)
TEST_DATADIRS = os.path.expanduser(os.getenv('DATADIRS', '/tmp/basicswap'))
DEFAULT_TEST_BINDIR = os.path.expanduser(os.getenv('DEFAULT_TEST_BINDIR', '~/.basicswap/bin'))
bin_suffix = ".exe" if os.name == "nt" else ""
PARTICL_BINDIR = os.path.expanduser(
os.getenv("PARTICL_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "particl"))
)
PARTICLD = os.getenv("PARTICLD", "particld" + bin_suffix)
PARTICL_CLI = os.getenv("PARTICL_CLI", "particl-cli" + bin_suffix)
PARTICL_TX = os.getenv("PARTICL_TX", "particl-tx" + bin_suffix)
bin_suffix = ('.exe' if os.name == 'nt' else '')
PARTICL_BINDIR = os.path.expanduser(os.getenv('PARTICL_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'particl')))
PARTICLD = os.getenv('PARTICLD', 'particld' + bin_suffix)
PARTICL_CLI = os.getenv('PARTICL_CLI', 'particl-cli' + bin_suffix)
PARTICL_TX = os.getenv('PARTICL_TX', 'particl-tx' + bin_suffix)
BITCOIN_BINDIR = os.path.expanduser(
os.getenv("BITCOIN_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "bitcoin"))
)
BITCOIND = os.getenv("BITCOIND", "bitcoind" + bin_suffix)
BITCOIN_CLI = os.getenv("BITCOIN_CLI", "bitcoin-cli" + bin_suffix)
BITCOIN_TX = os.getenv("BITCOIN_TX", "bitcoin-tx" + bin_suffix)
BITCOIN_BINDIR = os.path.expanduser(os.getenv('BITCOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'bitcoin')))
BITCOIND = os.getenv('BITCOIND', 'bitcoind' + bin_suffix)
BITCOIN_CLI = os.getenv('BITCOIN_CLI', 'bitcoin-cli' + bin_suffix)
BITCOIN_TX = os.getenv('BITCOIN_TX', 'bitcoin-tx' + bin_suffix)
LITECOIN_BINDIR = os.path.expanduser(
os.getenv("LITECOIN_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "litecoin"))
)
LITECOIND = os.getenv("LITECOIND", "litecoind" + bin_suffix)
LITECOIN_CLI = os.getenv("LITECOIN_CLI", "litecoin-cli" + bin_suffix)
LITECOIN_TX = os.getenv("LITECOIN_TX", "litecoin-tx" + bin_suffix)
LITECOIN_BINDIR = os.path.expanduser(os.getenv('LITECOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'litecoin')))
LITECOIND = os.getenv('LITECOIND', 'litecoind' + bin_suffix)
LITECOIN_CLI = os.getenv('LITECOIN_CLI', 'litecoin-cli' + bin_suffix)
LITECOIN_TX = os.getenv('LITECOIN_TX', 'litecoin-tx' + bin_suffix)
DOGECOIND = os.getenv("DOGECOIND", "dogecoind" + bin_suffix)
DOGECOIN_CLI = os.getenv("DOGECOIN_CLI", "dogecoin-cli" + bin_suffix)
DOGECOIN_TX = os.getenv("DOGECOIN_TX", "dogecoin-tx" + bin_suffix)
NAMECOIN_BINDIR = os.path.expanduser(os.getenv('NAMECOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'namecoin')))
NAMECOIND = os.getenv('NAMECOIND', 'namecoind' + bin_suffix)
NAMECOIN_CLI = os.getenv('NAMECOIN_CLI', 'namecoin-cli' + bin_suffix)
NAMECOIN_TX = os.getenv('NAMECOIN_TX', 'namecoin-tx' + bin_suffix)
XMR_BINDIR = os.path.expanduser(
os.getenv("XMR_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "monero"))
)
XMRD = os.getenv("XMRD", "monerod" + bin_suffix)
XMR_WALLET_RPC = os.getenv("XMR_WALLET_RPC", "monero-wallet-rpc" + bin_suffix)
XMR_BINDIR = os.path.expanduser(os.getenv('XMR_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'monero')))
XMRD = os.getenv('XMRD', 'monerod' + bin_suffix)
XMR_WALLET_RPC = os.getenv('XMR_WALLET_RPC', 'monero-wallet-rpc' + bin_suffix)
# NOTE: Adding coin definitions here is deprecated. Please add in coin test file.
PIVX_BINDIR = os.path.expanduser(os.getenv('PIVX_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'pivx')))
PIVXD = os.getenv('PIVXD', 'pivxd' + bin_suffix)
PIVX_CLI = os.getenv('PIVX_CLI', 'pivx-cli' + bin_suffix)
PIVX_TX = os.getenv('PIVX_TX', 'pivx-tx' + bin_suffix)
DASH_BINDIR = os.path.expanduser(os.getenv('DASH_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'dash')))
DASHD = os.getenv('DASHD', 'dashd' + bin_suffix)
DASH_CLI = os.getenv('DASH_CLI', 'dash-cli' + bin_suffix)
DASH_TX = os.getenv('DASH_TX', 'dash-tx' + bin_suffix)
FIRO_BINDIR = os.path.expanduser(os.getenv('FIRO_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'firo')))
FIROD = os.getenv('FIROD', 'firod' + bin_suffix)
FIRO_CLI = os.getenv('FIRO_CLI', 'firo-cli' + bin_suffix)
FIRO_TX = os.getenv('FIRO_TX', 'firo-tx' + bin_suffix)

View File

@@ -1 +0,0 @@

View File

@@ -1,533 +0,0 @@
intro = """
blake.py
version 5, 2-Apr-2014
BLAKE is a SHA3 round-3 finalist designed and submitted by
Jean-Philippe Aumasson et al.
At the core of BLAKE is a ChaCha-like mixer, very similar
to that found in the stream cipher, ChaCha8. Besides being
a very good mixer, ChaCha is fast.
References:
http://www.131002.net/blake/
http://csrc.nist.gov/groups/ST/hash/sha-3/index.html
http://en.wikipedia.org/wiki/BLAKE_(hash_function)
This implementation assumes all data is in increments of
whole bytes. (The formal definition of BLAKE allows for
hashing individual bits.) Note too that this implementation
does include the round-3 tweaks where the number of rounds
was increased to 14/16 from 10/14.
This version can be imported into both Python2 (2.6 and 2.7)
and Python3 programs. Python 2.5 requires an older version
of blake.py (version 4).
Here are some comparative times for different versions of
Python:
64-bit:
2.6 6.284s
2.7 6.343s
3.2 7.620s
pypy (2.7) 2.080s
32-bit:
2.5 (32) 15.389s (with psyco)
2.7-32 13.645s
3.2-32 12.574s
One test on a 2.0GHz Core 2 Duo of 10,000 iterations of
BLAKE-256 on a short message produced a time of 5.7 seconds.
Not bad, but if raw speed is what you want, look to the
the C version. It is 40x faster and did the same thing
in 0.13 seconds.
Copyright (c) 2009-2012 by Larry Bugbee, Kent, WA
ALL RIGHTS RESERVED.
blake.py IS EXPERIMENTAL SOFTWARE FOR EDUCATIONAL
PURPOSES ONLY. IT IS MADE AVAILABLE "AS-IS" WITHOUT
WARRANTY OR GUARANTEE OF ANY KIND. USE SIGNIFIES
ACCEPTANCE OF ALL RISK.
To make your learning and experimentation less cumbersome,
blake.py is free for any use.
Enjoy,
Larry Bugbee
March 2011
rev May 2011 - fixed Python version check (tx JP)
rev Apr 2012 - fixed an out-of-order bit set in final()
- moved self-test to a separate test pgm
- this now works with Python2 and Python3
rev Apr 2014 - added test and conversion of string input
to byte string in update() (tx Soham)
- added hexdigest() method.
- now support state 3 so only one call to
final() per instantiation is allowed. all
subsequent calls to final(), digest() or
hexdigest() simply return the stored value.
"""
import struct
from binascii import hexlify, unhexlify
#---------------------------------------------------------------
class BLAKE(object):
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
# initial values, constants and padding
# IVx for BLAKE-x
IV64 = [
0x6A09E667F3BCC908, 0xBB67AE8584CAA73B,
0x3C6EF372FE94F82B, 0xA54FF53A5F1D36F1,
0x510E527FADE682D1, 0x9B05688C2B3E6C1F,
0x1F83D9ABFB41BD6B, 0x5BE0CD19137E2179,
]
IV48 = [
0xCBBB9D5DC1059ED8, 0x629A292A367CD507,
0x9159015A3070DD17, 0x152FECD8F70E5939,
0x67332667FFC00B31, 0x8EB44A8768581511,
0xDB0C2E0D64F98FA7, 0x47B5481DBEFA4FA4,
]
# note: the values here are the same as the high-order
# half-words of IV64
IV32 = [
0x6A09E667, 0xBB67AE85,
0x3C6EF372, 0xA54FF53A,
0x510E527F, 0x9B05688C,
0x1F83D9AB, 0x5BE0CD19,
]
# note: the values here are the same as the low-order
# half-words of IV48
IV28 = [
0xC1059ED8, 0x367CD507,
0x3070DD17, 0xF70E5939,
0xFFC00B31, 0x68581511,
0x64F98FA7, 0xBEFA4FA4,
]
# constants for BLAKE-64 and BLAKE-48
C64 = [
0x243F6A8885A308D3, 0x13198A2E03707344,
0xA4093822299F31D0, 0x082EFA98EC4E6C89,
0x452821E638D01377, 0xBE5466CF34E90C6C,
0xC0AC29B7C97C50DD, 0x3F84D5B5B5470917,
0x9216D5D98979FB1B, 0xD1310BA698DFB5AC,
0x2FFD72DBD01ADFB7, 0xB8E1AFED6A267E96,
0xBA7C9045F12C7F99, 0x24A19947B3916CF7,
0x0801F2E2858EFC16, 0x636920D871574E69,
]
# constants for BLAKE-32 and BLAKE-28
# note: concatenate and the values are the same as the values
# for the 1st half of C64
C32 = [
0x243F6A88, 0x85A308D3,
0x13198A2E, 0x03707344,
0xA4093822, 0x299F31D0,
0x082EFA98, 0xEC4E6C89,
0x452821E6, 0x38D01377,
0xBE5466CF, 0x34E90C6C,
0xC0AC29B7, 0xC97C50DD,
0x3F84D5B5, 0xB5470917,
]
# the 10 permutations of {0,...,15}
SIGMA = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
[14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
[11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
[ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
[ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
[ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
[12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
[13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
[ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
[10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
[14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
[11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
[ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
[ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
[ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
[12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
[13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
[ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
[10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
]
MASK32BITS = 0xFFFFFFFF
MASK64BITS = 0xFFFFFFFFFFFFFFFF
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __init__(self, hashbitlen):
"""
load the hash state structure (copy hashbitlen...)
hashbitlen: length of the hash output
"""
if hashbitlen not in [224, 256, 384, 512]:
raise Exception('hash length not 224, 256, 384 or 512')
self.hashbitlen = hashbitlen
self.h = [0]*8 # current chain value (initialized to the IV)
self.t = 0 # number of *BITS* hashed so far
self.cache = b'' # cached leftover data not yet compressed
self.salt = [0]*4 # salt (null by default)
self.state = 1 # set to 2 by update and 3 by final
self.nullt = 0 # Boolean value for special case \ell_i=0
# The algorithm is the same for both the 32- and 64- versions
# of BLAKE. The difference is in word size (4 vs 8 bytes),
# blocksize (64 vs 128 bytes), number of rounds (14 vs 16)
# and a few very specific constants.
if (hashbitlen == 224) or (hashbitlen == 256):
# setup for 32-bit words and 64-bit block
self.byte2int = self._fourByte2int
self.int2byte = self._int2fourByte
self.MASK = self.MASK32BITS
self.WORDBYTES = 4
self.WORDBITS = 32
self.BLKBYTES = 64
self.BLKBITS = 512
self.ROUNDS = 14 # was 10 before round 3
self.cxx = self.C32
self.rot1 = 16 # num bits to shift in G
self.rot2 = 12 # num bits to shift in G
self.rot3 = 8 # num bits to shift in G
self.rot4 = 7 # num bits to shift in G
self.mul = 0 # for 32-bit words, 32<<self.mul where self.mul = 0
# 224- and 256-bit versions (32-bit words)
if hashbitlen == 224:
self.h = self.IV28[:]
else:
self.h = self.IV32[:]
elif (hashbitlen == 384) or (hashbitlen == 512):
# setup for 64-bit words and 128-bit block
self.byte2int = self._eightByte2int
self.int2byte = self._int2eightByte
self.MASK = self.MASK64BITS
self.WORDBYTES = 8
self.WORDBITS = 64
self.BLKBYTES = 128
self.BLKBITS = 1024
self.ROUNDS = 16 # was 14 before round 3
self.cxx = self.C64
self.rot1 = 32 # num bits to shift in G
self.rot2 = 25 # num bits to shift in G
self.rot3 = 16 # num bits to shift in G
self.rot4 = 11 # num bits to shift in G
self.mul = 1 # for 64-bit words, 32<<self.mul where self.mul = 1
# 384- and 512-bit versions (64-bit words)
if hashbitlen == 384:
self.h = self.IV48[:]
else:
self.h = self.IV64[:]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _compress(self, block):
byte2int = self.byte2int
mul = self.mul # de-reference these for ...speed? ;-)
cxx = self.cxx
rot1 = self.rot1
rot2 = self.rot2
rot3 = self.rot3
rot4 = self.rot4
MASK = self.MASK
WORDBITS = self.WORDBITS
SIGMA = self.SIGMA
# get message (<<2 is the same as *4 but faster)
m = [byte2int(block[i<<2<<mul:(i<<2<<mul)+(4<<mul)]) for i in range(16)]
# initialization
v = [0]*16
v[ 0: 8] = [self.h[i] for i in range(8)]
v[ 8:16] = [self.cxx[i] for i in range(8)]
v[ 8:12] = [v[8+i] ^ self.salt[i] for i in range(4)]
if self.nullt == 0: # (i>>1 is the same as i/2 but faster)
v[12] = v[12] ^ (self.t & MASK)
v[13] = v[13] ^ (self.t & MASK)
v[14] = v[14] ^ (self.t >> self.WORDBITS)
v[15] = v[15] ^ (self.t >> self.WORDBITS)
# - - - - - - - - - - - - - - - - -
# ready? let's ChaCha!!!
def G(a, b, c, d, i):
va = v[a] # it's faster to deref and reref later
vb = v[b]
vc = v[c]
vd = v[d]
sri = SIGMA[round][i]
sri1 = SIGMA[round][i+1]
va = ((va + vb) + (m[sri] ^ cxx[sri1]) ) & MASK
x = vd ^ va
vd = (x >> rot1) | ((x << (WORDBITS-rot1)) & MASK)
vc = (vc + vd) & MASK
x = vb ^ vc
vb = (x >> rot2) | ((x << (WORDBITS-rot2)) & MASK)
va = ((va + vb) + (m[sri1] ^ cxx[sri]) ) & MASK
x = vd ^ va
vd = (x >> rot3) | ((x << (WORDBITS-rot3)) & MASK)
vc = (vc + vd) & MASK
x = vb ^ vc
vb = (x >> rot4) | ((x << (WORDBITS-rot4)) & MASK)
v[a] = va
v[b] = vb
v[c] = vc
v[d] = vd
for round in range(self.ROUNDS):
# column step
G( 0, 4, 8,12, 0)
G( 1, 5, 9,13, 2)
G( 2, 6,10,14, 4)
G( 3, 7,11,15, 6)
# diagonal step
G( 0, 5,10,15, 8)
G( 1, 6,11,12,10)
G( 2, 7, 8,13,12)
G( 3, 4, 9,14,14)
# - - - - - - - - - - - - - - - - -
# save current hash value (use i&0x3 to get 0,1,2,3,0,1,2,3)
self.h = [self.h[i]^v[i]^v[i+8]^self.salt[i&0x3]
for i in range(8)]
# print 'self.h', [num2hex(h) for h in self.h]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def addsalt(self, salt):
""" adds a salt to the hash function (OPTIONAL)
should be called AFTER Init, and BEFORE update
salt: a bytestring, length determined by hashbitlen.
if not of sufficient length, the bytestring
will be assumed to be a big endian number and
prefixed with an appropriate number of null
bytes, and if too large, only the low order
bytes will be used.
if hashbitlen=224 or 256, then salt will be 16 bytes
if hashbitlen=384 or 512, then salt will be 32 bytes
"""
# fail if addsalt() was not called at the right time
if self.state != 1:
raise Exception('addsalt() not called after init() and before update()')
# salt size is to be 4x word size
saltsize = self.WORDBYTES * 4
# if too short, prefix with null bytes. if too long,
# truncate high order bytes
if len(salt) < saltsize:
salt = (chr(0)*(saltsize-len(salt)) + salt)
else:
salt = salt[-saltsize:]
# prep the salt array
self.salt[0] = self.byte2int(salt[ : 4<<self.mul])
self.salt[1] = self.byte2int(salt[ 4<<self.mul: 8<<self.mul])
self.salt[2] = self.byte2int(salt[ 8<<self.mul:12<<self.mul])
self.salt[3] = self.byte2int(salt[12<<self.mul: ])
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def update(self, data):
""" update the state with new data, storing excess data
as necessary. may be called multiple times and if a
call sends less than a full block in size, the leftover
is cached and will be consumed in the next call
data: data to be hashed (bytestring)
"""
self.state = 2
BLKBYTES = self.BLKBYTES # de-referenced for improved readability
BLKBITS = self.BLKBITS
datalen = len(data)
if not datalen: return
if type(data) == type(u''):
# use either of the next two lines for a proper
# response under both Python2 and Python3
data = data.encode('UTF-8') # converts to byte string
#data = bytearray(data, 'utf-8') # use if want mutable
# This next line works for Py3 but fails under
# Py2 because the Py2 version of bytes() will
# accept only *one* argument. Arrrrgh!!!
#data = bytes(data, 'utf-8') # converts to immutable byte
# string but... under p7
# bytes() wants only 1 arg
# ...a dummy, 2nd argument like encoding=None
# that does nothing would at least allow
# compatibility between Python2 and Python3.
left = len(self.cache)
fill = BLKBYTES - left
# if any cached data and any added new data will fill a
# full block, fill and compress
if left and datalen >= fill:
self.cache = self.cache + data[:fill]
self.t += BLKBITS # update counter
self._compress(self.cache)
self.cache = b''
data = data[fill:]
datalen -= fill
# compress new data until not enough for a full block
while datalen >= BLKBYTES:
self.t += BLKBITS # update counter
self._compress(data[:BLKBYTES])
data = data[BLKBYTES:]
datalen -= BLKBYTES
# cache all leftover bytes until next call to update()
if datalen > 0:
self.cache = self.cache + data[:datalen]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def final(self, data=''):
""" finalize the hash -- pad and hash remaining data
returns hashval, the digest
"""
if self.state == 3:
# we have already finalized so simply return the
# previously calculated/stored hash value
return self.hash
if data:
self.update(data)
ZZ = b'\x00'
ZO = b'\x01'
OZ = b'\x80'
OO = b'\x81'
PADDING = OZ + ZZ*128 # pre-formatted padding data
# copy nb. bits hash in total as a 64-bit BE word
# copy nb. bits hash in total as a 128-bit BE word
tt = self.t + (len(self.cache) << 3)
if self.BLKBYTES == 64:
msglen = self._int2eightByte(tt)
else:
low = tt & self.MASK
high = tt >> self.WORDBITS
msglen = self._int2eightByte(high) + self._int2eightByte(low)
# size of block without the words at the end that count
# the number of bits, 55 or 111.
# Note: (((self.WORDBITS/8)*2)+1) equals ((self.WORDBITS>>2)+1)
sizewithout = self.BLKBYTES - ((self.WORDBITS>>2)+1)
if len(self.cache) == sizewithout:
# special case of one padding byte
self.t -= 8
if self.hashbitlen in [224, 384]:
self.update(OZ)
else:
self.update(OO)
else:
if len(self.cache) < sizewithout:
# enough space to fill the block
# use t=0 if no remaining data
if len(self.cache) == 0:
self.nullt=1
self.t -= (sizewithout - len(self.cache)) << 3
self.update(PADDING[:sizewithout - len(self.cache)])
else:
# NOT enough space, need 2 compressions
# ...add marker, pad with nulls and compress
self.t -= (self.BLKBYTES - len(self.cache)) << 3
self.update(PADDING[:self.BLKBYTES - len(self.cache)])
# ...now pad w/nulls leaving space for marker & bit count
self.t -= (sizewithout+1) << 3
self.update(PADDING[1:sizewithout+1]) # pad with zeroes
self.nullt = 1 # raise flag to set t=0 at the next _compress
# append a marker byte
if self.hashbitlen in [224, 384]:
self.update(ZZ)
else:
self.update(ZO)
self.t -= 8
# append the number of bits (long long)
self.t -= self.BLKBYTES
self.update(msglen)
hashval = []
if self.BLKBYTES == 64:
for h in self.h:
hashval.append(self._int2fourByte(h))
else:
for h in self.h:
hashval.append(self._int2eightByte(h))
self.hash = b''.join(hashval)[:self.hashbitlen >> 3]
self.state = 3
return self.hash
digest = final # may use digest() as a synonym for final()
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def hexdigest(self, data=''):
return hexlify(self.final(data)).decode('UTF-8')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# utility functions
def _fourByte2int(self, bytestr): # see also long2byt() below
""" convert a 4-byte string to an int (long) """
return struct.unpack('!L', bytestr)[0]
def _eightByte2int(self, bytestr):
""" convert a 8-byte string to an int (long long) """
return struct.unpack('!Q', bytestr)[0]
def _int2fourByte(self, x): # see also long2byt() below
""" convert a number to a 4-byte string, high order
truncation possible (in Python x could be a BIGNUM)
"""
return struct.pack('!L', x)
def _int2eightByte(self, x):
""" convert a number to a 8-byte string, high order
truncation possible (in Python x could be a BIGNUM)
"""
return struct.pack('!Q', x)
#---------------------------------------------------------------
#---------------------------------------------------------------
#---------------------------------------------------------------
def blake_hash(data):
return BLAKE(256).digest(data)
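# Minimal usage sketch (illustrative, not part of the original file): BLAKE-256 of the
# byte string b"abc"; the expected digest matches the "abc" entry in the test vectors
# in the file below.
#   assert blake_hash(b"abc").hex() == (
#       "1833a9fa7cf4086bd5fda73da32e5a1d75b4c3f89d5c436369f9d78bb2da5c28")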

View File

@@ -1,37 +0,0 @@
from blake256 import blake_hash
testVectors = [
["716f6e863f744b9ac22c97ec7b76ea5f5908bc5b2f67c61510bfc4751384ea7a", ""],
["43234ff894a9c0590d0246cfc574eb781a80958b01d7a2fa1ac73c673ba5e311", "a"],
["658c6d9019a1deddbcb3640a066dfd23471553a307ab941fd3e677ba887be329", "ab"],
["1833a9fa7cf4086bd5fda73da32e5a1d75b4c3f89d5c436369f9d78bb2da5c28", "abc"],
["35282468f3b93c5aaca6408582fced36e578f67671ed0741c332d68ac72d7aa2", "abcd"],
["9278d633efce801c6aa62987d7483d50e3c918caed7d46679551eed91fba8904", "abcde"],
["7a17ee5e289845adcafaf6ca1b05c4a281b232a71c7083f66c19ba1d1169a8d4", "abcdef"],
["ee8c7f94ff805cb2e644643010ea43b0222056420917ec70c3da764175193f8f", "abcdefg"],
["7b37c0876d29c5add7800a1823795a82b809fc12f799ff6a4b5e58d52c42b17e", "abcdefgh"],
["bdc514bea74ffbb9c3aa6470b08ceb80a88e313ad65e4a01457bbffd0acc86de", "abcdefghi"],
["12e3afb9739df8d727e93d853faeafc374cc55aedc937e5a1e66f5843b1d4c2e", "abcdefghij"],
["22297d373b751f581944bb26315133f6fda2f0bf60f65db773900f61f81b7e79", "Discard medicine more than two years old."],
["4d48d137bc9cf6d21415b805bf33f59320337d85c673998260e03a02a0d760cd", "He who has a shady past knows that nice guys finish last."],
["beba299e10f93e17d45663a6dc4b8c9349e4f5b9bac0d7832389c40a1b401e5c", "I wouldn't marry him with a ten foot pole."],
["42e082ae7f967781c6cd4e0ceeaeeb19fb2955adbdbaf8c7ec4613ac130071b3", "Free! Free!/A trip/to Mars/for 900/empty jars/Burma Shave"],
["207d06b205bfb359df91b48b6fd8aa6e4798b712d1cc5e91a254da9cef8684a3", "The days of the digital watch are numbered. -Tom Stoppard"],
["d56eab6927e371e2148b0788779aaf565d30567af2af822b6be3b90db9767a70", "Nepal premier won't resign."],
["01020709ca7fd10dc7756ce767d508d7206167d300b7a7ed76838a8547a7898c", "For every action there is an equal and opposite government program."],
["5569a6cc6535a66da221d8f6ad25008f28752d0343f3f1d757f1ecc9b1c61536", "His money is twice tainted: 'taint yours and 'taint mine."],
["8ff699b5ac7687c82600e89d0ff6cfa87e7179759184386971feb76fbae9975f", "There is no reason for any individual to have a computer in their home. -Ken Olsen, 1977"],
["f4b3a7c85a418b15ce330fd41ae0254b036ad48dd98aa37f0506a995ba9c6029", "It's a tiny change to the code and not completely disgusting. - Bob Manchek"],
["1ed94bab64fe560ef0983165fcb067e9a8a971c1db8e6fb151ff9a7c7fe877e3", "size: a.out: bad magic"],
["ff15b54992eedf9889f7b4bbb16692881aa01ed10dfc860fdb04785d8185cd3c", "The major problem is with sendmail. -Mark Horton"],
["8a0a7c417a47deec0b6474d8c247da142d2e315113a2817af3de8f45690d8652", "Give me a rock, paper and scissors and I will move the world. CCFestoon"],
["310d263fdab056a930324cdea5f46f9ea70219c1a74b01009994484113222a62", "If the enemy is within range, then so are you."],
["1aaa0903aa4cf872fe494c322a6e535698ea2140e15f26fb6088287aedceb6ba", "It's well we cannot hear the screams/That we create in others' dreams."],
["2eb81bcaa9e9185a7587a1b26299dcfb30f2a58a7f29adb584b969725457ad4f", "You remind me of a TV show, but that's all right: I watch it anyway."],
["c27b1683ef76e274680ab5492e592997b0d9d5ac5a5f4651b6036f64215256af", "C is as portable as Stonehedge!!"],
["3995cce8f32b174c22ffac916124bd095c80205d9d5f1bb08a155ac24b40d6cb", "Even if I could be Shakespeare, I think I should still choose to be Faraday. - A. Huxley"],
["496f7063f8bd479bf54e9d87e9ba53e277839ac7fdaecc5105f2879b58ee562f", "The fugacity of a constituent in a mixture of gases at a given temperature is proportional to its mole fraction. Lewis-Randall Rule"],
["2e0eff918940b01eea9539a02212f33ee84f77fab201f4287aa6167e4a1ed043", "How can you write a big system without C++? -Paul Glick"]]
for vectorSet in testVectors:
assert vectorSet[0] == blake_hash(vectorSet[1].encode()).hex()  # Python 3; the original used .encode('hex')

View File

@@ -0,0 +1,356 @@
# ed25519.py - Optimized version of the reference implementation of Ed25519
#
# Written in 2011? by Daniel J. Bernstein <djb@cr.yp.to>
# 2013 by Donald Stufft <donald@stufft.io>
# 2013 by Alex Gaynor <alex.gaynor@gmail.com>
# 2013 by Greg Price <price@mit.edu>
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
"""
NB: This code is not safe for use with secret keys or secret data.
The only safe use of this code is for verifying signatures on public messages.
Functions for computing the public key of a secret key and for signing
a message are included, namely publickey_unsafe and signature_unsafe,
for testing purposes only.
The root of the problem is that Python's long-integer arithmetic is
not designed for use in cryptography. Specifically, it may take more
or less time to execute an operation depending on the values of the
inputs, and its memory access patterns may also depend on the inputs.
This opens it to timing and cache side-channel attacks which can
disclose data to an attacker. We rely on Python's long-integer
arithmetic, so we cannot handle secrets without risking their disclosure.
"""
import hashlib
import operator
import sys
__version__ = "1.0.dev0"
# Useful for very coarse version differentiation.
PY3 = sys.version_info[0] == 3
if PY3:
indexbytes = operator.getitem
intlist2bytes = bytes
int2byte = operator.methodcaller("to_bytes", 1, "big")
else:
int2byte = chr
range = xrange
def indexbytes(buf, i):
return ord(buf[i])
def intlist2bytes(l):
return b"".join(chr(c) for c in l)
b = 256
q = 2 ** 255 - 19
l = 2 ** 252 + 27742317777372353535851937790883648493
def H(m):
return hashlib.sha512(m).digest()
def pow2(x, p):
"""== pow(x, 2**p, q)"""
while p > 0:
x = x * x % q
p -= 1
return x
def inv(z):
"""$= z^{-1} \mod q$, for z != 0"""
# Adapted from curve25519_athlon.c in djb's Curve25519.
z2 = z * z % q # 2
z9 = pow2(z2, 2) * z % q # 9
z11 = z9 * z2 % q # 11
z2_5_0 = (z11 * z11) % q * z9 % q # 31 == 2^5 - 2^0
z2_10_0 = pow2(z2_5_0, 5) * z2_5_0 % q # 2^10 - 2^0
z2_20_0 = pow2(z2_10_0, 10) * z2_10_0 % q # ...
z2_40_0 = pow2(z2_20_0, 20) * z2_20_0 % q
z2_50_0 = pow2(z2_40_0, 10) * z2_10_0 % q
z2_100_0 = pow2(z2_50_0, 50) * z2_50_0 % q
z2_200_0 = pow2(z2_100_0, 100) * z2_100_0 % q
z2_250_0 = pow2(z2_200_0, 50) * z2_50_0 % q # 2^250 - 2^0
return pow2(z2_250_0, 5) * z11 % q # 2^255 - 2^5 + 11 = q - 2
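# Illustrative check (not part of the original module): inv() computes a field
# inverse via the addition chain above, so for any nonzero z we expect
#     (z * inv(z)) % q == 1
# e.g. (12345 * inv(12345)) % q == 1.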
d = -121665 * inv(121666) % q
I = pow(2, (q - 1) // 4, q)
def xrecover(y, sign=0):
xx = (y * y - 1) * inv(d * y * y + 1)
x = pow(xx, (q + 3) // 8, q)
if (x * x - xx) % q != 0:
x = (x * I) % q
if x % 2 != sign:
x = q-x
return x
By = 4 * inv(5)
Bx = xrecover(By)
B = (Bx % q, By % q, 1, (Bx * By) % q)
ident = (0, 1, 1, 0)
def edwards_add(P, Q):
# This is formula sequence 'addition-add-2008-hwcd-3' from
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
(x1, y1, z1, t1) = P
(x2, y2, z2, t2) = Q
a = (y1-x1)*(y2-x2) % q
b = (y1+x1)*(y2+x2) % q
c = t1*2*d*t2 % q
dd = z1*2*z2 % q
e = b - a
f = dd - c
g = dd + c
h = b + a
x3 = e*f
y3 = g*h
t3 = e*h
z3 = f*g
return (x3 % q, y3 % q, z3 % q, t3 % q)
def edwards_sub(P, Q):
# This is formula sequence 'addition-add-2008-hwcd-3' from
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
(x1, y1, z1, t1) = P
(x2, y2, z2, t2) = Q
# https://eprint.iacr.org/2008/522.pdf
# The negative of (X:Y:Z) is (-X:Y:Z)
#x2 = q-x2
"""
doesn't work
x2 = q-x2
t2 = (x2*y2) % q
"""
zi = inv(z2)
x2 = q-((x2 * zi) % q)
y2 = (y2 * zi) % q
z2 = 1
t2 = (x2*y2) % q
a = (y1-x1)*(y2-x2) % q
b = (y1+x1)*(y2+x2) % q
c = t1*2*d*t2 % q
dd = z1*2*z2 % q
e = b - a
f = dd - c
g = dd + c
h = b + a
x3 = e*f
y3 = g*h
t3 = e*h
z3 = f*g
return (x3 % q, y3 % q, z3 % q, t3 % q)
def edwards_double(P):
# This is formula sequence 'dbl-2008-hwcd' from
# http://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
(x1, y1, z1, t1) = P
a = x1*x1 % q
b = y1*y1 % q
c = 2*z1*z1 % q
# dd = -a
e = ((x1+y1)*(x1+y1) - a - b) % q
g = -a + b # dd + b
f = g - c
h = -a - b # dd - b
x3 = e*f
y3 = g*h
t3 = e*h
z3 = f*g
return (x3 % q, y3 % q, z3 % q, t3 % q)
def scalarmult(P, e):
if e == 0:
return ident
Q = scalarmult(P, e // 2)
Q = edwards_double(Q)
if e & 1:
Q = edwards_add(Q, P)
return Q
# Bpow[i] == scalarmult(B, 2**i)
Bpow = []
def make_Bpow():
P = B
for i in range(253):
Bpow.append(P)
P = edwards_double(P)
make_Bpow()
def scalarmult_B(e):
"""
Implements scalarmult(B, e) more efficiently.
"""
# scalarmult(B, l) is the identity
e = e % l
P = ident
for i in range(253):
if e & 1:
P = edwards_add(P, Bpow[i])
e = e // 2
assert e == 0, e
return P
def encodeint(y):
bits = [(y >> i) & 1 for i in range(b)]
return b''.join([
int2byte(sum([bits[i * 8 + j] << j for j in range(8)]))
for i in range(b//8)
])
def encodepoint(P):
(x, y, z, t) = P
zi = inv(z)
x = (x * zi) % q
y = (y * zi) % q
bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
return b''.join([
int2byte(sum([bits[i * 8 + j] << j for j in range(8)]))
for i in range(b // 8)
])
def bit(h, i):
return (indexbytes(h, i // 8) >> (i % 8)) & 1
def publickey_unsafe(sk):
"""
Not safe to use with secret keys or secret data.
See module docstring. This function should be used for testing only.
"""
h = H(sk)
a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
A = scalarmult_B(a)
return encodepoint(A)
def Hint(m):
h = H(m)
return sum(2 ** i * bit(h, i) for i in range(2 * b))
def signature_unsafe(m, sk, pk):
"""
Not safe to use with secret keys or secret data.
See module docstring. This function should be used for testing only.
"""
h = H(sk)
a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
r = Hint(
intlist2bytes([indexbytes(h, j) for j in range(b // 8, b // 4)]) + m
)
R = scalarmult_B(r)
S = (r + Hint(encodepoint(R) + pk + m) * a) % l
return encodepoint(R) + encodeint(S)
def isoncurve(P):
(x, y, z, t) = P
return (z % q != 0 and
x*y % q == z*t % q and
(y*y - x*x - z*z - d*t*t) % q == 0)
def decodeint(s):
return sum(2 ** i * bit(s, i) for i in range(0, b))
def decodepoint(s):
y = sum(2 ** i * bit(s, i) for i in range(0, b - 1))
x = xrecover(y)
if x & 1 != bit(s, b-1):
x = q - x
P = (x, y, 1, (x*y) % q)
if not isoncurve(P):
raise ValueError("decoding point that is not on curve")
return P
class SignatureMismatch(Exception):
pass
def checkvalid(s, m, pk):
"""
Not safe to use when any argument is secret.
See module docstring. This function should be used only for
verifying public signatures of public messages.
"""
if len(s) != b // 4:
raise ValueError("signature length is wrong")
if len(pk) != b // 8:
raise ValueError("public-key length is wrong")
R = decodepoint(s[:b // 8])
A = decodepoint(pk)
S = decodeint(s[b // 8:b // 4])
h = Hint(encodepoint(R) + pk + m)
(x1, y1, z1, t1) = P = scalarmult_B(S)
(x2, y2, z2, t2) = Q = edwards_add(R, scalarmult(A, h))
if (not isoncurve(P) or not isoncurve(Q) or
(x1*z2 - x2*z1) % q != 0 or (y1*z2 - y2*z1) % q != 0):
raise SignatureMismatch("signature does not pass verification")
def is_identity(P):
return P[0] == 0
def edwards_negated(P):
(x, y, z, t) = P
zi = inv(z)
x = q - ((x * zi) % q)
y = (y * zi) % q
z = 1
t = (x * y) % q
return (x, y, z, t)
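
A minimal usage sketch of the test-only helpers above, assuming the file is importable as a module named ed25519 (the import name is an assumption, the listing does not show the path); it signs a public message with the unsafe functions, verifies it, and confirms that scalarmult and the precomputed scalarmult_B agree once normalised through encodepoint:

import ed25519  # assumed module name for the file above

sk = bytes(range(32))                        # toy secret, test use only
pk = ed25519.publickey_unsafe(sk)            # 32-byte public key
msg = b"public test message"
sig = ed25519.signature_unsafe(msg, sk, pk)
ed25519.checkvalid(sig, msg, pk)             # raises SignatureMismatch on failure
assert ed25519.encodepoint(ed25519.scalarmult(ed25519.B, 7)) == ed25519.encodepoint(ed25519.scalarmult_B(7))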

View File

@@ -0,0 +1,486 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Implementation of elliptic curves, for cryptographic applications.
#
# This module doesn't provide any way to choose a random elliptic
# curve, nor to verify that an elliptic curve was chosen randomly,
# because one can simply use NIST's standard curves.
#
# Notes from X9.62-1998 (draft):
# Nomenclature:
# - Q is a public key.
# The "Elliptic Curve Domain Parameters" include:
# - q is the "field size", which in our case equals p.
# - p is a big prime.
# - G is a point of prime order (5.1.1.1).
# - n is the order of G (5.1.1.1).
# Public-key validation (5.2.2):
# - Verify that Q is not the point at infinity.
# - Verify that X_Q and Y_Q are in [0,p-1].
# - Verify that Q is on the curve.
# - Verify that nQ is the point at infinity.
# Signature generation (5.3):
# - Pick random k from [1,n-1].
# Signature checking (5.4.2):
# - Verify that r and s are in [1,n-1].
#
# Version of 2008.11.25.
#
# Revision history:
# 2005.12.31 - Initial version.
# 2008.11.25 - Change CurveFp.is_on to contains_point.
#
# Written in 2005 by Peter Pearson and placed in the public domain.
def inverse_mod(a, m):
"""Inverse of a mod m."""
if a < 0 or m <= a:
a = a % m
# From Ferguson and Schneier, roughly:
c, d = a, m
uc, vc, ud, vd = 1, 0, 0, 1
while c != 0:
q, c, d = divmod(d, c) + (c,)
uc, vc, ud, vd = ud - q * uc, vd - q * vc, uc, vc
# At this point, d is the GCD, and ud*a+vd*m = d.
# If d == 1, this means that ud is an inverse.
assert d == 1
if ud > 0:
return ud
else:
return ud + m
def modular_sqrt(a, p):
# from http://eli.thegreenplace.net/2009/03/07/computing-modular-square-roots-in-python/
""" Find a quadratic residue (mod p) of 'a'. p
must be an odd prime.
Solve the congruence of the form:
x^2 = a (mod p)
And returns x. Note that p - x is also a root.
0 is returned if no square root exists for
these a and p.
The Tonelli-Shanks algorithm is used (except
for some simple cases in which the solution
is known from an identity). This algorithm
runs in polynomial time (unless the
generalized Riemann hypothesis is false).
"""
# Simple cases
#
if legendre_symbol(a, p) != 1:
return 0
elif a == 0:
return 0
elif p == 2:
return p
elif p % 4 == 3:
return pow(a, (p + 1) // 4, p)
# Partition p-1 to s * 2^e for an odd s (i.e.
# reduce all the powers of 2 from p-1)
#
s = p - 1
e = 0
while s % 2 == 0:
s //= 2  # integer division; '/' would produce a float under Python 3
e += 1
# Find some 'n' with a legendre symbol n|p = -1.
# Shouldn't take long.
#
n = 2
while legendre_symbol(n, p) != -1:
n += 1
# Here be dragons!
# Read the paper "Square roots from 1; 24, 51,
# 10 to Dan Shanks" by Ezra Brown for more
# information
#
# x is a guess of the square root that gets better
# with each iteration.
# b is the "fudge factor" - by how much we're off
# with the guess. The invariant x^2 = ab (mod p)
# is maintained throughout the loop.
# g is used for successive powers of n to update
# both a and b
# r is the exponent - decreases with each update
#
x = pow(a, (s + 1) // 2, p)
b = pow(a, s, p)
g = pow(n, s, p)
r = e
while True:
t = b
m = 0
for m in range(r):
if t == 1:
break
t = pow(t, 2, p)
if m == 0:
return x
gs = pow(g, 2 ** (r - m - 1), p)
g = (gs * gs) % p
x = (x * gs) % p
b = (b * g) % p
r = m
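# Illustrative check (not part of the original file): 10 is a quadratic
# residue mod 13, so modular_sqrt(10, 13) returns a root r with
# pow(r, 2, 13) == 10 (i.e. 6 or 7); for a non-residue it returns 0.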
def legendre_symbol(a, p):
""" Compute the Legendre symbol a|p using
Euler's criterion. p is a prime, a is
relatively prime to p (if p divides
a, then a|p = 0)
Returns 1 if a has a square root modulo
p, -1 otherwise.
"""
ls = pow(a, (p - 1) // 2, p)
return -1 if ls == p - 1 else ls
def jacobi_symbol(n, k):
"""Compute the Jacobi symbol of n modulo k
See http://en.wikipedia.org/wiki/Jacobi_symbol
For our application k is always prime, so this is the same as the Legendre symbol."""
assert k > 0 and k & 1, "jacobi symbol is only defined for positive odd k"
n %= k
t = 0
while n != 0:
while n & 1 == 0:
n >>= 1
r = k & 7
t ^= (r == 3 or r == 5)
n, k = k, n
t ^= (n & k & 3 == 3)
n = n % k
if k == 1:
return -1 if t else 1
return 0
class CurveFp(object):
"""Elliptic Curve over the field of integers modulo a prime."""
def __init__(self, p, a, b):
"""The curve of points satisfying y^2 = x^3 + a*x + b (mod p)."""
self.__p = p
self.__a = a
self.__b = b
def p(self):
return self.__p
def a(self):
return self.__a
def b(self):
return self.__b
def contains_point(self, x, y):
"""Is the point (x,y) on this curve?"""
return (y * y - (x * x * x + self.__a * x + self.__b)) % self.__p == 0
class Point(object):
""" A point on an elliptic curve. Altering x and y is forbidding,
but they can be read by the x() and y() methods."""
def __init__(self, curve, x, y, order=None):
"""curve, x, y, order; order (optional) is the order of this point."""
self.__curve = curve
self.__x = x
self.__y = y
self.__order = order
# self.curve is allowed to be None only for INFINITY:
if self.__curve:
assert self.__curve.contains_point(x, y)
if order:
assert self * order == INFINITY
def __eq__(self, other):
"""Return 1 if the points are identical, 0 otherwise."""
if self.__curve == other.__curve \
and self.__x == other.__x \
and self.__y == other.__y:
return 1
else:
return 0
def __add__(self, other):
"""Add one point to another point."""
# X9.62 B.3:
if other == INFINITY:
return self
if self == INFINITY:
return other
assert self.__curve == other.__curve
if self.__x == other.__x:
if (self.__y + other.__y) % self.__curve.p() == 0:
return INFINITY
else:
return self.double()
p = self.__curve.p()
l = ((other.__y - self.__y) * inverse_mod(other.__x - self.__x, p)) % p
x3 = (l * l - self.__x - other.__x) % p
y3 = (l * (self.__x - x3) - self.__y) % p
return Point(self.__curve, x3, y3)
def __sub__(self, other):
# The inverse of a point P=(xP,yP) is its reflection across the x-axis: -P=(xP,-yP).
# If you want to compute Q-P, just replace yP by -yP in the usual formula for point addition.
# X9.62 B.3:
if other == INFINITY:
return self
if self == INFINITY:
return other
assert self.__curve == other.__curve
p = self.__curve.p()
#opi = inverse_mod(other.__y, p)
opi = -other.__y % p
#print(opi)
#print(-other.__y % p)
if self.__x == other.__x:
if (self.__y + opi) % self.__curve.p() == 0:
return INFINITY
else:
return self.double()
l = ((opi - self.__y) * inverse_mod(other.__x - self.__x, p)) % p
x3 = (l * l - self.__x - other.__x) % p
y3 = (l * (self.__x - x3) - self.__y) % p
return Point(self.__curve, x3, y3)
def __mul__(self, e):
if self.__order:
e %= self.__order
if e == 0 or self == INFINITY:
return INFINITY
result, q = INFINITY, self
while e:
if e & 1:
result += q
e, q = e >> 1, q.double()
return result
"""
def __mul__(self, other):
#Multiply a point by an integer.
def leftmost_bit( x ):
assert x > 0
result = 1
while result <= x: result = 2 * result
return result // 2
e = other
if self.__order: e = e % self.__order
if e == 0: return INFINITY
if self == INFINITY: return INFINITY
assert e > 0
# From X9.62 D.3.2:
e3 = 3 * e
negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
i = leftmost_bit( e3 ) // 2
result = self
# print "Multiplying %s by %d (e3 = %d):" % ( self, other, e3 )
while i > 1:
result = result.double()
if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
# print ". . . i = %d, result = %s" % ( i, result )
i = i // 2
return result
"""
def __rmul__(self, other):
"""Multiply a point by an integer."""
return self * other
def __str__(self):
if self == INFINITY:
return "infinity"
return "(%d, %d)" % (self.__x, self.__y)
def inverse(self):
return Point(self.__curve, self.__x, -self.__y % self.__curve.p())
def double(self):
"""Return a new point that is twice the old."""
if self == INFINITY:
return INFINITY
# X9.62 B.3:
p = self.__curve.p()
a = self.__curve.a()
l = ((3 * self.__x * self.__x + a) * inverse_mod(2 * self.__y, p)) % p
x3 = (l * l - 2 * self.__x) % p
y3 = (l * (self.__x - x3) - self.__y) % p
return Point(self.__curve, x3, y3)
def x(self):
return self.__x
def y(self):
return self.__y
def pair(self):
return (self.__x, self.__y)
def curve(self):
return self.__curve
def order(self):
return self.__order
# This one point is the Point At Infinity for all purposes:
INFINITY = Point(None, None, None)
def __main__():
class FailedTest(Exception):
pass
def test_add(c, x1, y1, x2, y2, x3, y3):
"""We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
p1 = Point(c, x1, y1)
p2 = Point(c, x2, y2)
p3 = p1 + p2
print("%s + %s = %s" % (p1, p2, p3))
if p3.x() != x3 or p3.y() != y3:
raise FailedTest("Failure: should give (%d,%d)." % (x3, y3))
else:
print(" Good.")
def test_double(c, x1, y1, x3, y3):
"""We expect that on curve c, 2*(x1,y1) = (x3, y3)."""
p1 = Point(c, x1, y1)
p3 = p1.double()
print("%s doubled = %s" % (p1, p3))
if p3.x() != x3 or p3.y() != y3:
raise FailedTest("Failure: should give (%d,%d)." % (x3, y3))
else:
print(" Good.")
def test_double_infinity(c):
"""We expect that on curve c, 2*INFINITY = INFINITY."""
p1 = INFINITY
p3 = p1.double()
print("%s doubled = %s" % (p1, p3))
if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
raise FailedTest("Failure: should give (%d,%d)." % (INFINITY.x(), INFINITY.y()))
else:
print(" Good.")
def test_multiply(c, x1, y1, m, x3, y3):
"""We expect that on curve c, m*(x1,y1) = (x3,y3)."""
p1 = Point(c, x1, y1)
p3 = p1 * m
print("%s * %d = %s" % (p1, m, p3))
if p3.x() != x3 or p3.y() != y3:
raise FailedTest("Failure: should give (%d,%d)." % (x3, y3))
else:
print(" Good.")
# A few tests from X9.62 B.3:
c = CurveFp(23, 1, 1)
test_add(c, 3, 10, 9, 7, 17, 20)
test_double(c, 3, 10, 7, 12)
test_add(c, 3, 10, 3, 10, 7, 12) # (Should just invoke double.)
test_multiply(c, 3, 10, 2, 7, 12)
test_double_infinity(c)
# From X9.62 I.1 (p. 96):
g = Point(c, 13, 7, 7)
check = INFINITY
for i in range(7 + 1):
p = (i % 7) * g
print("%s * %d = %s, expected %s . . ." % (g, i, p, check))
if p == check:
print(" Good.")
else:
raise FailedTest("Bad.")
check = check + g
# NIST Curve P-192:
p = 6277101735386680763835789423207666416083908700390324961279
r = 6277101735386680763835789423176059013767194773182842284081
#s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
c192 = CurveFp(p, -3, b)
p192 = Point(c192, Gx, Gy, r)
# Checking against some sample computations presented
# in X9.62:
d = 651056770906015076056810763456358567190100156695615665659
Q = d * p192
if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
raise FailedTest("p192 * d came out wrong.")
else:
print("p192 * d came out right.")
k = 6140507067065001063065065565667405560006161556565665656654
R = k * p192
if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
raise FailedTest("k * p192 came out wrong.")
else:
print("k * p192 came out right.")
u1 = 2563697409189434185194736134579731015366492496392189760599
u2 = 6266643813348617967186477710235785849136406323338782220568
temp = u1 * p192 + u2 * Q
if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
else:
print("u1 * p192 + u2 * Q came out right.")
if __name__ == "__main__":
__main__()
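
A short usage sketch of the CurveFp/Point classes above, using the same toy curve as the built-in self-test (the module name ellipticcurve is an assumption; adjust the import to wherever this file lives in the tree):

from ellipticcurve import CurveFp, Point, INFINITY  # assumed module name

c = CurveFp(23, 1, 1)                  # y^2 = x^3 + x + 1 over GF(23)
p1 = Point(c, 3, 10)
p2 = Point(c, 9, 7)
assert (p1 + p2).pair() == (17, 20)    # matches test_add above
assert p1.double().pair() == (7, 12)   # matches test_double above
assert (p1 - p1) == INFINITY           # subtracting a point from itself
assert 2 * p1 == p1 + p1               # __rmul__ delegates to __mul__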

386  basicswap/contrib/key.py  Normal file
View File

@@ -0,0 +1,386 @@
# Copyright (c) 2019 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test-only secp256k1 elliptic curve implementation
WARNING: This code is slow, uses bad randomness, does not properly protect
keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
import random
def modinv(a, n):
"""Compute the modular inverse of a modulo n
See https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Modular_integers.
"""
t1, t2 = 0, 1
r1, r2 = n, a
while r2 != 0:
q = r1 // r2
t1, t2 = t2, t1 - q * t2
r1, r2 = r2, r1 - q * r2
if r1 > 1:
return None
if t1 < 0:
t1 += n
return t1
def jacobi_symbol(n, k):
"""Compute the Jacobi symbol of n modulo k
See http://en.wikipedia.org/wiki/Jacobi_symbol
For our application k is always prime, so this is the same as the Legendre symbol."""
assert k > 0 and k & 1, "jacobi symbol is only defined for positive odd k"
n %= k
t = 0
while n != 0:
while n & 1 == 0:
n >>= 1
r = k & 7
t ^= (r == 3 or r == 5)
n, k = k, n
t ^= (n & k & 3 == 3)
n = n % k
if k == 1:
return -1 if t else 1
return 0
def modsqrt(a, p):
"""Compute the square root of a modulo p when p % 4 = 3.
The Tonelli-Shanks algorithm can be used. See https://en.wikipedia.org/wiki/Tonelli-Shanks_algorithm
Limiting this function to only work for p % 4 = 3 means we don't need to
iterate through the loop. The highest n such that p - 1 = 2^n Q with Q odd
is n = 1. Therefore Q = (p-1)/2 and sqrt = a^((Q+1)/2) = a^((p+1)/4)
secp256k1's is defined over field of size 2**256 - 2**32 - 977, which is 3 mod 4.
"""
if p % 4 != 3:
raise NotImplementedError("modsqrt only implemented for p % 4 = 3")
sqrt = pow(a, (p + 1)//4, p)
if pow(sqrt, 2, p) == a % p:
return sqrt
return None
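# Illustrative checks (not part of the original file), usable once the module is loaded:
#   modinv(3, 7) == 5        # 3 * 5 == 15 == 1 (mod 7)
#   jacobi_symbol(4, 7) == 1 # 4 = 2^2 is a quadratic residue mod 7
#   modsqrt(4, 23) == 2      # 23 % 4 == 3, so the direct exponentiation applies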
class EllipticCurve:
def __init__(self, p, a, b):
"""Initialize elliptic curve y^2 = x^3 + a*x + b over GF(p)."""
self.p = p
self.a = a % p
self.b = b % p
def affine(self, p1):
"""Convert a Jacobian point tuple p1 to affine form, or None if at infinity.
An affine point is represented as the Jacobian (x, y, 1)"""
x1, y1, z1 = p1
if z1 == 0:
return None
inv = modinv(z1, self.p)
inv_2 = (inv**2) % self.p
inv_3 = (inv_2 * inv) % self.p
return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1)
def negate(self, p1):
"""Negate a Jacobian point tuple p1."""
x1, y1, z1 = p1
return (x1, (self.p - y1) % self.p, z1)
def on_curve(self, p1):
"""Determine whether a Jacobian tuple p is on the curve (and not infinity)"""
x1, y1, z1 = p1
z2 = pow(z1, 2, self.p)
z4 = pow(z2, 2, self.p)
return z1 != 0 and (pow(x1, 3, self.p) + self.a * x1 * z4 + self.b * z2 * z4 - pow(y1, 2, self.p)) % self.p == 0
def is_x_coord(self, x):
"""Test whether x is a valid X coordinate on the curve."""
x_3 = pow(x, 3, self.p)
return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1
def lift_x(self, x):
"""Given an X coordinate on the curve, return a corresponding affine point."""
x_3 = pow(x, 3, self.p)
v = x_3 + self.a * x + self.b
y = modsqrt(v, self.p)
if y is None:
return None
return (x, y, 1)
def double(self, p1):
"""Double a Jacobian tuple p1
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Doubling"""
x1, y1, z1 = p1
if z1 == 0:
return (0, 1, 0)
y1_2 = (y1**2) % self.p
y1_4 = (y1_2**2) % self.p
x1_2 = (x1**2) % self.p
s = (4*x1*y1_2) % self.p
m = 3*x1_2
if self.a:
m += self.a * pow(z1, 4, self.p)
m = m % self.p
x2 = (m**2 - 2*s) % self.p
y2 = (m*(s - x2) - 8*y1_4) % self.p
z2 = (2*y1*z1) % self.p
return (x2, y2, z2)
def add_mixed(self, p1, p2):
"""Add a Jacobian tuple p1 and an affine tuple p2
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition (with affine point)"""
x1, y1, z1 = p1
x2, y2, z2 = p2
assert(z2 == 1)
# Adding to the point at infinity is a no-op
if z1 == 0:
return p2
z1_2 = (z1**2) % self.p
z1_3 = (z1_2 * z1) % self.p
u2 = (x2 * z1_2) % self.p
s2 = (y2 * z1_3) % self.p
if x1 == u2:
if (y1 != s2):
# p1 and p2 are inverses. Return the point at infinity.
return (0, 1, 0)
# p1 == p2. The formulas below fail when the two points are equal.
return self.double(p1)
h = u2 - x1
r = s2 - y1
h_2 = (h**2) % self.p
h_3 = (h_2 * h) % self.p
u1_h_2 = (x1 * h_2) % self.p
x3 = (r**2 - h_3 - 2*u1_h_2) % self.p
y3 = (r*(u1_h_2 - x3) - y1*h_3) % self.p
z3 = (h*z1) % self.p
return (x3, y3, z3)
def add(self, p1, p2):
"""Add two Jacobian tuples p1 and p2
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition"""
x1, y1, z1 = p1
x2, y2, z2 = p2
# Adding the point at infinity is a no-op
if z1 == 0:
return p2
if z2 == 0:
return p1
# Adding an Affine to a Jacobian is more efficient since we save field multiplications and squarings when z = 1
if z1 == 1:
return self.add_mixed(p2, p1)
if z2 == 1:
return self.add_mixed(p1, p2)
z1_2 = (z1**2) % self.p
z1_3 = (z1_2 * z1) % self.p
z2_2 = (z2**2) % self.p
z2_3 = (z2_2 * z2) % self.p
u1 = (x1 * z2_2) % self.p
u2 = (x2 * z1_2) % self.p
s1 = (y1 * z2_3) % self.p
s2 = (y2 * z1_3) % self.p
if u1 == u2:
if (s1 != s2):
# p1 and p2 are inverses. Return the point at infinity.
return (0, 1, 0)
# p1 == p2. The formulas below fail when the two points are equal.
return self.double(p1)
h = u2 - u1
r = s2 - s1
h_2 = (h**2) % self.p
h_3 = (h_2 * h) % self.p
u1_h_2 = (u1 * h_2) % self.p
x3 = (r**2 - h_3 - 2*u1_h_2) % self.p
y3 = (r*(u1_h_2 - x3) - s1*h_3) % self.p
z3 = (h*z1*z2) % self.p
return (x3, y3, z3)
def mul(self, ps):
"""Compute a (multi) point multiplication
ps is a list of (Jacobian tuple, scalar) pairs.
"""
r = (0, 1, 0)
for i in range(255, -1, -1):
r = self.double(r)
for (p, n) in ps:
if ((n >> i) & 1):
r = self.add(r, p)
return r
SECP256K1 = EllipticCurve(2**256 - 2**32 - 977, 0, 7)
SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1)
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2
class ECPubKey():
"""A secp256k1 public key"""
def __init__(self):
"""Construct an uninitialized public key"""
self.valid = False
def set(self, data):
"""Construct a public key from a serialization in compressed or uncompressed format"""
if (len(data) == 65 and data[0] == 0x04):
p = (int.from_bytes(data[1:33], 'big'), int.from_bytes(data[33:65], 'big'), 1)
self.valid = SECP256K1.on_curve(p)
if self.valid:
self.p = p
self.compressed = False
elif (len(data) == 33 and (data[0] == 0x02 or data[0] == 0x03)):
x = int.from_bytes(data[1:33], 'big')
if SECP256K1.is_x_coord(x):
p = SECP256K1.lift_x(x)
# if the oddness of the y co-ord isn't correct, find the other
# valid y
if (p[1] & 1) != (data[0] & 1):
p = SECP256K1.negate(p)
self.p = p
self.valid = True
self.compressed = True
else:
self.valid = False
else:
self.valid = False
@property
def is_compressed(self):
return self.compressed
@property
def is_valid(self):
return self.valid
def get_bytes(self):
assert(self.valid)
p = SECP256K1.affine(self.p)
if p is None:
return None
if self.compressed:
return bytes([0x02 + (p[1] & 1)]) + p[0].to_bytes(32, 'big')
else:
return bytes([0x04]) + p[0].to_bytes(32, 'big') + p[1].to_bytes(32, 'big')
def verify_ecdsa(self, sig, msg, low_s=True):
"""Verify a strictly DER-encoded ECDSA signature against this pubkey.
See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the
ECDSA verifier algorithm"""
assert(self.valid)
# Extract r and s from the DER formatted signature. Return false for
# any DER encoding errors.
if (sig[1] + 2 != len(sig)):
return False
if (len(sig) < 4):
return False
if (sig[0] != 0x30):
return False
if (sig[2] != 0x02):
return False
rlen = sig[3]
if (len(sig) < 6 + rlen):
return False
if rlen < 1 or rlen > 33:
return False
if sig[4] >= 0x80:
return False
if (rlen > 1 and (sig[4] == 0) and not (sig[5] & 0x80)):
return False
r = int.from_bytes(sig[4:4+rlen], 'big')
if (sig[4+rlen] != 0x02):
return False
slen = sig[5+rlen]
if slen < 1 or slen > 33:
return False
if (len(sig) != 6 + rlen + slen):
return False
if sig[6+rlen] >= 0x80:
return False
if (slen > 1 and (sig[6+rlen] == 0) and not (sig[7+rlen] & 0x80)):
return False
s = int.from_bytes(sig[6+rlen:6+rlen+slen], 'big')
# Verify that r and s are within the group order
if r < 1 or s < 1 or r >= SECP256K1_ORDER or s >= SECP256K1_ORDER:
return False
if low_s and s >= SECP256K1_ORDER_HALF:
return False
z = int.from_bytes(msg, 'big')
# Run verifier algorithm on r, s
w = modinv(s, SECP256K1_ORDER)
u1 = z*w % SECP256K1_ORDER
u2 = r*w % SECP256K1_ORDER
R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, u1), (self.p, u2)]))
if R is None or R[0] != r:
return False
return True
class ECKey():
"""A secp256k1 private key"""
def __init__(self):
self.valid = False
def set(self, secret, compressed):
"""Construct a private key object with given 32-byte secret and compressed flag."""
assert(len(secret) == 32)
secret = int.from_bytes(secret, 'big')
self.valid = (secret > 0 and secret < SECP256K1_ORDER)
if self.valid:
self.secret = secret
self.compressed = compressed
def generate(self, compressed=True):
"""Generate a random private key (compressed or uncompressed)."""
self.set(random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big'), compressed)
def get_bytes(self):
"""Retrieve the 32-byte representation of this key."""
assert(self.valid)
return self.secret.to_bytes(32, 'big')
@property
def is_valid(self):
return self.valid
@property
def is_compressed(self):
return self.compressed
def get_pubkey(self):
"""Compute an ECPubKey object for this secret key."""
assert(self.valid)
ret = ECPubKey()
p = SECP256K1.mul([(SECP256K1_G, self.secret)])
ret.p = p
ret.valid = True
ret.compressed = self.compressed
return ret
def sign_ecdsa(self, msg, low_s=True):
"""Construct a DER-encoded ECDSA signature with this key.
See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the
ECDSA signer algorithm."""
assert(self.valid)
z = int.from_bytes(msg, 'big')
# Note: no RFC6979, but a simple random nonce (some tests rely on distinct transactions for the same operation)
k = random.randrange(1, SECP256K1_ORDER)
R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, k)]))
r = R[0] % SECP256K1_ORDER
s = (modinv(k, SECP256K1_ORDER) * (z + self.secret * r)) % SECP256K1_ORDER
if low_s and s > SECP256K1_ORDER_HALF:
s = SECP256K1_ORDER - s
# Represent in DER format. The byte representations of r and s have
# length rounded up (255 bits becomes 32 bytes and 256 bits becomes 33
# bytes).
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
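
A brief sketch of how the ECKey/ECPubKey pair above is typically exercised, assuming the file is importable as basicswap.contrib.key as the listing suggests; the 32-byte message is a SHA-256 digest, matching what the sign/verify routines expect:

import hashlib
from basicswap.contrib.key import ECKey  # path taken from the listing above

k = ECKey()
k.generate()                        # random compressed key (test-grade randomness only)
msg = hashlib.sha256(b"hello").digest()
sig = k.sign_ecdsa(msg)             # DER-encoded, low-S by default
assert k.get_pubkey().verify_ecdsa(sig, msg)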

View File

@@ -1,3 +0,0 @@
from .mnemonic import Mnemonic
__all__ = ["Mnemonic"]

View File

@@ -1,298 +0,0 @@
#
# Copyright (c) 2013 Pavol Rusnak
# Copyright (c) 2017 mruddy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import annotations
import hashlib
import hmac
import itertools
import os
import secrets
import typing as t
import unicodedata
PBKDF2_ROUNDS = 2048
class ConfigurationError(Exception):
pass
# Refactored code segments from <https://github.com/keis/base58>
def b58encode(v: bytes) -> str:
alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
p, acc = 1, 0
for c in reversed(v):
acc += p * c
p = p << 8
string = ""
while acc:
acc, idx = divmod(acc, 58)
string = alphabet[idx : idx + 1] + string
return string
class Mnemonic(object):
def __init__(self, language: str = "english", wordlist: list[str] | None = None):
self.radix = 2048
self.language = language
if wordlist is None:
d = os.path.join(os.path.dirname(__file__), f"wordlist/{language}.txt")
if os.path.exists(d) and os.path.isfile(d):
with open(d, "r", encoding="utf-8") as f:
wordlist = [w.strip() for w in f.readlines()]
else:
raise ConfigurationError("Language not detected")
if len(wordlist) != self.radix:
raise ConfigurationError(f"Wordlist must contain {self.radix} words.")
self.wordlist = wordlist
# Japanese must be joined by ideographic space
self.delimiter = "\u3000" if language == "japanese" else " "
@classmethod
def list_languages(cls) -> list[str]:
return [
f.split(".")[0]
for f in os.listdir(os.path.join(os.path.dirname(__file__), "wordlist"))
if f.endswith(".txt")
]
@staticmethod
def normalize_string(txt: t.AnyStr) -> str:
if isinstance(txt, bytes):
utxt = txt.decode("utf8")
elif isinstance(txt, str):
utxt = txt
else:
raise TypeError("String value expected")
return unicodedata.normalize("NFKD", utxt)
@classmethod
def detect_language(cls, code: str) -> str:
"""Scan the Mnemonic until the language becomes unambiguous, including as abbreviation prefixes.
Unfortunately, there are valid words that are ambiguous between languages, which are complete words
in one language and are prefixes in another:
english: abandon ... about
french: abandon ... aboutir
If prefixes remain ambiguous, require exactly one language where word(s) match exactly.
"""
code = cls.normalize_string(code)
possible = set(cls(lang) for lang in cls.list_languages())
words = set(code.split())
for word in words:
# possible languages have candidate(s) starting with the word/prefix
possible = set(
p for p in possible if any(c.startswith(word) for c in p.wordlist)
)
if not possible:
raise ConfigurationError(f"Language unrecognized for {word!r}")
if len(possible) == 1:
return possible.pop().language
# Multiple languages match: A prefix in many, but an exact match in one determines language.
complete = set()
for word in words:
exact = set(p for p in possible if word in p.wordlist)
if len(exact) == 1:
complete.update(exact)
if len(complete) == 1:
return complete.pop().language
raise ConfigurationError(
f"Language ambiguous between {', '.join(p.language for p in possible)}"
)
def generate(self, strength: int = 128) -> str:
"""
Create a new mnemonic using a random generated number as entropy.
As defined in BIP39, the entropy must be a multiple of 32 bits, and its size must be between 128 and 256 bits.
Therefore the possible values for `strength` are 128, 160, 192, 224 and 256.
If not provided, the default entropy length will be set to 128 bits.
The return is a list of words that encodes the generated entropy.
:param strength: Number of bytes used as entropy
:type strength: int
:return: A randomly generated mnemonic
:rtype: str
"""
if strength not in [128, 160, 192, 224, 256]:
raise ValueError(
"Invalid strength value. Allowed values are [128, 160, 192, 224, 256]."
)
return self.to_mnemonic(secrets.token_bytes(strength // 8))
# Adapted from <http://tinyurl.com/oxmn476>
def to_entropy(self, words: list[str] | str) -> bytearray:
if not isinstance(words, list):
words = words.split(" ")
if len(words) not in [12, 15, 18, 21, 24]:
raise ValueError(
"Number of words must be one of the following: [12, 15, 18, 21, 24], but it is not (%d)."
% len(words)
)
# Look up all the words in the list and construct the
# concatenation of the original entropy and the checksum.
concatLenBits = len(words) * 11
concatBits = [False] * concatLenBits
wordindex = 0
for word in words:
# Find the words index in the wordlist
ndx = self.wordlist.index(self.normalize_string(word))
if ndx < 0:
raise LookupError('Unable to find "%s" in word list.' % word)
# Set the next 11 bits to the value of the index.
for ii in range(11):
concatBits[(wordindex * 11) + ii] = (ndx & (1 << (10 - ii))) != 0
wordindex += 1
checksumLengthBits = concatLenBits // 33
entropyLengthBits = concatLenBits - checksumLengthBits
# Extract original entropy as bytes.
entropy = bytearray(entropyLengthBits // 8)
for ii in range(len(entropy)):
for jj in range(8):
if concatBits[(ii * 8) + jj]:
entropy[ii] |= 1 << (7 - jj)
# Take the digest of the entropy.
hashBytes = hashlib.sha256(entropy).digest()
hashBits = list(
itertools.chain.from_iterable(
[c & (1 << (7 - i)) != 0 for i in range(8)] for c in hashBytes
)
)
# Check all the checksum bits.
for i in range(checksumLengthBits):
if concatBits[entropyLengthBits + i] != hashBits[i]:
raise ValueError("Failed checksum.")
return entropy
def to_mnemonic(self, data: bytes) -> str:
if len(data) not in [16, 20, 24, 28, 32]:
raise ValueError(
f"Data length should be one of the following: [16, 20, 24, 28, 32], but it is not {len(data)}."
)
h = hashlib.sha256(data).hexdigest()
b = (
bin(int.from_bytes(data, byteorder="big"))[2:].zfill(len(data) * 8)
+ bin(int(h, 16))[2:].zfill(256)[: len(data) * 8 // 32]
)
result = []
for i in range(len(b) // 11):
idx = int(b[i * 11 : (i + 1) * 11], 2)
result.append(self.wordlist[idx])
return self.delimiter.join(result)
def check(self, mnemonic: str) -> bool:
mnemonic_list = self.normalize_string(mnemonic).split(" ")
# list of valid mnemonic lengths
if len(mnemonic_list) not in [12, 15, 18, 21, 24]:
return False
try:
idx = map(
lambda x: bin(self.wordlist.index(x))[2:].zfill(11), mnemonic_list
)
b = "".join(idx)
except ValueError:
return False
l = len(b) # noqa: E741
d = b[: l // 33 * 32]
h = b[-l // 33 :]
nd = int(d, 2).to_bytes(l // 33 * 4, byteorder="big")
nh = bin(int(hashlib.sha256(nd).hexdigest(), 16))[2:].zfill(256)[: l // 33]
return h == nh
def expand_word(self, prefix: str) -> str:
if prefix in self.wordlist:
return prefix
else:
matches = [word for word in self.wordlist if word.startswith(prefix)]
if len(matches) == 1: # matched exactly one word in the wordlist
return matches[0]
else:
# exact match not found.
# this is not a validation routine, just return the input
return prefix
def expand(self, mnemonic: str) -> str:
return " ".join(map(self.expand_word, mnemonic.split(" ")))
@classmethod
def to_seed(cls, mnemonic: str, passphrase: str = "") -> bytes:
mnemonic = cls.normalize_string(mnemonic)
passphrase = cls.normalize_string(passphrase)
passphrase = "mnemonic" + passphrase
mnemonic_bytes = mnemonic.encode("utf-8")
passphrase_bytes = passphrase.encode("utf-8")
stretched = hashlib.pbkdf2_hmac(
"sha512", mnemonic_bytes, passphrase_bytes, PBKDF2_ROUNDS
)
return stretched[:64]
@staticmethod
def to_hd_master_key(seed: bytes, testnet: bool = False) -> str:
if len(seed) != 64:
raise ValueError("Provided seed should have length of 64")
# Compute HMAC-SHA512 of seed
seed = hmac.new(b"Bitcoin seed", seed, digestmod=hashlib.sha512).digest()
# Serialization format can be found at: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format
xprv = b"\x04\x88\xad\xe4" # Version for private mainnet
if testnet:
xprv = b"\x04\x35\x83\x94" # Version for private testnet
xprv += b"\x00" * 9 # Depth, parent fingerprint, and child number
xprv += seed[32:] # Chain code
xprv += b"\x00" + seed[:32] # Master key
# Double hash using SHA256
hashed_xprv = hashlib.sha256(xprv).digest()
hashed_xprv = hashlib.sha256(hashed_xprv).digest()
# Append 4 bytes of checksum
xprv += hashed_xprv[:4]
# Return base58
return b58encode(xprv)
def main() -> None:
import sys
if len(sys.argv) > 1:
hex_data = sys.argv[1]
else:
hex_data = sys.stdin.readline().strip()
data = bytes.fromhex(hex_data)
m = Mnemonic("english")
print(m.to_mnemonic(data))
if __name__ == "__main__":
main()
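
The removed Mnemonic class above follows the standard BIP39 flow; a minimal sketch of that flow, assuming the module is importable as mnemonic (import path assumed) and that the bundled english wordlist file is present:

from mnemonic import Mnemonic   # assumed import path for the removed module

m = Mnemonic("english")
words = m.generate(strength=128)                # 12 words for 128 bits of entropy
assert m.check(words)                           # checksum round-trips
entropy = m.to_entropy(words)
assert m.to_mnemonic(bytes(entropy)) == words
seed = Mnemonic.to_seed(words, passphrase="")   # 64-byte PBKDF2-SHA512 seed
xprv = Mnemonic.to_hd_master_key(seed)          # base58 BIP32 master key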

View File

@@ -1 +0,0 @@
# Marker file for PEP 561.

File diff suppressed because it is too large (12 files)

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2019 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utility functions related to output descriptors"""
import re
INPUT_CHARSET = "0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ "
CHECKSUM_CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
GENERATOR = [0xf5dee51989, 0xa9fdca3312, 0x1bab10e32d, 0x3706b1677a, 0x644d626ffd]
def descsum_polymod(symbols):
"""Internal function that computes the descriptor checksum."""
chk = 1
for value in symbols:
top = chk >> 35
chk = (chk & 0x7ffffffff) << 5 ^ value
for i in range(5):
chk ^= GENERATOR[i] if ((top >> i) & 1) else 0
return chk
def descsum_expand(s):
"""Internal function that does the character to symbol expansion"""
groups = []
symbols = []
for c in s:
if c not in INPUT_CHARSET:
return None
v = INPUT_CHARSET.find(c)
symbols.append(v & 31)
groups.append(v >> 5)
if len(groups) == 3:
symbols.append(groups[0] * 9 + groups[1] * 3 + groups[2])
groups = []
if len(groups) == 1:
symbols.append(groups[0])
elif len(groups) == 2:
symbols.append(groups[0] * 3 + groups[1])
return symbols
def descsum_create(s):
"""Add a checksum to a descriptor without"""
symbols = descsum_expand(s) + [0, 0, 0, 0, 0, 0, 0, 0]
checksum = descsum_polymod(symbols) ^ 1
return s + '#' + ''.join(CHECKSUM_CHARSET[(checksum >> (5 * (7 - i))) & 31] for i in range(8))
def descsum_check(s, require=True):
"""Verify that the checksum is correct in a descriptor"""
if '#' not in s:
return not require
if s[-9] != '#':
return False
if not all(x in CHECKSUM_CHARSET for x in s[-8:]):
return False
symbols = descsum_expand(s[:-9]) + [CHECKSUM_CHARSET.find(x) for x in s[-8:]]
return descsum_polymod(symbols) == 1
def drop_origins(s):
'''Drop the key origins from a descriptor'''
desc = re.sub(r'\[.+?\]', '', s)
if '#' in s:
desc = desc[:desc.index('#')]
return descsum_create(desc)
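
The checksum helpers in the removed descriptors.py implement the output-descriptor checksum scheme used by Bitcoin Core; a hedged sketch of how they fit together (module name and the descriptor string are illustrative assumptions):

from descriptors import descsum_create, descsum_check, drop_origins  # assumed import

desc = "wpkh([d34db33f/84h/0h/0h]0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798)"
with_checksum = descsum_create(desc)           # appends '#' plus 8 checksum characters
assert descsum_check(with_checksum)
assert descsum_check(desc, require=False)      # missing checksum is fine when not required
assert '[' not in drop_origins(with_checksum)  # key origins stripped, checksum recomputed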

View File

@@ -0,0 +1,393 @@
# Copyright (c) 2019 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test-only secp256k1 elliptic curve implementation
WARNING: This code is slow, uses bad randomness, does not properly protect
keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
import random
def modinv(a, n):
"""Compute the modular inverse of a modulo n
See https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Modular_integers.
"""
t1, t2 = 0, 1
r1, r2 = n, a
while r2 != 0:
q = r1 // r2
t1, t2 = t2, t1 - q * t2
r1, r2 = r2, r1 - q * r2
if r1 > 1:
return None
if t1 < 0:
t1 += n
return t1
def jacobi_symbol(n, k):
"""Compute the Jacobi symbol of n modulo k
See http://en.wikipedia.org/wiki/Jacobi_symbol
For our application k is always prime, so this is the same as the Legendre symbol."""
assert k > 0 and k & 1, "jacobi symbol is only defined for positive odd k"
n %= k
t = 0
while n != 0:
while n & 1 == 0:
n >>= 1
r = k & 7
t ^= (r == 3 or r == 5)
n, k = k, n
t ^= (n & k & 3 == 3)
n = n % k
if k == 1:
return -1 if t else 1
return 0
def modsqrt(a, p):
"""Compute the square root of a modulo p when p % 4 = 3.
The Tonelli-Shanks algorithm can be used. See https://en.wikipedia.org/wiki/Tonelli-Shanks_algorithm
Limiting this function to only work for p % 4 = 3 means we don't need to
iterate through the loop. The highest n such that p - 1 = 2^n Q with Q odd
is n = 1. Therefore Q = (p-1)/2 and sqrt = a^((Q+1)/2) = a^((p+1)/4)
secp256k1's is defined over field of size 2**256 - 2**32 - 977, which is 3 mod 4.
"""
if p % 4 != 3:
raise NotImplementedError("modsqrt only implemented for p % 4 = 3")
sqrt = pow(a, (p + 1)//4, p)
if pow(sqrt, 2, p) == a % p:
return sqrt
return None
class EllipticCurve:
def __init__(self, p, a, b):
"""Initialize elliptic curve y^2 = x^3 + a*x + b over GF(p)."""
self.p = p
self.a = a % p
self.b = b % p
def affine(self, p1):
"""Convert a Jacobian point tuple p1 to affine form, or None if at infinity.
An affine point is represented as the Jacobian (x, y, 1)"""
x1, y1, z1 = p1
if z1 == 0:
return None
inv = modinv(z1, self.p)
inv_2 = (inv**2) % self.p
inv_3 = (inv_2 * inv) % self.p
return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1)
def negate(self, p1):
"""Negate a Jacobian point tuple p1."""
x1, y1, z1 = p1
return (x1, (self.p - y1) % self.p, z1)
def on_curve(self, p1):
"""Determine whether a Jacobian tuple p is on the curve (and not infinity)"""
x1, y1, z1 = p1
z2 = pow(z1, 2, self.p)
z4 = pow(z2, 2, self.p)
return z1 != 0 and (pow(x1, 3, self.p) + self.a * x1 * z4 + self.b * z2 * z4 - pow(y1, 2, self.p)) % self.p == 0
def is_x_coord(self, x):
"""Test whether x is a valid X coordinate on the curve."""
x_3 = pow(x, 3, self.p)
return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1
def lift_x(self, x):
"""Given an X coordinate on the curve, return a corresponding affine point."""
x_3 = pow(x, 3, self.p)
v = x_3 + self.a * x + self.b
y = modsqrt(v, self.p)
if y is None:
return None
return (x, y, 1)
def double(self, p1):
"""Double a Jacobian tuple p1
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Doubling"""
x1, y1, z1 = p1
if z1 == 0:
return (0, 1, 0)
y1_2 = (y1**2) % self.p
y1_4 = (y1_2**2) % self.p
x1_2 = (x1**2) % self.p
s = (4*x1*y1_2) % self.p
m = 3*x1_2
if self.a:
m += self.a * pow(z1, 4, self.p)
m = m % self.p
x2 = (m**2 - 2*s) % self.p
y2 = (m*(s - x2) - 8*y1_4) % self.p
z2 = (2*y1*z1) % self.p
return (x2, y2, z2)
def add_mixed(self, p1, p2):
"""Add a Jacobian tuple p1 and an affine tuple p2
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition (with affine point)"""
x1, y1, z1 = p1
x2, y2, z2 = p2
assert(z2 == 1)
# Adding to the point at infinity is a no-op
if z1 == 0:
return p2
z1_2 = (z1**2) % self.p
z1_3 = (z1_2 * z1) % self.p
u2 = (x2 * z1_2) % self.p
s2 = (y2 * z1_3) % self.p
if x1 == u2:
if (y1 != s2):
# p1 and p2 are inverses. Return the point at infinity.
return (0, 1, 0)
# p1 == p2. The formulas below fail when the two points are equal.
return self.double(p1)
h = u2 - x1
r = s2 - y1
h_2 = (h**2) % self.p
h_3 = (h_2 * h) % self.p
u1_h_2 = (x1 * h_2) % self.p
x3 = (r**2 - h_3 - 2*u1_h_2) % self.p
y3 = (r*(u1_h_2 - x3) - y1*h_3) % self.p
z3 = (h*z1) % self.p
return (x3, y3, z3)
def add(self, p1, p2):
"""Add two Jacobian tuples p1 and p2
See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition"""
x1, y1, z1 = p1
x2, y2, z2 = p2
# Adding the point at infinity is a no-op
if z1 == 0:
return p2
if z2 == 0:
return p1
# Adding an Affine to a Jacobian is more efficient since we save field multiplications and squarings when z = 1
if z1 == 1:
return self.add_mixed(p2, p1)
if z2 == 1:
return self.add_mixed(p1, p2)
z1_2 = (z1**2) % self.p
z1_3 = (z1_2 * z1) % self.p
z2_2 = (z2**2) % self.p
z2_3 = (z2_2 * z2) % self.p
u1 = (x1 * z2_2) % self.p
u2 = (x2 * z1_2) % self.p
s1 = (y1 * z2_3) % self.p
s2 = (y2 * z1_3) % self.p
if u1 == u2:
if (s1 != s2):
# p1 and p2 are inverses. Return the point at infinity.
return (0, 1, 0)
# p1 == p2. The formulas below fail when the two points are equal.
return self.double(p1)
h = u2 - u1
r = s2 - s1
h_2 = (h**2) % self.p
h_3 = (h_2 * h) % self.p
u1_h_2 = (u1 * h_2) % self.p
x3 = (r**2 - h_3 - 2*u1_h_2) % self.p
y3 = (r*(u1_h_2 - x3) - s1*h_3) % self.p
z3 = (h*z1*z2) % self.p
return (x3, y3, z3)
def mul(self, ps):
"""Compute a (multi) point multiplication
ps is a list of (Jacobian tuple, scalar) pairs.
"""
r = (0, 1, 0)
for i in range(255, -1, -1):
r = self.double(r)
for (p, n) in ps:
if ((n >> i) & 1):
r = self.add(r, p)
return r
SECP256K1 = EllipticCurve(2**256 - 2**32 - 977, 0, 7)
SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1)
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2
class ECPubKey():
"""A secp256k1 public key"""
def __init__(self):
"""Construct an uninitialized public key"""
self.valid = False
def set_int(self, x, y):
p = (x, y, 1)
self.valid = SECP256K1.on_curve(p)
if self.valid:
self.p = p
self.compressed = False
def set(self, data):
"""Construct a public key from a serialization in compressed or uncompressed format"""
if (len(data) == 65 and data[0] == 0x04):
p = (int.from_bytes(data[1:33], 'big'), int.from_bytes(data[33:65], 'big'), 1)
self.valid = SECP256K1.on_curve(p)
if self.valid:
self.p = p
self.compressed = False
elif (len(data) == 33 and (data[0] == 0x02 or data[0] == 0x03)):
x = int.from_bytes(data[1:33], 'big')
if SECP256K1.is_x_coord(x):
p = SECP256K1.lift_x(x)
# if the oddness of the y co-ord isn't correct, find the other
# valid y
if (p[1] & 1) != (data[0] & 1):
p = SECP256K1.negate(p)
self.p = p
self.valid = True
self.compressed = True
else:
self.valid = False
else:
self.valid = False
@property
def is_compressed(self):
return self.compressed
@property
def is_valid(self):
return self.valid
def get_bytes(self):
assert(self.valid)
p = SECP256K1.affine(self.p)
if p is None:
return None
if self.compressed:
return bytes([0x02 + (p[1] & 1)]) + p[0].to_bytes(32, 'big')
else:
return bytes([0x04]) + p[0].to_bytes(32, 'big') + p[1].to_bytes(32, 'big')
def verify_ecdsa(self, sig, msg, low_s=True):
"""Verify a strictly DER-encoded ECDSA signature against this pubkey.
See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the
ECDSA verifier algorithm"""
assert(self.valid)
# Extract r and s from the DER formatted signature. Return false for
# any DER encoding errors.
if (sig[1] + 2 != len(sig)):
return False
if (len(sig) < 4):
return False
if (sig[0] != 0x30):
return False
if (sig[2] != 0x02):
return False
rlen = sig[3]
if (len(sig) < 6 + rlen):
return False
if rlen < 1 or rlen > 33:
return False
if sig[4] >= 0x80:
return False
if (rlen > 1 and (sig[4] == 0) and not (sig[5] & 0x80)):
return False
r = int.from_bytes(sig[4:4+rlen], 'big')
if (sig[4+rlen] != 0x02):
return False
slen = sig[5+rlen]
if slen < 1 or slen > 33:
return False
if (len(sig) != 6 + rlen + slen):
return False
if sig[6+rlen] >= 0x80:
return False
if (slen > 1 and (sig[6+rlen] == 0) and not (sig[7+rlen] & 0x80)):
return False
s = int.from_bytes(sig[6+rlen:6+rlen+slen], 'big')
# Verify that r and s are within the group order
if r < 1 or s < 1 or r >= SECP256K1_ORDER or s >= SECP256K1_ORDER:
return False
if low_s and s >= SECP256K1_ORDER_HALF:
return False
z = int.from_bytes(msg, 'big')
# Run verifier algorithm on r, s
w = modinv(s, SECP256K1_ORDER)
u1 = z*w % SECP256K1_ORDER
u2 = r*w % SECP256K1_ORDER
R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, u1), (self.p, u2)]))
if R is None or R[0] != r:
return False
return True
class ECKey():
"""A secp256k1 private key"""
def __init__(self):
self.valid = False
def set(self, secret, compressed):
"""Construct a private key object with given 32-byte secret and compressed flag."""
assert(len(secret) == 32)
secret = int.from_bytes(secret, 'big')
self.valid = (secret > 0 and secret < SECP256K1_ORDER)
if self.valid:
self.secret = secret
self.compressed = compressed
def generate(self, compressed=True):
"""Generate a random private key (compressed or uncompressed)."""
self.set(random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big'), compressed)
def get_bytes(self):
"""Retrieve the 32-byte representation of this key."""
assert(self.valid)
return self.secret.to_bytes(32, 'big')
@property
def is_valid(self):
return self.valid
@property
def is_compressed(self):
return self.compressed
def get_pubkey(self):
"""Compute an ECPubKey object for this secret key."""
assert(self.valid)
ret = ECPubKey()
p = SECP256K1.mul([(SECP256K1_G, self.secret)])
ret.p = p
ret.valid = True
ret.compressed = self.compressed
return ret
def sign_ecdsa(self, msg, low_s=True):
"""Construct a DER-encoded ECDSA signature with this key.
See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the
ECDSA signer algorithm."""
assert(self.valid)
z = int.from_bytes(msg, 'big')
# Note: no RFC6979, but a simple random nonce (some tests rely on distinct transactions for the same operation)
k = random.randrange(1, SECP256K1_ORDER)
R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, k)]))
r = R[0] % SECP256K1_ORDER
s = (modinv(k, SECP256K1_ORDER) * (z + self.secret * r)) % SECP256K1_ORDER
if low_s and s > SECP256K1_ORDER_HALF:
s = SECP256K1_ORDER - s
# Represent in DER format. The byte representations of r and s have
# length rounded up (255 bits becomes 32 bytes and 256 bits becomes 33
# bytes).
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
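
This copy adds set_int() on top of the contrib version; a small sketch of key serialization round-tripping through ECPubKey.set (the import path is assumed, e.g. a test-framework key module):

from key import ECKey, ECPubKey   # assumed import path for this copy

k = ECKey()
k.set(b'\x01' * 32, compressed=True)        # fixed test secret
pub_bytes = k.get_pubkey().get_bytes()      # 33-byte compressed serialization
p = ECPubKey()
p.set(pub_bytes)                            # parse it back
assert p.is_valid and p.is_compressed
assert p.get_bytes() == pub_bytes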

File diff suppressed because it is too large (2 files)

View File

@@ -5,22 +5,20 @@
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import unhexlify
from decimal import Decimal, ROUND_DOWN
from subprocess import CalledProcessError
import hashlib
import inspect
import json
import logging
import os
import pathlib
import platform
import random
import re
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
from collections.abc import Callable
from typing import Optional
from io import BytesIO
logger = logging.getLogger("TestFramework.utils")
@@ -30,46 +28,23 @@ logger = logging.getLogger("TestFramework.utils")
def assert_approx(v, vexp, vspan=0.00001):
"""Assert that `v` is within `vspan` of `vexp`"""
if isinstance(v, Decimal) or isinstance(vexp, Decimal):
v=Decimal(v)
vexp=Decimal(vexp)
vspan=Decimal(vspan)
if v < vexp - vspan:
raise AssertionError("%s < [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
if v > vexp + vspan:
raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
def assert_fee_amount(fee, tx_size, feerate_BTC_kvB):
"""Assert the fee is in range."""
assert isinstance(tx_size, int)
target_fee = get_fee(tx_size, feerate_BTC_kvB)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
high_fee = get_fee(tx_size + 2, feerate_BTC_kvB)
if fee > high_fee:
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def summarise_dict_differences(thing1, thing2):
if not isinstance(thing1, dict) or not isinstance(thing2, dict):
return thing1, thing2
d1, d2 = {}, {}
for k in sorted(thing1.keys()):
if k not in thing2:
d1[k] = thing1[k]
elif thing1[k] != thing2[k]:
d1[k], d2[k] = summarise_dict_differences(thing1[k], thing2[k])
for k in sorted(thing2.keys()):
if k not in thing1:
d2[k] = thing2[k]
return d1, d2
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 and not args and isinstance(thing1, dict) and isinstance(thing2, dict):
d1,d2 = summarise_dict_differences(thing1, thing2)
raise AssertionError("not(%s == %s)\n in particular not(%s == %s)" % (thing1, thing2, d1, d2))
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
@@ -104,7 +79,7 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
raise AssertionError("No exception raised")
def assert_raises_process_error(returncode: int, output: str, fun: Callable, *args, **kwds):
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
@@ -112,9 +87,9 @@ def assert_raises_process_error(returncode: int, output: str, fun: Callable, *ar
no CalledProcessError was raised or if the return code and output are not as expected.
Args:
returncode: the process return code.
output: [a substring of] the process output.
fun: the function to call. This should execute a process.
returncode (int): the process return code.
output (string): [a substring of] the process output.
fun (function): the function to call. This should execute a process.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
@@ -129,7 +104,7 @@ def assert_raises_process_error(returncode: int, output: str, fun: Callable, *ar
raise AssertionError("No exception raised")
def assert_raises_rpc_error(code: Optional[int], message: Optional[str], fun: Callable, *args, **kwds):
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
@@ -137,11 +112,11 @@ def assert_raises_rpc_error(code: Optional[int], message: Optional[str], fun: Ca
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code: the error code returned by the RPC call (defined in src/rpc/protocol.h).
Set to None if checking the error code is not required.
message: [a substring of] the error string returned by the RPC call.
Set to None if checking the error string is not required.
fun: the function to call. This should be the name of an RPC.
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
@@ -228,45 +203,29 @@ def check_json_precision():
raise RuntimeError("JSON encode/decode loses precision")
def EncodeDecimal(o):
if isinstance(o, Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def ceildiv(a, b):
"""
Divide 2 ints and round up to next int rather than round down
Implementation requires python integers, which have a // operator that does floor division.
Other types like decimal.Decimal whose // operator truncates towards 0 will not work.
"""
assert isinstance(a, int)
assert isinstance(b, int)
return -(-a // b)
def get_fee(tx_size, feerate_btc_kvb):
"""Calculate the fee in BTC given a feerate is BTC/kvB. Reflects CFeeRate::GetFee"""
feerate_sat_kvb = int(feerate_btc_kvb * Decimal(1e8)) # Fee in sat/kvb as an int to avoid float precision errors
target_fee_sat = ceildiv(feerate_sat_kvb * tx_size, 1000) # Round calculated fee up to nearest sat
return target_fee_sat / Decimal(1e8) # Return result in BTC
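As an illustration of the ceiling rounding above, here is a minimal self-contained sketch (standard library only; the helper name _ceildiv exists only for this example, not in the framework):
from decimal import Decimal
def _ceildiv(a: int, b: int) -> int:
    # Floor division of the negated value rounds up for positive integers.
    return -(-a // b)
# 251 vbytes at 0.00001 BTC/kvB: 1000 sat/kvB * 251 / 1000 rounds up to 251 sat.
feerate_sat_kvb = int(Decimal("0.00001") * Decimal(1e8))
assert feerate_sat_kvb == 1000
assert _ceildiv(feerate_sat_kvb * 251, 1000) == 251
assert _ceildiv(feerate_sat_kvb * 251, 1000) / Decimal(1e8) == Decimal("0.00000251")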
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
def wait_until_helper_internal(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
"""Sleep until the predicate resolves to be True.
Warning: Note that this method is not recommended to be used in tests as it is
not aware of the context of the test framework. Using the `wait_until()` members
from `BitcoinTestFramework` or `P2PInterface` class ensures the timeout is
properly scaled. Furthermore, `wait_until()` from `P2PInterface` class in
`p2p.py` has a preset lock.
"""
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
timeout = timeout * timeout_factor
@@ -294,16 +253,6 @@ def wait_until_helper_internal(predicate, *, attempts=float('inf'), timeout=floa
raise RuntimeError('Unreachable')
def sha256sum_file(filename):
h = hashlib.sha256()
with open(filename, 'rb') as f:
d = f.read(4096)
while len(d) > 0:
h.update(d)
d = f.read(4096)
return h.digest()
# RPC/P2P connection constants and functions
############################################
@@ -320,15 +269,15 @@ class PortSeed:
n = None
def get_rpc_proxy(url: str, node_number: int, *, timeout: Optional[int]=None, coveragedir: Optional[str]=None) -> coverage.AuthServiceProxyWrapper:
def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
Args:
url: URL of the RPC server to call
node_number: the node number (or id) that this calls to
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout: HTTP timeout in seconds
coveragedir: Directory
timeout (int): HTTP timeout in seconds
coveragedir (str): Directory
Returns:
AuthServiceProxy. convenience object for making RPC calls.
@@ -339,10 +288,11 @@ def get_rpc_proxy(url: str, node_number: int, *, timeout: Optional[int]=None, co
proxy_kwargs['timeout'] = int(timeout)
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, url, coverage_logfile)
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
@@ -371,76 +321,38 @@ def rpc_url(datadir, i, chain, rpchost):
################
def initialize_datadir(dirname, n, chain, disable_autoconnect=True):
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
os.makedirs(datadir)
write_config(os.path.join(datadir, "particl.conf"), n=n, chain=chain, disable_autoconnect=disable_autoconnect)
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def write_config(config_path, *, n, chain, extra_config="", disable_autoconnect=True):
# Translate chain subdirectory name to config name
if chain == 'testnet':
# Translate chain name to config name
if chain == 'testnet3':
chain_name_conf_arg = 'testnet'
chain_name_conf_section = 'test'
else:
chain_name_conf_arg = chain
chain_name_conf_section = chain
with open(config_path, 'w', encoding='utf8') as f:
if chain_name_conf_arg:
f.write("{}=1\n".format(chain_name_conf_arg))
if chain_name_conf_section:
f.write("[{}]\n".format(chain_name_conf_section))
with open(os.path.join(datadir, "particl.conf"), 'w', encoding='utf8') as f:
f.write("{}=1\n".format(chain_name_conf_arg))
f.write("[{}]\n".format(chain_name_conf_section))
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
# Disable server-side timeouts to avoid intermittent issues
f.write("rpcservertimeout=99000\n")
f.write("rpcdoccheck=1\n")
f.write("fallbackfee=0.0002\n")
f.write("server=1\n")
f.write("keypool=1\n")
f.write("discover=0\n")
f.write("dnsseed=0\n")
f.write("fixedseeds=0\n")
f.write("listenonion=0\n")
# Increase peertimeout to avoid disconnects while using mocktime.
# peertimeout is measured in mock time, so setting it large enough to
# cover any duration in mock time is sufficient. It can be overridden
# in tests.
f.write("peertimeout=999999999\n")
f.write("printtoconsole=0\n")
f.write("upnp=0\n")
f.write("natpmp=0\n")
f.write("shrinkdebugfile=0\n")
f.write("deprecatedrpc=create_bdb\n") # Required to run the tests
# To improve SQLite wallet performance so that the tests don't timeout, use -unsafesqlitesync
f.write("unsafesqlitesync=1\n")
if disable_autoconnect:
f.write("connect=0\n")
f.write(extra_config)
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def get_datadir_path(dirname, n):
return pathlib.Path(dirname) / f"node{n}"
def get_temp_default_datadir(temp_dir: pathlib.Path) -> tuple[dict, pathlib.Path]:
"""Return os-specific environment variables that can be set to make the
GetDefaultDataDir() function return a datadir path under the provided
temp_dir, as well as the complete path it would return."""
if platform.system() == "Windows":
env = dict(APPDATA=str(temp_dir))
datadir = temp_dir / "Particl"
else:
env = dict(HOME=str(temp_dir))
if platform.system() == "Darwin":
datadir = temp_dir / "Library/Application Support/Particl"
else:
datadir = temp_dir / ".particl"
return env, datadir
return os.path.join(dirname, "node" + str(n))
def append_config(datadir, options):
@@ -483,7 +395,7 @@ def delete_cookie_file(datadir, chain):
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getdeploymentinfo()['deployments'][key]['active']
return node.getblockchaininfo()['softforks'][key]['active']
def set_node_times(nodes, t):
@@ -491,51 +403,208 @@ def set_node_times(nodes, t):
node.setmocktime(t)
def check_node_connections(*, node, num_in, num_out):
info = node.getnetworkinfo()
assert_equal(info["connections_in"], num_in)
assert_equal(info["connections_out"], num_out)
def disconnect_nodes(from_connection, node_num):
def get_peer_ids():
result = []
for peer in from_connection.getpeerinfo():
if "testnode{}".format(node_num) in peer['subver']:
result.append(peer['id'])
return result
peer_ids = get_peer_ids()
if not peer_ids:
logger.warning("disconnect_nodes: {} and {} were not connected".format(
from_connection.index,
node_num,
))
return
for peer_id in peer_ids:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
wait_until(lambda: not get_peer_ids(), timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
# See comments in net_processing:
# * Must have a version message before anything else
# * Must have a verack message before anything else
wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
# Transaction/Block functions
#############################
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1, blockhash)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert confirmations_required >= 0
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransactionwithwallet(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], 0)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
to_generate = int(0.5 * count) + 101
while to_generate > 0:
node.generate(min(25, to_generate))
to_generate -= 25
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert len(utxos) >= count
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions). The
# total serialized size of the txouts is about 66k vbytes.
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = []
from .messages import CTxOut
from .script import CScript, OP_RETURN
txouts = [CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, b'\x01'*67437]))]
assert_equal(sum([len(txout.serialize()) for txout in txouts]), 67456)
txout = CTxOut()
txout.nValue = 0
txout.scriptPubKey = hex_str_to_bytes(script_pubkey)
for k in range(128):
txouts.append(txout)
return txouts
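For context on the hex script built above, a small self-contained check of how that OP_RETURN script decodes (byte values follow the Bitcoin script encoding; illustrative only):
# 0x6a = OP_RETURN, 0x4d = OP_PUSHDATA2, 0x0200 little-endian = 512 data bytes.
script_pubkey = bytes.fromhex("6a4d0200" + "01" * 512)
assert script_pubkey[0] == 0x6A and script_pubkey[1] == 0x4D
assert int.from_bytes(script_pubkey[2:4], "little") == 512
assert len(script_pubkey) == 4 + 512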
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(mini_wallet, node, fee, tx_batch_size, txouts, utxos=None):
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
use_internal_utxos = utxos is None
for _ in range(tx_batch_size):
tx = mini_wallet.create_self_transfer(
utxo_to_spend=None if use_internal_utxos else utxos.pop(),
fee=fee,
)["tx"]
tx.vout.extend(txouts)
res = node.testmempoolaccept([tx.serialize().hex()])[0]
assert_equal(res['fees']['base'], fee)
txids.append(node.sendrawtransaction(tx.serialize().hex()))
from .messages import CTransaction
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(rawtx)))
for txout in txouts:
tx.vout.append(txout)
newtx = tx.serialize().hex()
signresult = node.signrawtransactionwithwallet(newtx, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], 0)
txids.append(txid)
return txids
def mine_large_block(test_framework, mini_wallet, node):
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
# and 14 of them together are close to the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(mini_wallet, node, fee, 14, txouts)
test_framework.generate(node, 1)
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
def find_vout_for_address(node, txid, addr):
@@ -545,6 +614,11 @@ def find_vout_for_address(node, txid, addr):
"""
tx = node.getrawtransaction(txid, True)
for i in range(len(tx["vout"])):
if addr == tx["vout"][i]["scriptPubKey"]["address"]:
return i
scriptPubKey = tx["vout"][i]["scriptPubKey"]
if "addresses" in scriptPubKey:
if any([addr == a for a in scriptPubKey["addresses"]]):
return i
elif "address" in scriptPubKey:
if addr == scriptPubKey["address"]:
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))

View File

@@ -166,9 +166,6 @@ class WebsocketServer(ThreadingMixIn, TCPServer, API):
def _message_received_(self, handler, msg):
self.message_received(self.handler_to_client(handler), self, msg)
def _binary_message_received_(self, handler, msg):
self.binary_message_received(self.handler_to_client(handler), self, msg)
def _ping_received_(self, handler, msg):
handler.send_pong(msg)
@@ -312,7 +309,6 @@ class WebSocketHandler(StreamRequestHandler):
opcode = b1 & OPCODE
masked = b2 & MASKED
payload_length = b2 & PAYLOAD_LEN
is_binary: bool = False
if opcode == OPCODE_CLOSE_CONN:
logger.info("Client asked to close connection.")
@@ -326,8 +322,8 @@ class WebSocketHandler(StreamRequestHandler):
logger.warning("Continuation frames are not supported.")
return
elif opcode == OPCODE_BINARY:
is_binary = True
opcode_handler = self.server._binary_message_received_
logger.warning("Binary frames are not supported.")
return
elif opcode == OPCODE_TEXT:
opcode_handler = self.server._message_received_
elif opcode == OPCODE_PING:
@@ -349,8 +345,7 @@ class WebSocketHandler(StreamRequestHandler):
for message_byte in self.read_bytes(payload_length):
message_byte ^= masks[len(message_bytes) % 4]
message_bytes.append(message_byte)
opcode_handler(self, message_bytes if is_binary else message_bytes.decode('utf8'))
opcode_handler(self, message_bytes.decode('utf8'))
def send_message(self, message):
self.send_text(message)
@@ -380,35 +375,6 @@ class WebSocketHandler(StreamRequestHandler):
with self._send_lock:
self.request.send(header + payload)
def send_bytes(self, message, opcode=OPCODE_BINARY):
header = bytearray()
payload = message
payload_length = len(payload)
# Normal payload
if payload_length <= 125:
header.append(FIN | opcode)
header.append(payload_length)
# Extended payload
elif payload_length >= 126 and payload_length <= 65535:
header.append(FIN | opcode)
header.append(PAYLOAD_LEN_EXT16)
header.extend(struct.pack(">H", payload_length))
# Huge extended payload
elif payload_length < 18446744073709551616:
header.append(FIN | opcode)
header.append(PAYLOAD_LEN_EXT64)
header.extend(struct.pack(">Q", payload_length))
else:
raise Exception("Message is too big. Consider breaking it into chunks.")
return
with self._send_lock:
self.request.send(header + payload)
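The send_bytes helper above follows the standard WebSocket framing rules for payload lengths; a self-contained sketch of just the header construction (constants inlined per RFC 6455, names chosen for the example):
import struct
FIN, OPCODE_TEXT = 0x80, 0x01
PAYLOAD_LEN_EXT16, PAYLOAD_LEN_EXT64 = 126, 127
def frame_header(payload_length: int, opcode: int = OPCODE_TEXT) -> bytes:
    header = bytearray([FIN | opcode])
    if payload_length <= 125:
        header.append(payload_length)            # normal payload
    elif payload_length <= 65535:
        header.append(PAYLOAD_LEN_EXT16)         # 16-bit extended payload
        header.extend(struct.pack(">H", payload_length))
    else:
        header.append(PAYLOAD_LEN_EXT64)         # 64-bit extended payload
        header.extend(struct.pack(">Q", payload_length))
    return bytes(header)
assert frame_header(5) == b"\x81\x05"
assert frame_header(300)[:2] == b"\x81\x7e"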
def send_text(self, message, opcode=OPCODE_TEXT):
"""
Important: Fragmented(=continuation) messages are not supported since

File diff suppressed because it is too large

View File

@@ -1,283 +1,308 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2022-2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import time
from sqlalchemy.orm import scoped_session
from .db import (
AutomationStrategy,
BidState,
Concepts,
create_table,
CURRENT_DB_DATA_VERSION,
AutomationStrategy,
CURRENT_DB_VERSION,
extract_schema,
)
CURRENT_DB_DATA_VERSION)
from .basicswap_util import (
BidStates,
canAcceptBidState,
canExpireBidState,
canTimeoutBidState,
strBidState,
isActiveBidState,
isErrorBidState,
isFailingBidState,
isFinalBidState,
strBidState,
)
def addBidState(self, state, now, cursor):
self.add(
BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
in_error=isErrorBidState(state),
swap_failed=isFailingBidState(state),
swap_ended=isFinalBidState(state),
can_accept=canAcceptBidState(state),
can_expire=canExpireBidState(state),
can_timeout=canTimeoutBidState(state),
label=strBidState(state),
created_at=now,
),
cursor,
)
def upgradeDatabaseData(self, data_version):
if data_version >= CURRENT_DB_DATA_VERSION:
return
self.log.info(
f"Upgrading database records from version {data_version} to {CURRENT_DB_DATA_VERSION}."
)
self.log.info('Upgrading database records from version %d to %d.', data_version, CURRENT_DB_DATA_VERSION)
with self.mxDB:
try:
session = scoped_session(self.session_factory)
cursor = self.openDB()
try:
now = int(time.time())
now = int(time.time())
if data_version < 1:
self.add(
AutomationStrategy(
if data_version < 1:
session.add(AutomationStrategy(
active_ind=1,
label="Accept All",
label='Accept All',
type_ind=Concepts.OFFER,
data=json.dumps(
{"exact_rate_only": True, "max_concurrent_bids": 1}
).encode("utf-8"),
data=json.dumps({'exact_rate_only': True,
'max_concurrent_bids': 5}).encode('utf-8'),
only_known_identities=False,
created_at=now,
),
cursor,
)
self.add(
AutomationStrategy(
created_at=now))
session.add(AutomationStrategy(
active_ind=1,
label="Accept Known",
label='Accept Known',
type_ind=Concepts.OFFER,
data=json.dumps(
{"exact_rate_only": True, "max_concurrent_bids": 1}
).encode("utf-8"),
data=json.dumps({'exact_rate_only': True,
'max_concurrent_bids': 5}).encode('utf-8'),
only_known_identities=True,
note="Accept bids from identities with previously successful swaps only",
created_at=now,
),
cursor,
)
note='Accept bids from identities with previously successful swaps only',
created_at=now))
for state in BidStates:
addBidState(self, state, now, cursor)
for state in BidStates:
session.add(BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
in_error=isErrorBidState(state),
swap_failed=isFailingBidState(state),
swap_ended=isFinalBidState(state),
label=strBidState(state),
created_at=now))
if data_version > 0 and data_version < 2:
for state in (
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX,
):
self.add(
BidState(
if data_version > 0 and data_version < 2:
for state in (BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS, BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX):
session.add(BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
label=strBidState(state),
created_at=now,
),
cursor,
)
if data_version > 0 and data_version < 7:
for state in BidStates:
in_error = isErrorBidState(state)
swap_failed = isFailingBidState(state)
swap_ended = isFinalBidState(state)
can_accept = canAcceptBidState(state)
can_expire = canExpireBidState(state)
can_timeout = canTimeoutBidState(state)
cursor.execute(
"UPDATE bidstates SET can_accept = :can_accept, can_expire = :can_expire, can_timeout = :can_timeout, in_error = :in_error, swap_failed = :swap_failed, swap_ended = :swap_ended WHERE state_id = :state_id",
{
"in_error": in_error,
"swap_failed": swap_failed,
"swap_ended": swap_ended,
"can_accept": can_accept,
"can_expire": can_expire,
"can_timeout": can_timeout,
"state_id": int(state),
},
)
if data_version > 0 and data_version < 4:
for state in (
BidStates.BID_REQUEST_SENT,
BidStates.BID_REQUEST_ACCEPTED,
):
addBidState(self, state, now, cursor)
created_at=now))
if data_version > 0 and data_version < 3:
for state in BidStates:
in_error = isErrorBidState(state)
swap_failed = isFailingBidState(state)
swap_ended = isFinalBidState(state)
session.execute('UPDATE bidstates SET in_error = :in_error, swap_failed = :swap_failed, swap_ended = :swap_ended WHERE state_id = :state_id', {'in_error': in_error, 'swap_failed': swap_failed, 'swap_ended': swap_ended, 'state_id': int(state)})
if data_version > 0 and data_version < 4:
for state in (BidStates.BID_REQUEST_SENT, BidStates.BID_REQUEST_ACCEPTED):
session.add(BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
in_error=isErrorBidState(state),
swap_failed=isFailingBidState(state),
swap_ended=isFinalBidState(state),
label=strBidState(state),
created_at=now))
if data_version > 0 and data_version < 5:
for state in (
BidStates.BID_EXPIRED,
BidStates.BID_AACCEPT_DELAY,
BidStates.BID_AACCEPT_FAIL,
):
addBidState(self, state, now, cursor)
self.db_data_version = CURRENT_DB_DATA_VERSION
self.setIntKV("db_data_version", self.db_data_version, cursor)
self.commitDB()
self.log.info(f"Upgraded database records to version {self.db_data_version}")
finally:
self.closeDB(cursor, commit=False)
self.db_data_version = CURRENT_DB_DATA_VERSION
self.setIntKVInSession('db_data_version', self.db_data_version, session)
session.commit()
self.log.info('Upgraded database records to version {}'.format(self.db_data_version))
finally:
session.close()
session.remove()
def upgradeDatabase(self, db_version):
if self._force_db_upgrade is False and db_version >= CURRENT_DB_VERSION:
if db_version >= CURRENT_DB_VERSION:
return
self.log.info(
f"Upgrading database from version {db_version} to {CURRENT_DB_VERSION}."
)
self.log.info('Upgrading database from version %d to %d.', db_version, CURRENT_DB_VERSION)
# db_version, tablename, oldcolumnname, newcolumnname
rename_columns = [
(13, "actions", "event_id", "action_id"),
(13, "actions", "event_type", "action_type"),
(13, "actions", "event_data", "action_data"),
(
14,
"xmr_swaps",
"coin_a_lock_refund_spend_tx_msg_id",
"coin_a_lock_spend_tx_msg_id",
),
]
while True:
session = scoped_session(self.session_factory)
expect_schema = extract_schema()
have_tables = {}
try:
cursor = self.openDB()
current_version = db_version
if current_version == 6:
session.execute('ALTER TABLE bids ADD COLUMN security_token BLOB')
session.execute('ALTER TABLE offers ADD COLUMN security_token BLOB')
db_version += 1
elif current_version == 7:
session.execute('ALTER TABLE transactions ADD COLUMN block_hash BLOB')
session.execute('ALTER TABLE transactions ADD COLUMN block_height INTEGER')
session.execute('ALTER TABLE transactions ADD COLUMN block_time INTEGER')
db_version += 1
elif current_version == 8:
session.execute('''
CREATE TABLE wallets (
record_id INTEGER NOT NULL,
coin_id INTEGER,
wallet_name VARCHAR,
wallet_data VARCHAR,
balance_type INTEGER,
created_at BIGINT,
PRIMARY KEY (record_id))''')
db_version += 1
elif current_version == 9:
session.execute('ALTER TABLE wallets ADD COLUMN wallet_data VARCHAR')
db_version += 1
elif current_version == 10:
session.execute('ALTER TABLE smsgaddresses ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN created_at INTEGER')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN note VARCHAR')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN pubkey VARCHAR')
session.execute('UPDATE smsgaddresses SET active_ind = 1, created_at = 1')
for rename_column in rename_columns:
dbv, table_name, colname_from, colname_to = rename_column
if db_version < dbv:
cursor.execute(
f"ALTER TABLE {table_name} RENAME COLUMN {colname_from} TO {colname_to}"
)
session.execute('ALTER TABLE offers ADD COLUMN addr_to VARCHAR')
session.execute(f'UPDATE offers SET addr_to = "{self.network_addr}"')
db_version += 1
elif current_version == 11:
session.execute('ALTER TABLE bids ADD COLUMN chain_a_height_start INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN chain_b_height_start INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN protocol_version INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN protocol_version INTEGER')
session.execute('ALTER TABLE transactions ADD COLUMN tx_data BLOB')
db_version += 1
elif current_version == 12:
session.execute('''
CREATE TABLE knownidentities (
record_id INTEGER NOT NULL,
address VARCHAR,
label VARCHAR,
publickey BLOB,
num_sent_bids_successful INTEGER,
num_recv_bids_successful INTEGER,
num_sent_bids_rejected INTEGER,
num_recv_bids_rejected INTEGER,
num_sent_bids_failed INTEGER,
num_recv_bids_failed INTEGER,
note VARCHAR,
updated_at BIGINT,
created_at BIGINT,
PRIMARY KEY (record_id))''')
session.execute('ALTER TABLE bids ADD COLUMN reject_code INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN rate INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN amount_negotiable INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN rate_negotiable INTEGER')
db_version += 1
elif current_version == 13:
db_version += 1
session.execute('''
CREATE TABLE automationstrategies (
record_id INTEGER NOT NULL,
active_ind INTEGER,
label VARCHAR,
type_ind INTEGER,
only_known_identities INTEGER,
num_concurrent INTEGER,
data BLOB,
query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;"
tables = cursor.execute(query).fetchall()
for table in tables:
table_name = table[0]
if table_name in ("sqlite_sequence",):
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_table = {}
have_columns = {}
query = "SELECT * FROM PRAGMA_TABLE_INFO(:table_name) ORDER BY cid DESC;"
columns = cursor.execute(query, {"table_name": table_name}).fetchall()
for column in columns:
cid, name, data_type, notnull, default_value, primary_key = column
have_columns[name] = {"type": data_type, "primary_key": primary_key}
session.execute('''
CREATE TABLE automationlinks (
record_id INTEGER NOT NULL,
active_ind INTEGER,
have_table["columns"] = have_columns
linked_type INTEGER,
linked_id BLOB,
strategy_id INTEGER,
cursor.execute(f"PRAGMA INDEX_LIST('{table_name}');")
indices = cursor.fetchall()
for index in indices:
seq, index_name, unique, origin, partial = index
data BLOB,
repeat_limit INTEGER,
repeat_count INTEGER,
if origin == "pk": # Created by a PRIMARY KEY constraint
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
cursor.execute(f"PRAGMA INDEX_INFO('{index_name}');")
index_info = cursor.fetchall()
session.execute('''
CREATE TABLE history (
record_id INTEGER NOT NULL,
concept_type INTEGER,
concept_id INTEGER,
changed_data BLOB,
add_index = {"index_name": index_name}
for index_columns in index_info:
seqno, cid, name = index_columns
if origin == "u": # Created by a UNIQUE constraint
have_columns[name]["unique"] = 1
else:
if "column_1" not in add_index:
add_index["column_1"] = name
elif "column_2" not in add_index:
add_index["column_2"] = name
elif "column_3" not in add_index:
add_index["column_3"] = name
else:
raise RuntimeError("Add more index columns.")
if origin == "c":
if "indices" not in table:
have_table["indices"] = []
have_table["indices"].append(add_index)
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_tables[table_name] = have_table
session.execute('''
CREATE TABLE bidstates (
record_id INTEGER NOT NULL,
active_ind INTEGER,
state_id INTEGER,
label VARCHAR,
in_progress INTEGER,
for table_name, table in expect_schema.items():
if table_name not in have_tables:
self.log.info(f"Creating table {table_name}.")
create_table(cursor, table_name, table)
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_table = have_tables[table_name]
have_columns = have_table["columns"]
for colname, column in table["columns"].items():
if colname not in have_columns:
col_type = column["type"]
self.log.info(f"Adding column {colname} to table {table_name}.")
cursor.execute(
f"ALTER TABLE {table_name} ADD COLUMN {colname} {col_type}"
)
indices = table.get("indices", [])
have_indices = have_table.get("indices", [])
for index in indices:
index_name = index["index_name"]
if not any(
have_idx.get("index_name") == index_name
for have_idx in have_indices
):
self.log.info(f"Adding index {index_name} to table {table_name}.")
column_1 = index["column_1"]
column_2 = index.get("column_2", None)
column_3 = index.get("column_3", None)
query: str = (
f"CREATE INDEX {index_name} ON {table_name} ({column_1}"
)
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ")"
cursor.execute(query)
session.execute('ALTER TABLE wallets ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE knownidentities ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE eventqueue RENAME TO actions')
session.execute('ALTER TABLE actions RENAME COLUMN event_id TO action_id')
session.execute('ALTER TABLE actions RENAME COLUMN event_type TO action_type')
session.execute('ALTER TABLE actions RENAME COLUMN event_data TO action_data')
elif current_version == 14:
db_version += 1
session.execute('ALTER TABLE xmr_swaps ADD COLUMN coin_a_lock_release_msg_id BLOB')
session.execute('ALTER TABLE xmr_swaps RENAME COLUMN coin_a_lock_refund_spend_tx_msg_id TO coin_a_lock_spend_tx_msg_id')
elif current_version == 15:
db_version += 1
session.execute('''
CREATE TABLE notifications (
record_id INTEGER NOT NULL,
active_ind INTEGER,
event_type INTEGER,
event_data BLOB,
created_at BIGINT,
PRIMARY KEY (record_id))''')
elif current_version == 16:
db_version += 1
session.execute('''
CREATE TABLE prefunded_transactions (
record_id INTEGER NOT NULL,
active_ind INTEGER,
created_at BIGINT,
linked_type INTEGER,
linked_id BLOB,
tx_type INTEGER,
tx_data BLOB,
used_by BLOB,
PRIMARY KEY (record_id))''')
elif current_version == 17:
db_version += 1
session.execute('ALTER TABLE knownidentities ADD COLUMN automation_override INTEGER')
session.execute('ALTER TABLE knownidentities ADD COLUMN visibility_override INTEGER')
session.execute('ALTER TABLE knownidentities ADD COLUMN data BLOB')
session.execute('UPDATE knownidentities SET active_ind = 1')
elif current_version == 18:
db_version += 1
session.execute('ALTER TABLE xmr_split_data ADD COLUMN addr_from STRING')
session.execute('ALTER TABLE xmr_split_data ADD COLUMN addr_to STRING')
elif current_version == 19:
db_version += 1
session.execute('ALTER TABLE bidstates ADD COLUMN in_error INTEGER')
session.execute('ALTER TABLE bidstates ADD COLUMN swap_failed INTEGER')
session.execute('ALTER TABLE bidstates ADD COLUMN swap_ended INTEGER')
elif current_version == 20:
db_version += 1
session.execute('''
CREATE TABLE message_links (
record_id INTEGER NOT NULL,
active_ind INTEGER,
created_at BIGINT,
if CURRENT_DB_VERSION != db_version:
self.db_version = CURRENT_DB_VERSION
self.setIntKV("db_version", CURRENT_DB_VERSION, cursor)
self.log.info(f"Upgraded database to version {self.db_version}")
self.commitDB()
except Exception as e:
self.log.error(f"Upgrade failed {e}")
self.rollbackDB()
finally:
self.closeDB(cursor, commit=False)
linked_type INTEGER,
linked_id BLOB,
msg_type INTEGER,
msg_sequence INTEGER,
msg_id BLOB,
PRIMARY KEY (record_id))''')
session.execute('ALTER TABLE offers ADD COLUMN bid_reversed INTEGER')
if current_version != db_version:
self.db_version = db_version
self.setIntKVInSession('db_version', db_version, session)
session.commit()
session.close()
session.remove()
self.log.info('Upgraded database to version {}'.format(self.db_version))
continue
break
if db_version != CURRENT_DB_VERSION:
raise ValueError('Unable to upgrade database.')
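The schema-upgrade loop above discovers existing tables and columns through sqlite_master and PRAGMA_TABLE_INFO before applying additive changes; a minimal, self-contained sketch of that introspection pattern (in-memory database and table name are illustrative):
import sqlite3
con = sqlite3.connect(":memory:")
cursor = con.cursor()
cursor.execute("CREATE TABLE offers (offer_id BLOB PRIMARY KEY, expire_at BIGINT)")
tables = cursor.execute("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;").fetchall()
assert ("offers",) in tables
# Each row is (cid, name, type, notnull, dflt_value, pk).
columns = cursor.execute("SELECT * FROM PRAGMA_TABLE_INFO(:table_name) ORDER BY cid DESC;", {"table_name": "offers"}).fetchall()
assert [c[1] for c in columns] == ["expire_at", "offer_id"]
cursor.execute("ALTER TABLE offers ADD COLUMN addr_to VARCHAR")  # additive change, as in the upgrade path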

View File

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2023-2025 The Basicswap Developers
# Copyright (c) 2023 The BSX Developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
@@ -12,75 +12,45 @@ from .db import (
def remove_expired_data(self, time_offset: int = 0):
now: int = self.getTime()
try:
cursor = self.openDB()
session = self.openSession()
active_bids_insert: str = self.activeBidsQueryStr("", "b2")
query_str = f"""
active_bids_insert = self.activeBidsQueryStr(now, '', 'b2')
query_str = f'''
SELECT o.offer_id FROM offers o
WHERE o.expire_at <= :expired_at AND 0 = (SELECT COUNT(*) FROM bids b2 WHERE b2.offer_id = o.offer_id AND {active_bids_insert})
"""
'''
num_offers = 0
num_bids = 0
offer_rows = cursor.execute(
query_str, {"now": now, "expired_at": now - time_offset}
)
offer_rows = session.execute(query_str, {'expired_at': now - time_offset})
for offer_row in offer_rows:
num_offers += 1
offer_query_data = {
"type_ind": int(Concepts.OFFER),
"offer_id": offer_row[0],
}
bid_rows = cursor.execute(
"SELECT bids.bid_id FROM bids WHERE bids.offer_id = :offer_id",
offer_query_data,
)
bid_rows = session.execute('SELECT bids.bid_id FROM bids WHERE bids.offer_id = :offer_id', {'offer_id': offer_row[0]})
for bid_row in bid_rows:
num_bids += 1
bid_query_data = {"type_ind": int(Concepts.BID), "bid_id": bid_row[0]}
for query_str in [
"DELETE FROM transactions WHERE transactions.bid_id = :bid_id",
"DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :bid_id",
"DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :bid_id",
"DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :bid_id",
"DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :bid_id",
"DELETE FROM xmr_swaps WHERE xmr_swaps.bid_id = :bid_id",
"DELETE FROM actions WHERE actions.linked_id = :bid_id",
"DELETE FROM addresspool WHERE addresspool.bid_id = :bid_id",
"DELETE FROM xmr_split_data WHERE xmr_split_data.bid_id = :bid_id",
"DELETE FROM bids WHERE bids.bid_id = :bid_id",
"DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :bid_id",
"DELETE FROM direct_message_route_links WHERE linked_type = :type_ind AND linked_id = :bid_id",
"DELETE FROM message_network_links WHERE linked_type = :type_ind AND linked_id = :bid_id",
]:
cursor.execute(query_str, bid_query_data)
for query_str in [
"DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :offer_id",
"DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :offer_id",
"DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :offer_id",
"DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :offer_id",
"DELETE FROM xmr_offers WHERE xmr_offers.offer_id = :offer_id",
"DELETE FROM sentoffers WHERE sentoffers.offer_id = :offer_id",
"DELETE FROM actions WHERE actions.linked_id = :offer_id",
"DELETE FROM offers WHERE offers.offer_id = :offer_id",
"DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :offer_id",
"DELETE FROM message_network_links WHERE linked_type = :type_ind AND linked_id = :offer_id",
]:
cursor.execute(query_str, offer_query_data)
session.execute('DELETE FROM transactions WHERE transactions.bid_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :bid_id', {'type_ind': int(Concepts.BID), 'bid_id': bid_row[0]})
session.execute('DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :bid_id', {'type_ind': int(Concepts.BID), 'bid_id': bid_row[0]})
session.execute('DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :bid_id', {'type_ind': int(Concepts.BID), 'bid_id': bid_row[0]})
session.execute('DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :bid_id', {'type_ind': int(Concepts.BID), 'bid_id': bid_row[0]})
session.execute('DELETE FROM xmr_swaps WHERE xmr_swaps.bid_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM actions WHERE actions.linked_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM addresspool WHERE addresspool.bid_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM xmr_split_data WHERE xmr_split_data.bid_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM bids WHERE bids.bid_id = :bid_id', {'bid_id': bid_row[0]})
session.execute('DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :linked_id', {'type_ind': int(Concepts.BID), 'linked_id': bid_row[0]})
session.execute('DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :offer_id', {'type_ind': int(Concepts.OFFER), 'offer_id': offer_row[0]})
session.execute('DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :offer_id', {'type_ind': int(Concepts.OFFER), 'offer_id': offer_row[0]})
session.execute('DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :offer_id', {'type_ind': int(Concepts.OFFER), 'offer_id': offer_row[0]})
session.execute('DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :offer_id', {'type_ind': int(Concepts.OFFER), 'offer_id': offer_row[0]})
session.execute('DELETE FROM xmr_offers WHERE xmr_offers.offer_id = :offer_id', {'offer_id': offer_row[0]})
session.execute('DELETE FROM sentoffers WHERE sentoffers.offer_id = :offer_id', {'offer_id': offer_row[0]})
session.execute('DELETE FROM actions WHERE actions.linked_id = :offer_id', {'offer_id': offer_row[0]})
session.execute('DELETE FROM offers WHERE offers.offer_id = :offer_id', {'offer_id': offer_row[0]})
session.execute('DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :offer_id', {'type_ind': int(Concepts.OFFER), 'offer_id': offer_row[0]})
if num_offers > 0 or num_bids > 0:
self.log.info(
"Removed data for {} expired offer{} and {} bid{}.".format(
num_offers,
"s" if num_offers != 1 else "",
num_bids,
"s" if num_bids != 1 else "",
)
)
cursor.execute(
"DELETE FROM checkedblocks WHERE created_at <= :expired_at",
{"expired_at": now - time_offset},
)
self.log.info('Removed data for {} expired offer{} and {} bid{}.'.format(num_offers, 's' if num_offers != 1 else '', num_bids, 's' if num_bids != 1 else ''))
finally:
self.closeDB(cursor)
self.closeSession(session)

View File

@@ -0,0 +1,36 @@
# -*- coding: utf-8 -*-
import secrets
import hashlib
import basicswap.contrib.ed25519_fast as edf
def get_secret():
return 9 + secrets.randbelow(edf.l - 9)
def encodepoint(P):
zi = edf.inv(P[2])
x = (P[0] * zi) % edf.q
y = (P[1] * zi) % edf.q
y += ((x & 1) << 255)
return y.to_bytes(32, byteorder='little')
def hashToEd25519(bytes_in):
hashed = hashlib.sha256(bytes_in).digest()
for i in range(1000):
h255 = bytearray(hashed)
x_sign = 0 if h255[31] & 0x80 == 0 else 1
h255[31] &= 0x7f # Clear top bit
y = int.from_bytes(h255, byteorder='little')
x = edf.xrecover(y, x_sign)
if x == 0 and y == 1: # Skip infinity point
continue
P = [x, y, 1, (x * y) % edf.q]
# Keep trying until the point is in the correct subgroup
if edf.isoncurve(P) and edf.is_identity(edf.scalarmult(P, edf.l)):
return P
hashed = hashlib.sha256(hashed).digest()
raise ValueError('hashToEd25519 failed')
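A hedged usage sketch for the helpers above (assumes this module and its ed25519_fast dependency import cleanly; the tag bytes are arbitrary):
# Map an arbitrary byte string to a point in the prime-order subgroup, then encode it.
P = hashToEd25519(b"example-tag")
assert edf.isoncurve(P) and edf.is_identity(edf.scalarmult(P, edf.l))
assert len(encodepoint(P)) == 32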

View File

@@ -1,17 +1,13 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2023 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
default_coingecko_api_key = "CG-8hm3r9iLfpEXv4ied8oLbeUj"
class Explorer:
class Explorer():
def __init__(self, swapclient, coin_type, base_url):
self.swapclient = swapclient
self.coin_type = coin_type
@@ -19,94 +15,82 @@ class Explorer:
self.log = self.swapclient.log
def readURL(self, url):
self.log.debug("Explorer url: {}".format(url))
self.log.debug('Explorer url: {}'.format(url))
return self.swapclient.readURL(url)
class ExplorerInsight(Explorer):
def getChainHeight(self):
return json.loads(self.readURL(self.base_url + "/sync"))["blockChainHeight"]
return json.loads(self.readURL(self.base_url + '/sync'))['blockChainHeight']
def getBlock(self, block_hash):
data = json.loads(self.readURL(self.base_url + "/block/{}".format(block_hash)))
data = json.loads(self.readURL(self.base_url + '/block/{}'.format(block_hash)))
return data
def getTransaction(self, txid):
data = json.loads(self.readURL(self.base_url + "/tx/{}".format(txid)))
data = json.loads(self.readURL(self.base_url + '/tx/{}'.format(txid)))
return data
def getBalance(self, address):
data = json.loads(
self.readURL(self.base_url + "/addr/{}/balance".format(address))
)
data = json.loads(self.readURL(self.base_url + '/addr/{}/balance'.format(address)))
return data
def lookupUnspentByAddress(self, address):
data = json.loads(self.readURL(self.base_url + "/addr/{}/utxo".format(address)))
data = json.loads(self.readURL(self.base_url + '/addr/{}/utxo'.format(address)))
rv = []
for utxo in data:
rv.append(
{
"txid": utxo["txid"],
"index": utxo["vout"],
"height": utxo["height"],
"n_conf": utxo["confirmations"],
"value": utxo["satoshis"],
}
)
rv.append({
'txid': utxo['txid'],
'index': utxo['vout'],
'height': utxo['height'],
'n_conf': utxo['confirmations'],
'value': utxo['satoshis'],
})
return rv
class ExplorerBitAps(Explorer):
def getChainHeight(self):
return json.loads(self.readURL(self.base_url + "/block/last"))["data"]["block"][
"height"
]
return json.loads(self.readURL(self.base_url + '/block/last'))['data']['block']['height']
def getBlock(self, block_hash):
data = json.loads(self.readURL(self.base_url + "/block/{}".format(block_hash)))
data = json.loads(self.readURL(self.base_url + '/block/{}'.format(block_hash)))
return data
def getTransaction(self, txid):
data = json.loads(self.readURL(self.base_url + "/transaction/{}".format(txid)))
data = json.loads(self.readURL(self.base_url + '/transaction/{}'.format(txid)))
return data
def getBalance(self, address):
data = json.loads(self.readURL(self.base_url + "/address/state/" + address))
return data["data"]["balance"]
data = json.loads(self.readURL(self.base_url + '/address/state/' + address))
return data['data']['balance']
def lookupUnspentByAddress(self, address):
# Can't get unspents; return only if exactly one transaction exists
data = json.loads(
self.readURL(self.base_url + "/address/transactions/" + address)
)
data = json.loads(self.readURL(self.base_url + '/address/transactions/' + address))
try:
assert data["data"]["list"] == 1
assert data['data']['list'] == 1
except Exception as ex:
self.log.debug("Explorer error: {}".format(str(ex)))
self.log.debug('Explorer error: {}'.format(str(ex)))
return None
tx = data["data"]["list"][0]
tx_data = json.loads(
self.readURL(self.base_url + "/transaction/{}".format(tx["txId"]))
)["data"]
tx = data['data']['list'][0]
tx_data = json.loads(self.readURL(self.base_url + '/transaction/{}'.format(tx['txId'])))['data']
for i, vout in tx_data["vOut"].items():
if vout["address"] == address:
return [
{
"txid": tx_data["txId"],
"index": int(i),
"height": tx_data["blockHeight"],
"n_conf": tx_data["confirmations"],
"value": vout["value"],
}
]
for i, vout in tx_data['vOut'].items():
if vout['address'] == address:
return [{
'txid': tx_data['txId'],
'index': int(i),
'height': tx_data['blockHeight'],
'n_conf': tx_data['confirmations'],
'value': vout['value'],
}]
class ExplorerChainz(Explorer):
def getChainHeight(self):
return int(self.readURL(self.base_url + "?q=getblockcount"))
return int(self.readURL(self.base_url + '?q=getblockcount'))
def lookupUnspentByAddress(self, address):
chain_height = self.getChainHeight()
self.log.debug("[rm] chain_height %d", chain_height)
self.log.debug('[rm] chain_height %d', chain_height)

File diff suppressed because it is too large

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from enum import IntEnum
class Curves(IntEnum):
secp256k1 = 1
ed25519 = 2

View File

@@ -1,234 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from enum import IntEnum
from basicswap.chainparams import (
chainparams,
)
from basicswap.util import (
ensure,
i2b,
b2i,
make_int,
format_amount,
TemporaryError,
)
from basicswap.util.crypto import (
hash160,
)
from basicswap.util.ecc import (
ep,
getSecretInt,
)
from coincurve.dleag import verify_secp256k1_point
from coincurve.keys import (
PublicKey,
)
class Curves(IntEnum):
secp256k1 = 1
ed25519 = 2
class CoinInterface:
@staticmethod
def watch_blocks_for_scripts() -> bool:
return False
@staticmethod
def compareFeeRates(a, b) -> bool:
return abs(a - b) < 20
def __init__(self, network):
self.setDefaults()
self._network = network
self._mx_wallet = threading.Lock()
self._altruistic = True
self._core_version = None # Set in getDaemonVersion()
def interface_type(self) -> int:
# coin_type() returns the base coin type, interface_type() returns the coin+balance type.
return self.coin_type()
def setDefaults(self):
self._unknown_wallet_seed = True
self._restore_height = None
def make_int(self, amount_in: int, r: int = 0) -> int:
return make_int(amount_in, self.exp(), r=r)
def format_amount(self, amount_in, conv_int=False, r=0):
amount_int = make_int(amount_in, self.exp(), r=r) if conv_int else amount_in
return format_amount(amount_int, self.exp())
def coin_name(self) -> str:
coin_chainparams = chainparams[self.coin_type()]
if "display_name" in coin_chainparams:
return coin_chainparams["display_name"]
return coin_chainparams["name"].capitalize()
def ticker(self) -> str:
ticker = chainparams[self.coin_type()]["ticker"]
if self._network == "testnet":
ticker = "t" + ticker
elif self._network == "regtest":
ticker = "rt" + ticker
return ticker
def getExchangeTicker(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]["ticker"]
def getExchangeName(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]["name"]
def ticker_mainnet(self) -> str:
ticker = chainparams[self.coin_type()]["ticker"]
return ticker
def min_amount(self) -> int:
return chainparams[self.coin_type()][self._network]["min_amount"]
def max_amount(self) -> int:
return chainparams[self.coin_type()][self._network]["max_amount"]
def setWalletSeedWarning(self, value: bool) -> None:
self._unknown_wallet_seed = value
def setWalletRestoreHeight(self, value: int) -> None:
self._restore_height = value
def knownWalletSeed(self) -> bool:
return not self._unknown_wallet_seed
def chainparams(self):
return chainparams[self.coin_type()]
def chainparams_network(self):
return chainparams[self.coin_type()][self._network]
def has_segwit(self) -> bool:
return chainparams[self.coin_type()].get("has_segwit", True)
def use_p2shp2wsh(self) -> bool:
# p2sh-p2wsh
return False
def is_transient_error(self, ex) -> bool:
if isinstance(ex, TemporaryError):
return True
str_error: str = str(ex).lower()
if "not enough unlocked money" in str_error:
return True
if "no unlocked balance" in str_error:
return True
if "transaction was rejected by daemon" in str_error:
return True
if "invalid unlocked_balance" in str_error:
return True
if "daemon is busy" in str_error:
return True
if "timed out" in str_error:
return True
if "request-sent" in str_error:
return True
return False
def setConfTarget(self, new_conf_target: int) -> None:
ensure(
new_conf_target >= 1 and new_conf_target < 33, "Invalid conf_target value"
)
self._conf_target = new_conf_target
def walletRestoreHeight(self) -> int:
return self._restore_height
def get_connection_type(self):
return self._connection_type
def using_segwit(self) -> bool:
# Using btc native segwit
return self._use_segwit
def use_tx_vsize(self) -> bool:
return self._use_segwit
def getLockTxSwapOutputValue(self, bid, xmr_swap) -> int:
return bid.amount
def getLockRefundTxSwapOutputValue(self, bid, xmr_swap) -> int:
return xmr_swap.a_swap_refund_value
def getLockRefundTxSwapOutput(self, xmr_swap) -> int:
# Only one prevout exists
return 0
def checkWallets(self) -> int:
return 1
def altruistic(self) -> bool:
return self._altruistic
class AdaptorSigInterface:
def getScriptLockTxDummyWitness(self, script: bytes):
return [b"", bytes(72), bytes(72), bytes(len(script))]
def getScriptLockRefundSpendTxDummyWitness(self, script: bytes):
return [b"", bytes(72), bytes(72), bytes((1,)), bytes(len(script))]
def getScriptLockRefundSwipeTxDummyWitness(self, script: bytes):
return [bytes(72), b"", bytes(len(script))]
class Secp256k1Interface(CoinInterface, AdaptorSigInterface):
@staticmethod
def curve_type():
return Curves.secp256k1
def getNewRandomKey(self) -> bytes:
return i2b(getSecretInt())
def getPubkey(self, privkey: bytes) -> bytes:
return PublicKey.from_secret(privkey).format()
def pkh(self, pubkey: bytes) -> bytes:
return hash160(pubkey)
def verifyKey(self, k: bytes) -> bool:
i = b2i(k)
return i < ep.o and i > 0
def verifyPubkey(self, pubkey_bytes: bytes) -> bool:
return verify_secp256k1_point(pubkey_bytes)
def isValidAddressHash(self, address_hash: bytes) -> bool:
hash_len = len(address_hash)
if hash_len == 20:
return True
def isValidPubkey(self, pubkey: bytes) -> bool:
try:
self.verifyPubkey(pubkey)
return True
except Exception:
return False
def verifySig(self, pubkey: bytes, signed_hash: bytes, sig: bytes) -> bool:
pubkey = PublicKey(pubkey)
return pubkey.verify(sig, signed_hash, hasher=None)
def sumKeys(self, ka: bytes, kb: bytes) -> bytes:
# TODO: Add to coincurve
return i2b((b2i(ka) + b2i(kb)) % ep.o)
def sumPubkeys(self, Ka: bytes, Kb: bytes) -> bytes:
return PublicKey.combine_keys([PublicKey(Ka), PublicKey(Kb)]).format()
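A hedged sketch of the identity sumKeys/sumPubkeys rely on, namely that adding secrets modulo the secp256k1 group order matches combining the corresponding public keys (assumes the coincurve package imported above; the order constant is written out only for the example):
from coincurve.keys import PublicKey
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
ka = (1234).to_bytes(32, "big")
kb = (5678).to_bytes(32, "big")
ksum = ((1234 + 5678) % SECP256K1_ORDER).to_bytes(32, "big")
combined = PublicKey.combine_keys([PublicKey.from_secret(ka), PublicKey.from_secret(kb)]).format()
assert combined == PublicKey.from_secret(ksum).format()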

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,247 +0,0 @@
import unittest
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
def polymod(values):
chk = 1
generator = [
(0x01, 0x98F2BC8E61),
(0x02, 0x79B76D99E2),
(0x04, 0xF33E5FB3C4),
(0x08, 0xAE2EABE2A8),
(0x10, 0x1E4F43E470),
]
for value in values:
top = chk >> 35
chk = ((chk & 0x07FFFFFFFF) << 5) ^ value
for i in generator:
if top & i[0] != 0:
chk ^= i[1]
return chk ^ 1
def calculate_checksum(prefix, payload):
poly = polymod(prefix_expand(prefix) + payload + [0, 0, 0, 0, 0, 0, 0, 0])
out = list()
for i in range(8):
out.append((poly >> 5 * (7 - i)) & 0x1F)
return out
def verify_checksum(prefix, payload):
return polymod(prefix_expand(prefix) + payload) == 0
def b32decode(inputs):
out = list()
for letter in inputs:
out.append(CHARSET.find(letter))
return out
def b32encode(inputs):
out = ""
for char_code in inputs:
out += CHARSET[char_code]
return out
def convertbits(data, frombits, tobits, pad=True):
acc = 0
bits = 0
ret = []
maxv = (1 << tobits) - 1
max_acc = (1 << (frombits + tobits - 1)) - 1
for value in data:
if value < 0 or (value >> frombits):
return None
acc = ((acc << frombits) | value) & max_acc
bits += frombits
while bits >= tobits:
bits -= tobits
ret.append((acc >> bits) & maxv)
if pad:
if bits:
ret.append((acc << (tobits - bits)) & maxv)
elif bits >= frombits or ((acc << (tobits - bits)) & maxv):
return None
return ret
def prefix_expand(prefix):
return [ord(x) & 0x1F for x in prefix] + [0]
class Address:
"""
Class to handle CashAddr.
:param version: Version of CashAddr
:type version: ``str``
:param payload: Payload of CashAddr as int list of the bytearray
:type payload: ``list`` of ``int``
"""
VERSIONS = {
"P2SH20": {"prefix": "bitcoincash", "version_bit": 8, "network": "mainnet"},
"P2SH32": {"prefix": "bitcoincash", "version_bit": 11, "network": "mainnet"},
"P2PKH": {"prefix": "bitcoincash", "version_bit": 0, "network": "mainnet"},
"P2SH20-TESTNET": {"prefix": "bchtest", "version_bit": 8, "network": "testnet"},
"P2SH32-TESTNET": {
"prefix": "bchtest",
"version_bit": 11,
"network": "testnet",
},
"P2PKH-TESTNET": {"prefix": "bchtest", "version_bit": 0, "network": "testnet"},
"P2SH20-REGTEST": {"prefix": "bchreg", "version_bit": 8, "network": "regtest"},
"P2SH32-REGTEST": {"prefix": "bchreg", "version_bit": 11, "network": "regtest"},
"P2PKH-REGTEST": {"prefix": "bchreg", "version_bit": 0, "network": "regtest"},
"P2SH20-CATKN": {
"prefix": "bitcoincash",
"version_bit": 24,
"network": "mainnet",
},
"P2SH32-CATKN": {
"prefix": "bitcoincash",
"version_bit": 27,
"network": "mainnet",
},
"P2PKH-CATKN": {
"prefix": "bitcoincash",
"version_bit": 16,
"network": "mainnet",
},
"P2SH20-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 24,
"network": "testnet",
},
"P2SH32-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 27,
"network": "testnet",
},
"P2PKH-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 16,
"network": "testnet",
},
"P2SH20-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 24,
"network": "regtest",
},
"P2SH32-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 27,
"network": "regtest",
},
"P2PKH-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 16,
"network": "regtest",
},
}
VERSION_SUFFIXES = {"bitcoincash": "", "bchtest": "-TESTNET", "bchreg": "-REGTEST"}
ADDRESS_TYPES = {
0: "P2PKH",
8: "P2SH20",
11: "P2SH32",
16: "P2PKH-CATKN",
24: "P2SH20-CATKN",
27: "P2SH32-CATKN",
}
def __init__(self, version, payload):
if version not in Address.VERSIONS:
raise ValueError("Invalid address version provided")
self.version = version
self.payload = payload
self.prefix = Address.VERSIONS[self.version]["prefix"]
def __str__(self):
return (
f"version: {self.version}\npayload: {self.payload}\nprefix: {self.prefix}"
)
def __repr__(self):
return f"Address('{self.cash_address()}')"
def __eq__(self, other):
if isinstance(other, str):
return self.cash_address() == other
elif isinstance(other, Address):
return self.cash_address() == other.cash_address()
else:
raise ValueError(
"Address can be compared to a string address"
" or an instance of Address"
)
def cash_address(self):
"""
Generate CashAddr of the Address
:rtype: ``str``
"""
version_bit = Address.VERSIONS[self.version]["version_bit"]
payload = [version_bit] + list(self.payload)
payload = convertbits(payload, 8, 5)
checksum = calculate_checksum(self.prefix, payload)
return self.prefix + ":" + b32encode(payload + checksum)
@staticmethod
def from_string(address):
"""
Generate Address from a cashaddress string
:param address: The cashaddress string
:type address: ``str``
:returns: Instance of :class:~bitcash.cashaddress.Address
"""
try:
address = str(address)
except Exception:
raise ValueError("Expected string as input")
if address.upper() != address and address.lower() != address:
raise ValueError(
"Cash address contains uppercase and lowercase characters: " + address
)
address = address.lower()
colon_count = address.count(":")
if colon_count == 0:
raise ValueError("Cash address is missing prefix")
if colon_count > 1:
raise ValueError("Cash address contains more than one colon character")
prefix, base32string = address.split(":")
decoded = b32decode(base32string)
if not verify_checksum(prefix, decoded):
raise ValueError(
"Bad cash address checksum for address {}".format(address)
)
converted = convertbits(decoded, 5, 8)
try:
version = Address.ADDRESS_TYPES[converted[0]]
except Exception:
raise ValueError("Could not determine address version")
version += Address.VERSION_SUFFIXES[prefix]
payload = converted[1:-6]
return Address(version, payload)
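# Hedged usage sketch (added for illustration, not part of the original file):
# round-trip a mainnet P2PKH CashAddr through from_string() and cash_address().
def _demo_cashaddress():  # hypothetical helper
    addr = Address.from_string("bitcoincash:qzfyvx77v2pmgc0vulwlfkl3uzjgh5gnmqk5hhyaa6")
    assert addr.version == "P2PKH"       # version bit 0 on the mainnet prefix
    assert len(addr.payload) == 20       # the 20-byte hash160 of the public key
    assert addr.cash_address() == "bitcoincash:qzfyvx77v2pmgc0vulwlfkl3uzjgh5gnmqk5hhyaa6"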
class TestFrameworkScript(unittest.TestCase):
def test_base58encodedecode(self):
def check_cashaddress(address: str):
self.assertEqual(Address.from_string(address).cash_address(), address)
check_cashaddress("bitcoincash:qzfyvx77v2pmgc0vulwlfkl3uzjgh5gnmqk5hhyaa6")

View File

@@ -1,43 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from basicswap.contrib.test_framework.script import CScriptOp
OP_TXINPUTCOUNT = CScriptOp(0xc3)
OP_1 = CScriptOp(0x51)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_TXOUTPUTCOUNT = CScriptOp(0xc4)
OP_0 = CScriptOp(0x00)
OP_UTXOVALUE = CScriptOp(0xc6)
OP_OUTPUTVALUE = CScriptOp(0xcc)
OP_SUB = CScriptOp(0x94)
OP_UTXOTOKENCATEGORY = CScriptOp(0xce)
OP_OUTPUTTOKENCATEGORY = CScriptOp(0xd1)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_UTXOTOKENCOMMITMENT = CScriptOp(0xcf)
OP_OUTPUTTOKENCOMMITMENT = CScriptOp(0xd2)
OP_UTXOTOKENAMOUNT = CScriptOp(0xd0)
OP_OUTPUTTOKENAMOUNT = CScriptOp(0xd3)
OP_INPUTSEQUENCENUMBER = CScriptOp(0xcb)
OP_NOTIF = CScriptOp(0x64)
OP_OUTPUTBYTECODE = CScriptOp(0xcd)
OP_OVER = CScriptOp(0x78)
OP_CHECKDATASIG = CScriptOp(0xba)
OP_CHECKDATASIGVERIFY = CScriptOp(0xbb)
OP_ELSE = CScriptOp(0x67)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_DROP = CScriptOp(0x75)
OP_EQUAL = CScriptOp(0x87)
OP_ENDIF = CScriptOp(0x68)
OP_HASH256 = CScriptOp(0xaa)
OP_PUSHBYTES_32 = CScriptOp(0x20)
OP_DUP = CScriptOp(0x76)
OP_HASH160 = CScriptOp(0xa9)
OP_CHECKSIG = CScriptOp(0xac)
OP_SHA256 = CScriptOp(0xa8)
OP_VERIFY = CScriptOp(0x69)

View File

@@ -1,191 +0,0 @@
"""
Copyright (c) 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
import socket
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
Exception.__init__(self, errmsg)
self.error = rpc_error
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
(user, passwd) = (self.__url.username, self.__url.password)
try:
user = user.encode('utf8')
except AttributeError:
pass
try:
passwd = passwd.encode('utf8')
except AttributeError:
pass
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except httplib.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except (BrokenPipeError, ConnectionResetError):
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
# ConnectionResetError happens on FreeBSD with Python 3.4
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def __call__(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
postdata = json.dumps({'version': '1.1',
'method': self._service_name,
'params': args or argsn,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> "+postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
try:
http_response = self.__conn.getresponse()
except socket.timeout as e:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException({
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
if "error" in response and response["error"] is None:
log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- "+responsedata)
return response
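# Hedged usage sketch (added for illustration, not part of the original file):
# attribute access on the proxy builds the RPC method name and __call__ sends it.
# The URL, port and credentials below are assumptions, not values from this repo.
def _demo_authproxy():  # hypothetical helper; requires a reachable node to actually run
    rpc = AuthServiceProxy("http://rpcuser:rpcpass@127.0.0.1:8332")
    print(rpc.getblockcount())                   # single call
    print(rpc.getblock(rpc.getbestblockhash()))  # chained calls reuse the connection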

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python3
#
# bignum.py
#
# This file is copied from python-bitcoinlib.
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# bitcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
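# Hedged worked example (added for illustration, not part of the original file):
# bn2vch() yields the little-endian, sign-bit-in-top-byte encoding used by
# script numbers, and vch2bn() inverts it.
def _demo_bignum():  # hypothetical helper
    assert bn2vch(1) == b'\x01'
    assert bn2vch(-1) == b'\x81'        # sign bit set in the most significant byte
    assert bn2vch(256) == b'\x00\x01'   # little endian
    assert vch2bn(b'\x81') == -1        # round trip back to an int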

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
This module contains utilities for doing coverage analysis on the RPC
interface.
It provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `bitcoin-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w', encoding='utf8') as f:
f.writelines(list(commands))
return True
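# Hedged usage sketch (added for illustration, not part of the original module):
# wrap a proxy so every RPC method it invokes is appended to a per-process file.
def _demo_coverage(dirname, node_number, proxy):  # hypothetical helper; arguments are assumptions
    logfile = get_filename(dirname, node_number)
    wrapped = AuthServiceProxyWrapper(proxy, coverage_logfile=logfile)
    wrapped.getblockcount()  # "getblockcount" is written to the coverage logfile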

File diff suppressed because it is too large

View File

@@ -1,943 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# script.py
#
# This file is modified from python-bitcoinlib.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import struct
from .bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(absvalue & 0xff)
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
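# Hedged worked example (added for illustration, not part of the original file):
# CScriptNum.encode() emits a length-prefixed, little-endian, sign-magnitude
# push, the format used for e.g. BIP34 block heights in coinbase scripts.
def _demo_scriptnum():  # hypothetical helper
    assert CScriptNum.encode(CScriptNum(0)) == b''             # zero is the empty push
    assert CScriptNum.encode(CScriptNum(1)) == b'\x01\x01'
    assert CScriptNum.encode(CScriptNum(-1)) == b'\x01\x81'    # sign bit in the top byte
    assert CScriptNum.encode(CScriptNum(128)) == b'\x02\x80\x00'  # extra byte keeps 128 positive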
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means that indexing does *not* work - you'll get an index by
byte rather than opcode. This format was chosen for efficiency so that the
general case would not require creating a lot of little CScriptOP objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, int):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as
determining the exact opcode byte indexes. (sop_idx)
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOP instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return b"x('%s')" % hexlify(o).decode('ascii')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
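# Hedged usage sketch (added for illustration, not part of the original file):
# CScript coerces opcodes, ints and bytes into one serialized script, and
# 'cooked' iteration decodes it back into opcodes and push data.
def _demo_cscript():  # hypothetical helper; the zeroed hash160 is an arbitrary example
    script = CScript([OP_DUP, OP_HASH160, b'\x00' * 20, OP_EQUALVERIFY, OP_CHECKSIG])
    decoded = list(script)
    assert decoded[0] == OP_DUP
    assert decoded[2] == b'\x00' * 20   # push data comes back out as bytes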
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut(-1))
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
hashPrevouts = 0
hashSequence = 0
hashOutputs = 0
if not (hashtype & SIGHASH_ANYONECANPAY):
serialize_prevouts = bytes()
for i in txTo.vin:
serialize_prevouts += i.prevout.serialize()
hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_sequence = bytes()
for i in txTo.vin:
serialize_sequence += struct.pack("<I", i.nSequence)
hashSequence = uint256_from_str(hash256(serialize_sequence))
if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_outputs = bytes()
for o in txTo.vout:
serialize_outputs += o.serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
serialize_outputs = txTo.vout[inIdx].serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
ss = bytes()
ss += struct.pack("<i", txTo.nVersion)
ss += ser_uint256(hashPrevouts)
ss += ser_uint256(hashSequence)
ss += txTo.vin[inIdx].prevout.serialize()
ss += ser_string(script)
ss += struct.pack("<q", amount)
ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
ss += ser_uint256(hashOutputs)
ss += struct.pack("<i", txTo.nLockTime)
ss += struct.pack("<I", hashtype)
return hash256(ss)
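# Hedged note (added for illustration, not part of the original file): sighash
# types combine a base mode with the ANYONECANPAY flag; masking with 0x1f, as
# SignatureHash() does above, recovers the base mode.
def _demo_sighash_flags():  # hypothetical helper
    hashtype = SIGHASH_ALL | SIGHASH_ANYONECANPAY
    assert hashtype == 0x81
    assert hashtype & 0x1f == SIGHASH_ALL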

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# siphash.py - Specialized SipHash-2-4 implementations
#
# This implements SipHash-2-4 for 256-bit integers.
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
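# Hedged usage sketch (added for illustration, not part of the original file):
# siphash256() keys two 64-bit values and hashes a 256-bit integer into a
# 64-bit digest; the key and message below are arbitrary example values.
def _demo_siphash():  # hypothetical helper
    message = int.from_bytes(b'\x01' * 32, 'little')
    digest = siphash256(0x0706050403020100, 0x0F0E0D0C0B0A0908, message)
    assert 0 <= digest < 2 ** 64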

View File

@@ -1,841 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2014-2017 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import tempfile
import time
import re
import errno
import logging
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
COVERAGE_DIR = None
logger = logging.getLogger("TestFramework.utils")
# The maximum number of nodes a single test can spawn
MAX_NODES = 15
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
BITCOIND_PROC_WAIT_TIMEOUT = 60
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
#Set Mocktime default to OFF.
#MOCKTIME is only needed for scripts that use the
#cached version of the blockchain. If the cached
#version of the blockchain is used without MOCKTIME
#then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
#For backward compatibility of the python scripts
#with previous versions of the cache, set MOCKTIME
#to Jan 1, 2014 + (201 * 10 * 60)
global MOCKTIME
MOCKTIME = 1414776313 + (201 * 10 * 60)
def set_mocktime(t):
global MOCKTIME
MOCKTIME = t
def disable_mocktime():
global MOCKTIME
MOCKTIME = 0
def get_mocktime():
return MOCKTIME
def enable_coverage(dirname):
"""Maintain a log of which RPC calls are made during testing."""
global COVERAGE_DIR
COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
COVERAGE_DIR, node_number) if COVERAGE_DIR else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def get_evoznsync_status(node):
result = node.evoznsync("status")
return result['IsSynced']
def wait_to_sync(node, fast_znsync=False):
tm = 0
synced = False
while tm < 30:
synced = get_evoznsync_status(node)
if synced:
return
time.sleep(0.2)
if fast_znsync:
# skip mnsync states
node.evoznsync("next")
tm += 0.2
assert(synced)
def p2p_port(n):
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
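# Hedged worked example (added for illustration, not part of the original file):
# with PortSeed.n == 1 the shared offset is (15 * 1) % 4984 == 15, so node 0
# gets p2p port 11000 + 0 + 15 and rpc port 11000 + 5000 + 0 + 15.
def _demo_ports():  # hypothetical helper; PortSeed.n == 1 is an assumption
    PortSeed.n = 1
    assert p2p_port(0) == 11015
    assert rpc_port(0) == 16015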
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
sync_blocks needs to be called with an rpc_connections set that has at least
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
# Use getblockcount() instead of waitforblockheight() to determine the
# initial max height because the two RPCs look at different internal global
# variables (chainActive vs latestBlock) and the former gets updated
# earlier.
maxheight = max(x.getblockcount() for x in rpc_connections)
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
if all(t["height"] == maxheight for t in tips):
if all(t["hash"] == tips[0]["hash"] for t in tips):
return
raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
"".join("\n {!r}".format(tip) for tip in tips)))
time.sleep(wait)
cur_time = time.time()
raise AssertionError("Block sync to height {} timed out:{}".format(
maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_znodes(rpc_connections, *, timeout=60):
"""
Wait until every node's znsync status reports synced.
"""
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
statuses = [r.znsync("status") for r in rpc_connections]
if all(stat["IsSynced"] == True for stat in statuses):
return
cur_time = time.time()
raise AssertionError("Znode sync failed.")
def sync_chain(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same best block
"""
while timeout > 0:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash == [best_hash[0]]*len(best_hash):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
def sync_znodes(rpc_connections, fast_mnsync=False):  # Note: redefines the timeout-based sync_znodes above; this variant waits via wait_to_sync
for node in rpc_connections:
wait_to_sync(node, fast_mnsync)
bitcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
rpc_u, rpc_p = rpc_auth_pair(n)
with open(os.path.join(datadir, "firo.conf"), 'w', encoding='utf8') as f:
f.write("regtest=1\n")
f.write("rpcuser=" + rpc_u + "\n")
f.write("rpcpassword=" + rpc_p + "\n")
f.write("port="+str(p2p_port(n))+"\n")
f.write("rpcport="+str(rpc_port(n))+"\n")
f.write("listenonion=0\n")
return datadir
def rpc_auth_pair(n):
return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)
def rpc_url(i, rpchost=None):
rpc_u, rpc_p = rpc_auth_pair(i)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
def wait_for_bitcoind_start(process, url, i):
'''
Wait for firod to start. This means that RPC is accessible and fully initialized.
Raise an exception if firod exits during initialization.
'''
while True:
if process.poll() is not None:
raise Exception('firod exited with status %i during initialization' % process.returncode)
try:
rpc = get_rpc_proxy(url, i)
blocks = rpc.getblockcount()
break # break out of loop on success
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
time.sleep(0.25)
def initialize_chain(test_dir, num_nodes, cachedir):
"""
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache
"""
assert num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))):
create_cache = True
break
if create_cache:
#find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(os.path.join(cachedir,"node"+str(i))):
shutil.rmtree(os.path.join(cachedir,"node"+str(i)))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
datadir=initialize_datadir(cachedir, i)
args = [ os.getenv("FIROD", "firod"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
bitcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: bitcoind started, waiting for RPC to come up")
wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: RPC successfully started")
rpcs = []
for i in range(MAX_NODES):
try:
rpcs.append(get_rpc_proxy(rpc_url(i), i))
except Exception:
sys.stderr.write("Error connecting to " + rpc_url(i) + "\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
enable_mocktime()
block_time = get_mocktime() - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
rpcs[peer].generate(1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
disable_mocktime()
for i in range(MAX_NODES):
try:
os.remove(log_filename(cachedir, i, "debug.log"))
os.remove(log_filename(cachedir, i, "db.log"))
os.remove(log_filename(cachedir, i, "peers.dat"))
os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
except OSError:
pass
for i in range(num_nodes):
from_dir = os.path.join(cachedir, "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
if from_dir != to_dir:
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in firo.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
match = re.match(r'(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, redirect_stderr=False, stderr=None):
"""
Start a bitcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("FIROD", "firod")
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-dandelion=0", "-usemnemonic=0", "-mocktime="+str(get_mocktime()) ]
#Useful args for debugging
# "screen", "--",
# "gdb", "-x", "/tmp/gdb_run", "--args",
# Don't try auto backups (they fail a lot when running tests)
args += [ "-createwalletbackups=0" ]
if extra_args is not None: args.extend(extra_args)
# Allow redirecting stderr to stdout when non-critical warnings/errors are expected on stderr
# Otherwise the whole test would be considered failed in such cases
if redirect_stderr:
stderr = sys.stdout
bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
logger.debug("start_node: firod started, waiting for RPC to come up")
url = rpc_url(i, rpchost)
wait_for_bitcoind_start(bitcoind_processes[i], url, i)
logger.debug("start_node: RPC successfully started")
proxy = get_rpc_proxy(url, i, timeout=timewait)
if COVERAGE_DIR:
coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start multiple bitcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
if binary is None: binary = [ None for _ in range(num_nodes) ]
rpcs = []
try:
for i in range(num_nodes):
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
except: # If one node failed to start, stop the others
stop_nodes(rpcs)
raise
return rpcs
def copy_datadir(from_node, to_node, dirname):
from_datadir = os.path.join(dirname, "node"+str(from_node), "regtest")
to_datadir = os.path.join(dirname, "node"+str(to_node), "regtest")
dirs = ["blocks", "chainstate", "evodb", "llmq"]
for d in dirs:
try:
src = os.path.join(from_datadir, d)
dst = os.path.join(to_datadir, d)
shutil.copytree(src, dst)
except:
pass
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def wait_node(i):
return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
assert_equal(return_code, 0)
del bitcoind_processes[i]
def stop_node(node, i, wait=True):
logger.debug("Stopping node %d" % i)
try:
node.stop()
except http.client.CannotSendRequest as e:
logger.exception("Unable to stop node")
if wait:
wait_node(i)
def stop_nodes(nodes, fast=True):
for i, node in enumerate(nodes):
stop_node(node, i, not fast)
if fast:
for i, node in enumerate(nodes):
wait_node(i)
assert not bitcoind_processes.values() # All connections must be gone now
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def connect_nodes(from_connection, node_num):
# Connect to the target node's p2p port
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def isolate_node(node, timeout=5):
node.setnetworkactive(False)
st = time.time()
while time.time() < st + timeout:
if node.getconnectioncount() == 0:
return
time.sleep(0.5)
raise AssertionError("disconnect_node timed out")
def reconnect_isolated_node(node, node_num):
node.setnetworkactive(True)
connect_nodes(node, node_num)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:"+e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:"+e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find == True:
assert_equal(expected, { })
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find == True:
num_matched = num_matched+1
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects were found %s"%(str(to_match)))
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
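# Minimal worked example (sketch): satoshi_round truncates to the 8 decimal
# places of satoshi precision used throughout these helpers.
assert satoshi_round("0.123456789") == Decimal("0.12345678")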
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5*count)+101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value/2)
outputs[addr2] = satoshi_round(send_value/2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
txid = node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
for i in range (512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
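# Hedged sanity sketch of the layout built above: a one-byte output count of
# 0x81 (129 outputs: the 128 OP_RETURN outputs plus the change output spliced
# in later), then per output 8 zero value bytes, an fd0402 length prefix
# (516-byte script) and the 1032 hex chars of the OP_RETURN script itself.
assert gen_return_txouts()[:2] == "81"
assert len(gen_return_txouts()) == 2 + 128 * (16 + 6 + 1032)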
def create_tx(node, coinbase, to_address, amount):
inputs = [{ "txid" : coinbase, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
def create_tx_multi_input(node, inputs, outputs):
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
for _ in range(num):
t = utxos.pop()
inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
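# The unsigned raw tx has a 46-byte prefix (version, input count, one input
# with an empty scriptSig) = 92 hex chars; the next byte is the output count,
# which txouts' leading 0x81 replaces so the 128 OP_RETURN outputs precede the
# original change output and locktime kept from rawtx[94:].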
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
# generate transactions of roughly 66k each;
# 14 of them together come close to the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def dumpprivkey_otac(node, address):
import re
error_text = ''
try:
return node.dumpprivkey(address)
except JSONRPCException as e:
error_text = e.error
else:
raise
otac_match = re.search("Your one time authorization code is: ([a-zA-Z0-9]+)", error_text['message'])
if not otac_match:
raise JSONRPCException(error_text)
return node.dumpprivkey(address, otac_match.groups()[0])
def get_znsync_status(node):
result = node.znsync("status")
return result['IsSynced']
def wait_to_sync_znodes(node, fast_znsync=False):
while True:
synced = get_znsync_status(node)
if synced:
break
time.sleep(0.2)
if fast_znsync:
# skip mnsync states
node.znsync("next")
def get_full_balance(node):
wallet_info = node.getwalletinfo()
return wallet_info["balance"] + wallet_info["immature_balance"] + wallet_info["unconfirmed_balance"]

View File

@@ -1,175 +0,0 @@
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("NavcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
Exception.__init__(self)
self.error = rpc_error
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
(user, passwd) = (self.__url.username, self.__url.password)
try:
user = user.encode('utf8')
except AttributeError:
pass
try:
passwd = passwd.encode('utf8')
except AttributeError:
pass
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except httplib.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except BrokenPipeError:
# Python 3.5+ raises this instead of BadStatusLine when the connection was reset
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
postdata = json.dumps({'version': '1.1',
'method': self._service_name,
'params': args,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> "+postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
http_response = self.__conn.getresponse()
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException({
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
if "error" in response and response["error"] is None:
log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- "+responsedata)
return response
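# Hedged usage sketch (not part of the original module): the URL below is a
# placeholder. Attribute access builds the RPC method name lazily; the request
# is only sent when the proxy is called, and JSON floats in the reply are
# parsed as Decimal.
def example_get_block_count(url="http://user:pass@127.0.0.1:8332"):
    rpc = AuthServiceProxy(url)
    return rpc.getblockcount()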

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python3
#
# bignum.py
#
# This file is copied from python-navcoinlib.
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# navcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
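# Hedged round-trip sketch: bn2vch serializes to the little-endian,
# sign-bit-in-top-byte form scripts use, and vch2bn reverses it.
for _v in (0, 1000, -1000):
    assert vch2bn(bn2vch(_v)) == _v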

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
This module contains utilities for doing coverage analysis on the RPC
interface.
It provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `navcoin-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w') as f:
f.writelines(list(commands))
return True
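# Hedged usage sketch (assumes an AuthServiceProxy instance is supplied by the
# caller, e.g. via util.get_rpc_proxy): wrap it so every RPC method name called
# during a test is appended to a per-process coverage log.
def example_wrap_for_coverage(proxy, dirname, node_index=0):
    return AuthServiceProxyWrapper(proxy, get_filename(dirname, node_index))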

File diff suppressed because it is too large

View File

@@ -1,943 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# script.py
#
# This file is modified from python-navcoinlib.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import struct
from .bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, e.g., for blockchain heights in coinbase scripts (BIP34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(absvalue & 0xff)
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
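# Hedged worked example: zero encodes to an empty push; other values serialize
# as a push-length byte followed by the minimal little-endian number (sign bit
# in the top byte), the encoding BIP34 uses for block heights in coinbase
# scriptSigs.
assert CScriptNum.encode(CScriptNum(0)) == b''
assert CScriptNum.encode(CScriptNum(1000)) == b'\x02\xe8\x03'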
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means indexing does *not* work by opcode - you'll get the raw
byte at that position instead. This format was chosen for efficiency so that the
general case would not require creating a lot of little CScriptOP objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, int):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as the exact
byte index (sop_idx) at which each opcode starts.
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOP instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return b"x('%s')" % hexlify(o).decode('ascii')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
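# Hedged usage sketch: CScript coerces opcodes, small ints and byte pushes, and
# iterating yields opcodes/pushdata rather than raw bytes. The 20-byte hash is
# a dummy stand-in for a real pubkey hash.
_dummy_h160 = b'\x00' * 20
_p2pkh = CScript([OP_DUP, OP_HASH160, _dummy_h160, OP_EQUALVERIFY, OP_CHECKSIG])
assert list(_p2pkh) == [OP_DUP, OP_HASH160, _dummy_h160, OP_EQUALVERIFY, OP_CHECKSIG]
assert _p2pkh.GetSigOpCount(True) == 1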
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
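# Hedged worked example: FindAndDelete drops exact serialized occurrences of
# `sig` from `script`; SignatureHash below relies on it to strip
# OP_CODESEPARATOR from the script being signed.
assert FindAndDelete(CScript([OP_1, OP_CODESEPARATOR, OP_2]),
                     CScript([OP_CODESEPARATOR])) == CScript([OP_1, OP_2])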
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut())
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
hashPrevouts = 0
hashSequence = 0
hashOutputs = 0
if not (hashtype & SIGHASH_ANYONECANPAY):
serialize_prevouts = bytes()
for i in txTo.vin:
serialize_prevouts += i.prevout.serialize()
hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_sequence = bytes()
for i in txTo.vin:
serialize_sequence += struct.pack("<I", i.nSequence)
hashSequence = uint256_from_str(hash256(serialize_sequence))
if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_outputs = bytes()
for o in txTo.vout:
serialize_outputs += o.serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
serialize_outputs = txTo.vout[inIdx].serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
ss = bytes()
ss += struct.pack("<i", txTo.nVersion)
ss += ser_uint256(hashPrevouts)
ss += ser_uint256(hashSequence)
ss += txTo.vin[inIdx].prevout.serialize()
ss += ser_string(script)
ss += struct.pack("<q", amount)
ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
ss += ser_uint256(hashOutputs)
ss += struct.pack("<i", txTo.nLockTime)
ss += struct.pack("<I", hashtype)
return hash256(ss)

View File

@@ -1,63 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
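# Hedged usage sketch: keys and value are arbitrary illustration inputs. The
# result is a 64-bit integer; Bitcoin Core uses this construction to derive
# compact-block short transaction IDs from 256-bit txids.
_digest = siphash256(0x0706050403020100, 0x0F0E0D0C0B0A0908, 1 << 200)
assert 0 <= _digest < (1 << 64)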

View File

@@ -1,700 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import time
import re
import errno
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
COVERAGE_DIR = None
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
NAVCOIND_PROC_WAIT_TIMEOUT = 60
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
#Set Mocktime default to OFF.
#MOCKTIME is only needed for scripts that use the
#cached version of the blockchain. If the cached
#version of the blockchain is used without MOCKTIME
#then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
#For backward compatibility of the python scripts
#with previous versions of the cache, set MOCKTIME
#to Jan 1, 2014 + (201 * 10 * 60)
global MOCKTIME
MOCKTIME = 1388534400 + (201 * 10 * 60)
def disable_mocktime():
global MOCKTIME
MOCKTIME = 0
def get_mocktime():
return MOCKTIME
def enable_coverage(dirname):
"""Maintain a log of which RPC calls are made during testing."""
global COVERAGE_DIR
COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
COVERAGE_DIR, node_number) if COVERAGE_DIR else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
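# Hedged worked example of the port layout above: with PortSeed.n == 1,
# p2p_port(0) evaluates to 11008 and rpc_port(0) to 16008, so each test
# process gets its own non-overlapping window of PORT_RANGE ports.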
def check_json_precision():
"""Make sure json library being used does not lose precision converting NAV values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def sync_blocks(rpc_connections, wait=1, timeout=60):
"""
Wait until everybody has the same tip
"""
while timeout > 0:
tips = [ x.getbestblockhash() for x in rpc_connections ]
if tips == [ tips[0] ]*len(tips):
#if every tip in tips is the same, the nodes are in sync
return True
time.sleep(wait)
timeout -= wait
raise AssertionError("Block sync failed")
def sync_mempools(rpc_connections, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
return True
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
navcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
rpc_u, rpc_p = rpc_auth_pair(n)
with open(os.path.join(datadir, "navcoin.conf"), 'w') as f:
f.write("devnet=1\n")
f.write("rpcuser=" + rpc_u + "\n")
f.write("rpcpassword=" + rpc_p + "\n")
f.write("port="+str(p2p_port(n))+"\n")
f.write("rpcport="+str(rpc_port(n))+"\n")
f.write("listenonion=0\n")
f.write("dandelion=0\n")
f.write("ntpminmeasures=-1\n")
f.write("torserver=0\n")
f.write("suppressblsctwarning=1\n")
return datadir
def rpc_auth_pair(n):
return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)
def rpc_url(i, rpchost=None):
rpc_u, rpc_p = rpc_auth_pair(i)
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, rpchost or '127.0.0.1', rpc_port(i))
def wait_for_navcoind_start(process, url, i):
'''
Wait for navcoind to start. This means that RPC is accessible and fully initialized.
Raise an exception if navcoind exits during initialization.
'''
polls_interval = 1.0 / 4
runtime = 60
while runtime > 0:
if process.poll() is not None:
raise Exception('navcoind exited with status %i during initialization' % process.returncode)
try:
# print('Checking RPC')
rpc = get_rpc_proxy(url, i)
blocks = rpc.getblockcount()
# print('RPC replied with blocks: %i' % blocks)
return # break out of loop on success
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
# else:
# print('Waiting for port')
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
# else:
# print('RPC in warmup')
time.sleep(polls_interval)
runtime -= polls_interval
raise Exception('navcoind RPC timeout')
def initialize_chain(test_dir, num_nodes):
"""
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache
"""
assert num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(os.path.join('cache', 'node'+str(i))):
create_cache = True
break
if create_cache:
#find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(os.path.join("cache","node"+str(i))):
shutil.rmtree(os.path.join("cache","node"+str(i)))
# Create cache directories, run navcoinds:
for i in range(MAX_NODES):
datadir=initialize_datadir("cache", i)
args = [ os.getenv("NAVCOIND", "navcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
navcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: navcoind started, waiting for RPC to come up")
wait_for_navcoind_start(navcoind_processes[i], rpc_url(i), i)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: RPC succesfully started")
rpcs = []
for i in range(MAX_NODES):
try:
rpcs.append(get_rpc_proxy(rpc_url(i), i))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
enable_mocktime()
block_time = get_mocktime() - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
slow_gen(rpcs[peer], 1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
wait_navcoinds()
disable_mocktime()
for i in range(MAX_NODES):
os.remove(log_filename("cache", i, "debug.log"))
os.remove(log_filename("cache", i, "db.log"))
os.remove(log_filename("cache", i, "peers.dat"))
os.remove(log_filename("cache", i, "fee_estimates.dat"))
for i in range(num_nodes):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in navcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
match = re.match(r'(\[[0-9a-fA-F:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start a navcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("NAVCOIND", "navcoind")
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-mocktime="+str(get_mocktime()) ]
if extra_args is not None: args.extend(extra_args)
navcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("start_node: navcoind started, waiting for RPC to come up")
url = rpc_url(i, rpchost)
wait_for_navcoind_start(navcoind_processes[i], url, i)
if os.getenv("PYTHON_DEBUG", ""):
print("start_node: RPC succesfully started")
proxy = get_rpc_proxy(url, i, timeout=timewait)
if COVERAGE_DIR:
coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
"""
Start multiple navcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
if binary is None: binary = [ None for _ in range(num_nodes) ]
rpcs = []
try:
for i in range(num_nodes):
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
except: # If one node failed to start, stop the others
stop_nodes(rpcs)
raise
return rpcs
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "devnet", logname)
def stop_node(node, i):
try:
node.stop()
except http.client.CannotSendRequest as e:
print("WARN: Unable to stop node: " + repr(e))
navcoind_processes[i].wait(timeout=NAVCOIND_PROC_WAIT_TIMEOUT)
del navcoind_processes[i]
def stop_nodes(nodes):
for node in nodes:
try:
node.stop()
except http.client.CannotSendRequest as e:
print("WARN: Unable to stop node: " + repr(e))
del nodes[:] # Emptying array closes connections as a side effect
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def wait_navcoinds():
# Wait for all navcoinds to cleanly exit
for navcoind in navcoind_processes.values():
navcoind.wait(timeout=NAVCOIND_PROC_WAIT_TIMEOUT)
navcoind_processes.clear()
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s NAV too low! (Should be %s NAV)"%(str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s NAV too high! (Should be %s NAV)"%(str(fee), str(target_fee)))
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc:
pass
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find == True:
assert_equal(expected, { })
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find == True:
num_matched = num_matched+1
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects were found %s"%(str(to_match)))
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
Test against error code and message if the rpc fails.
Returns whether a JSONRPCException was raised."""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
return True
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
return False
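# Minimal sketch (assumption: JSONRPCException stores its rpc_error dict on
# `.error`, as the handler above relies on):
def _always_fails():
    raise JSONRPCException({"code": -8, "message": "Block height out of range"})
assert try_rpc(-8, "out of range", _always_fails)
assert_raises_rpc_error(-8, "out of range", _always_fails)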
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
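# Quick check: rounding is always downward to 8 decimal places,
# e.g. satoshi_round("0.123456789") == Decimal("0.12345678").
assert satoshi_round("0.123456789") == Decimal("0.12345678")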
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5*count)+101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value/2)
outputs[addr2] = satoshi_round(send_value/2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
txid = node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
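# Usage sketch (hypothetical regtest `node` proxy):
# utxos = create_confirmed_utxos(Decimal("0.0001"), node, 50)
# assert len(utxos) >= 50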
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
for i in range (512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
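# Size check of the hex blob built above: one output-count byte, then 128 outputs
# of 8 value bytes + 3 length-prefix bytes ("fd0402" = 516) + a 516 byte script.
txouts_check = gen_return_txouts()
assert len(txouts_check) // 2 == 1 + 128 * (8 + 3 + 516)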
def create_tx(node, coinbase, to_address, amount):
inputs = [{ "txid" : coinbase, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, fee):
addr = node.getnewaddress()
txids = []
for i in range(len(utxos)):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr] = satoshi_round(send_value)
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def slow_gen(node, count, sleep = 0.1):
total = count
blocks = []
while total > 0:
now = min(total, 10)
blocks.extend(node.generate(now))
total -= now
time.sleep(sleep)
return blocks
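# Usage sketch (hypothetical regtest `node`): mine 25 blocks, 10 at a time,
# pausing briefly between batches so peers can keep up.
# block_hashes = slow_gen(node, 25)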

View File

@@ -51,6 +51,9 @@ MSG_TYPE_MASK = 0xffffffff >> 2
def sha256(s):
return hashlib.new('sha256', s).digest()
def ripemd160(s):
return hashlib.new('ripemd160', s).digest()
def hash256(s):
return sha256(sha256(s))
@@ -505,7 +508,7 @@ class CTransaction:
self.sha256 = tx.sha256
self.hash = tx.hash
def deserialize(self, f, allow_witness: bool = True):
def deserialize(self, f):
self.nVersion = struct.unpack("<h", f.read(2))[0]
self.nType = struct.unpack("<h", f.read(2))[0]
self.vin = deser_vector(f, CTxIn)

View File

@@ -1,21 +1,17 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2024 tecnovert
# Copyright (c) 2024 The Basicswap developers
# Copyright (c) 2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util.address import decodeAddress
from basicswap.contrib.mnemonic import Mnemonic
from mnemonic import Mnemonic
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
OP_HASH160,
OP_EQUALVERIFY,
OP_CHECKSIG,
OP_DUP, OP_HASH160, OP_EQUALVERIFY, OP_CHECKSIG
)
@@ -26,66 +22,41 @@ class DASHInterface(BTCInterface):
def __init__(self, coin_settings, network, swap_client=None):
super().__init__(coin_settings, network, swap_client)
self._wallet_passphrase = ""
self._wallet_passphrase = ''
self._have_checked_seed = False
self._wallet_v20_compatible = (
False
if not swap_client
else swap_client.getChainClientSettings(self.coin_type()).get(
"wallet_v20_compatible", False
)
)
def seedToMnemonic(self, key):
return Mnemonic('english').to_mnemonic(key)
def decodeAddress(self, address: str) -> bytes:
def initialiseWallet(self, key):
words = self.seedToMnemonic(key)
mnemonic_passphrase = ''
self.rpc_callback('upgradetohd', [words, mnemonic_passphrase, self._wallet_passphrase])
self._have_checked_seed = False
if self._wallet_passphrase != '':
self.unlockWallet(self._wallet_passphrase)
def decodeAddress(self, address):
return decodeAddress(address)[1:]
def getWalletSeedID(self) -> str:
hdseed: str = self.rpc_wallet("dumphdinfo")["hdseed"]
return self.getSeedHash(bytes.fromhex(hdseed)).hex()
def entropyToMnemonic(self, key: bytes) -> None:
return Mnemonic("english").to_mnemonic(key)
def initialiseWallet(self, key_bytes: bytes, restore_time: int = -1) -> None:
self._have_checked_seed = False
if self._wallet_v20_compatible:
self._log.warning("Generating wallet compatible with v20 seed.")
words = self.entropyToMnemonic(key_bytes)
mnemonic_passphrase = ""
self.rpc_wallet(
"upgradetohd", [words, mnemonic_passphrase, self._wallet_passphrase]
)
self._have_checked_seed = False
if self._wallet_passphrase != "":
self.unlockWallet(self._wallet_passphrase)
return
key_wif = self.encodeKey(key_bytes)
self.rpc_wallet("sethdseed", [True, key_wif])
def checkExpectedSeed(self, expect_seedid: str) -> bool:
self._expect_seedid_hex = expect_seedid
def checkExpectedSeed(self, key_hash):
try:
rv = self.rpc_wallet("dumphdinfo")
except Exception as e:
self._log.debug(f"DASH dumphdinfo failed {e}.")
return False
if rv["mnemonic"] != "":
entropy = Mnemonic("english").to_entropy(rv["mnemonic"].split(" "))
rv = self.rpc_callback('dumphdinfo')
entropy = Mnemonic('english').to_entropy(rv['mnemonic'].split(' '))
entropy_hash = self.getAddressHashFromKey(entropy)[::-1].hex()
have_expected_seed: bool = expect_seedid == entropy_hash
else:
have_expected_seed: bool = expect_seedid == self.getWalletSeedID()
self._have_checked_seed = True
return have_expected_seed
self._have_checked_seed = True
return entropy_hash == key_hash
except Exception as e:
self._log.warning('checkExpectedSeed failed: {}'.format(str(e)))
return False
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee, False, False, self._conf_target]
return self.rpc_wallet("sendtoaddress", params)
params = [addr_to, value, '', '', subfee, False, False, self._conf_target]
return self.rpc_callback('sendtoaddress', params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc_wallet("getwalletinfo")["balance"])
return self.make_int(self.rpc_callback('getwalletinfo')['balance'])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
@@ -95,65 +66,29 @@ class DASHInterface(BTCInterface):
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
self._log.info(f'BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}.')
return pay_fee
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc_wallet("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
rv = self.rpc_callback('gettransaction', [txid_hex])
except Exception as ex:
self._log.debug('findTxnByHash getrawtransaction failed: {}'.format(txid_hex))
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
if 'confirmations' in rv and rv['confirmations'] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv['blockhash'])['height']
return {'txid': txid_hex, 'amount': 0, 'height': block_height}
return None
def unlockWallet(self, password: str, check_seed: bool = True) -> None:
super().unlockWallet(password, check_seed)
if self._wallet_v20_compatible:
# Store password for initialiseWallet
self._wallet_passphrase = password
def unlockWallet(self, password: str):
super().unlockWallet(password)
# Store password for initialiseWallet
self._wallet_passphrase = password
if not self._have_checked_seed:
self._sc.checkWalletSeed(self.coin_type())
def lockWallet(self):
super().lockWallet()
self._wallet_passphrase = ""
def encryptWallet(
self, old_password: str, new_password: str, check_seed: bool = True
):
if old_password != "":
self.unlockWallet(old_password, check_seed=False)
seed_id_before: str = self.getWalletSeedID()
self.rpc_wallet("encryptwallet", [new_password])
if check_seed is False or seed_id_before == "Not found":
return
self.unlockWallet(new_password, check_seed=False)
seed_id_after: str = self.getWalletSeedID()
self.lockWallet()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
def changeWalletPassword(
self, old_password: str, new_password: str, check_seed_if_encrypt: bool = True
):
self._log.info("changeWalletPassword - {}".format(self.ticker()))
if old_password == "":
if self.isWalletEncrypted():
raise ValueError("Old password must be set")
return self.encryptWallet(old_password, new_password, check_seed_if_encrypt)
self.rpc_wallet("walletpassphrasechange", [old_password, new_password])
self._wallet_passphrase = ''

View File

@@ -1,5 +0,0 @@
from .dcr import DCRInterface
__all__ = [
"DCRInterface",
]

File diff suppressed because it is too large

View File

@@ -1,212 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import copy
from enum import IntEnum
from basicswap.util.crypto import blake256
from basicswap.util.integer import decode_compactsize, encode_compactsize
class TxSerializeType(IntEnum):
Full = 0
NoWitness = 1
OnlyWitness = 2
class SigHashType(IntEnum):
SigHashAll = 0x1
SigHashNone = 0x2
SigHashSingle = 0x3
SigHashAnyOneCanPay = 0x80
SigHashMask = 0x1F
class SignatureType(IntEnum):
STEcdsaSecp256k1 = 0
STEd25519 = 1
STSchnorrSecp256k1 = 2
class COutPoint:
__slots__ = ("hash", "n", "tree")
def __init__(self, hash=0, n=0, tree=0):
self.hash = hash
self.n = n
self.tree = tree
def get_hash(self) -> bytes:
return self.hash.to_bytes(32, "big")
class CTxIn:
__slots__ = (
"prevout",
"sequence",
"value_in",
"block_height",
"block_index",
"signature_script",
) # Witness
def __init__(self, prevout=COutPoint(), sequence=0):
self.prevout = prevout
self.sequence = sequence
self.value_in = -1
self.block_height = 0
self.block_index = 0xFFFFFFFF
self.signature_script = bytes()
class CTxOut:
__slots__ = ("value", "version", "script_pubkey")
def __init__(self, value=0, script_pubkey=bytes()):
self.value = value
self.version = 0
self.script_pubkey = script_pubkey
class CTransaction:
__slots__ = ("hash", "version", "vin", "vout", "locktime", "expiry")
def __init__(self, tx=None):
if tx is None:
self.version = 1
self.vin = []
self.vout = []
self.locktime = 0
self.expiry = 0
else:
self.version = tx.version
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.locktime = tx.locktime
self.expiry = tx.expiry
def deserialize(self, data: bytes, allow_witness: bool = True) -> None:
version = int.from_bytes(data[:4], "little")
self.version = version & 0xFFFF
ser_type: int = version >> 16
o = 4
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.NoWitness:
num_txin, nb = decode_compactsize(data, o)
o += nb
for i in range(num_txin):
txi = CTxIn()
txi.prevout = COutPoint()
txi.prevout.hash = int.from_bytes(data[o : o + 32], "little")
o += 32
txi.prevout.n = int.from_bytes(data[o : o + 4], "little")
o += 4
txi.prevout.tree = data[o]
o += 1
txi.sequence = int.from_bytes(data[o : o + 4], "little")
o += 4
self.vin.append(txi)
num_txout, nb = decode_compactsize(data, o)
o += nb
for i in range(num_txout):
txo = CTxOut()
txo.value = int.from_bytes(data[o : o + 8], "little")
o += 8
txo.version = int.from_bytes(data[o : o + 2], "little")
o += 2
script_bytes, nb = decode_compactsize(data, o)
o += nb
txo.script_pubkey = data[o : o + script_bytes]
o += script_bytes
self.vout.append(txo)
self.locktime = int.from_bytes(data[o : o + 4], "little")
o += 4
self.expiry = int.from_bytes(data[o : o + 4], "little")
o += 4
if ser_type == TxSerializeType.NoWitness:
return
num_wit_scripts, nb = decode_compactsize(data, o)
o += nb
if ser_type == TxSerializeType.OnlyWitness:
self.vin = [CTxIn() for _ in range(num_wit_scripts)]
else:
if num_wit_scripts != len(self.vin):
raise ValueError("non equal witness and prefix txin quantities")
for i in range(num_wit_scripts):
txi = self.vin[i]
txi.value_in = int.from_bytes(data[o : o + 8], "little")
o += 8
txi.block_height = int.from_bytes(data[o : o + 4], "little")
o += 4
txi.block_index = int.from_bytes(data[o : o + 4], "little")
o += 4
script_bytes, nb = decode_compactsize(data, o)
o += nb
txi.signature_script = data[o : o + script_bytes]
o += script_bytes
def serialize(self, ser_type=TxSerializeType.Full) -> bytes:
data = bytes()
version = (self.version & 0xFFFF) | (ser_type << 16)
data += version.to_bytes(4, "little")
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.NoWitness:
data += encode_compactsize(len(self.vin))
for txi in self.vin:
data += txi.prevout.hash.to_bytes(32, "little")
data += txi.prevout.n.to_bytes(4, "little")
data += txi.prevout.tree.to_bytes(1, "little")
data += txi.sequence.to_bytes(4, "little")
data += encode_compactsize(len(self.vout))
for txo in self.vout:
data += txo.value.to_bytes(8, "little")
data += txo.version.to_bytes(2, "little")
data += encode_compactsize(len(txo.script_pubkey))
data += txo.script_pubkey
data += self.locktime.to_bytes(4, "little")
data += self.expiry.to_bytes(4, "little")
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.OnlyWitness:
data += encode_compactsize(len(self.vin))
for txi in self.vin:
tc_value_in = (
txi.value_in & 0xFFFFFFFFFFFFFFFF
) # Convert negative values
data += tc_value_in.to_bytes(8, "little")
data += txi.block_height.to_bytes(4, "little")
data += txi.block_index.to_bytes(4, "little")
data += encode_compactsize(len(txi.signature_script))
data += txi.signature_script
return data
def TxHash(self) -> bytes:
return blake256(self.serialize(TxSerializeType.NoWitness))[::-1]
def TxHashWitness(self) -> bytes:
raise ValueError("todo")
def TxHashFull(self) -> bytes:
raise ValueError("todo")
def findOutput(tx, script_pk: bytes):
for i in range(len(tx.vout)):
if tx.vout[i].script_pubkey == script_pk:
return i
return None
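# Minimal sketch (not from the source) of the version-word packing assumed by
# serialize()/deserialize() above: the serialisation type occupies the high
# 16 bits of the 4 byte word, the transaction version the low 16 bits.
word = (1 & 0xFFFF) | (TxSerializeType.NoWitness << 16)
data = word.to_bytes(4, "little")
assert int.from_bytes(data, "little") & 0xFFFF == 1
assert int.from_bytes(data, "little") >> 16 == TxSerializeType.NoWitness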

View File

@@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import traceback
from basicswap.rpc import Jsonrpc
def callrpc(rpc_port, auth, method, params=[], host="127.0.0.1"):
try:
url = "http://{}@{}:{}/".format(auth, host, rpc_port)
x = Jsonrpc(url)
x.__handler = None
v = x.json_request(method, params)
x.close()
r = json.loads(v.decode("utf-8"))
except Exception as ex:
traceback.print_exc()
raise ValueError("RPC server error " + str(ex) + ", method: " + method)
if "error" in r and r["error"] is not None:
raise ValueError("RPC error " + str(r["error"]))
return r["result"]
def openrpc(rpc_port, auth, host="127.0.0.1"):
try:
url = "http://{}@{}:{}/".format(auth, host, rpc_port)
return Jsonrpc(url)
except Exception as ex:
traceback.print_exc()
raise ValueError("RPC error " + str(ex))
def make_rpc_func(port, auth, host="127.0.0.1"):
port = port
auth = auth
host = host
def rpc_func(method, params=None):
return callrpc(port, auth, method, params, host)
return rpc_func
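# Hypothetical usage sketch (port and credentials are placeholders; assumes a
# Decred RPC server is listening on localhost):
rpc = make_rpc_func(9109, "rpcuser:rpcpass")
# height = rpc("getblockcount")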

View File

@@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
OP_0 = 0x00
OP_DATA_1 = 0x01
OP_1NEGATE = 0x4F
OP_1 = 0x51
OP_IF = 0x63
OP_ELSE = 0x67
OP_ENDIF = 0x68
OP_DROP = 0x75
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_PUSHDATA1 = 0x4C
OP_PUSHDATA2 = 0x4D
OP_PUSHDATA4 = 0x4E
OP_HASH160 = 0xA9
OP_CHECKSIG = 0xAC
OP_CHECKMULTISIG = 0xAE
OP_CHECKSEQUENCEVERIFY = 0xB2
def push_script_data(data_array: bytearray, data: bytes) -> None:
len_data: int = len(data)
if len_data == 0 or (len_data == 1 and data[0] == 0):
data_array += bytes((OP_0,))
return
if len_data == 1 and data[0] <= 16:
data_array += bytes((OP_1 - 1 + data[0],))
return
if len_data == 1 and data[0] == 0x81:
data_array += bytes((OP_1NEGATE,))
return
if len_data < OP_PUSHDATA1:
data_array += len_data.to_bytes(1, "little")
elif len_data <= 0xFF:
data_array += bytes((OP_PUSHDATA1, len_data))
elif len_data <= 0xFFFF:
data_array += bytes((OP_PUSHDATA2,)) + len_data.to_bytes(2, "little")
else:
data_array += bytes((OP_PUSHDATA4,)) + len_data.to_bytes(4, "little")
data_array += data
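# Encoding examples for the rules above (values illustrative only):
buf = bytearray()
push_script_data(buf, b"\x05")       # small integer -> single opcode OP_5
assert buf == bytes((OP_1 - 1 + 5,))
buf = bytearray()
push_script_data(buf, bytes(80))     # 80 > 75 bytes -> OP_PUSHDATA1 + length byte
assert buf[:2] == bytes((OP_PUSHDATA1, 80))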

View File

@@ -1,68 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import os
import select
import subprocess
def createDCRWallet(args, hex_seed, logging, delay_event):
logging.info("Creating DCR wallet")
(pipe_r, pipe_w) = os.pipe() # subprocess.PIPE is buffered, blocks when read
if os.name == "nt":
str_args = " ".join(args)
p = subprocess.Popen(
str_args, shell=True, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w
)
else:
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w)
def readOutput():
buf = os.read(pipe_r, 1024).decode("utf-8")
response = None
if "Opened wallet" in buf:
pass
elif "Use the existing configured private passphrase" in buf:
response = b"y\n"
elif "Do you want to add an additional layer of encryption" in buf:
response = b"n\n"
elif "Do you have an existing wallet seed" in buf:
response = b"y\n"
elif "Enter existing wallet seed" in buf:
response = (hex_seed + "\n").encode("utf-8")
elif "Seed input successful" in buf:
pass
elif "Upgrading database from version" in buf:
pass
elif "Ticket commitments db upgrade done" in buf:
pass
elif "The wallet has been created successfully" in buf:
pass
else:
raise ValueError(f"Unexpected output: {buf}")
if response is not None:
p.stdin.write(response)
p.stdin.flush()
try:
while p.poll() is None:
if os.name == "nt":
readOutput()
delay_event.wait(0.1)
continue
while len(select.select([pipe_r], [], [], 0)[0]) == 1:
readOutput()
delay_event.wait(0.1)
except Exception as e:
logging.error(f"dcrwallet --create failed: {e}")
finally:
if p.poll() is None:
p.terminate()
os.close(pipe_r)
os.close(pipe_w)
p.stdin.close()
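# Hypothetical usage sketch; the dcrwallet path and flags below are illustrative
# only and not taken from the source:
# import logging, threading
# args = ["/opt/decred/dcrwallet", "--create", "--appdata=/tmp/dcr_wallet"]
# createDCRWallet(args, "00" * 32, logging, threading.Event())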

View File

@@ -1,62 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 The BasicSwap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util.crypto import hash160
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
OP_CHECKSIG,
OP_HASH160,
OP_EQUAL,
OP_EQUALVERIFY,
)
class DOGEInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.DOGE
@staticmethod
def est_lock_tx_vsize() -> int:
return 192
@staticmethod
def xmr_swap_b_lock_spend_tx_vsize() -> int:
return 192
def __init__(self, coin_settings, network, swap_client=None):
super(DOGEInterface, self).__init__(coin_settings, network, swap_client)
def getScriptDest(self, script: bytearray) -> bytearray:
# P2SH
script_hash = hash160(script)
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def encodeScriptDest(self, script_dest: bytes) -> str:
# Extract hash from script
script_hash = script_dest[2:-1]
return self.sh_to_address(script_hash)
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
return pay_fee
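# Worked example of the fee formula above with illustrative numbers
# (fee_rate in smallest units per kvB, size in vbytes):
fee_rate = 1000
size = 192 + 107     # est_lock_tx_vsize() plus the 107 byte allowance
assert round(fee_rate * size / 1000) == 299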

View File

@@ -1,34 +1,26 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import hashlib
import random
from .btc import BTCInterface, find_vout_for_address_from_txobj
from basicswap.util import (
i2b,
ensure,
)
from basicswap.rpc import make_rpc_func
from basicswap.util.crypto import hash160
from basicswap.util.address import decodeAddress
from basicswap.chainparams import Coins
from basicswap.interface.contrib.firo_test_framework.script import (
from basicswap.util.address import decodeAddress
from basicswap.contrib.test_framework.script import (
CScript,
OP_0,
OP_DUP,
OP_EQUAL,
OP_HASH160,
OP_CHECKSIG,
OP_EQUALVERIFY,
hash160,
)
from basicswap.interface.contrib.firo_test_framework.mininode import (
CBlock,
FromHex,
from basicswap.contrib.test_framework.messages import (
CTransaction,
)
@@ -38,155 +30,96 @@ class FIROInterface(BTCInterface):
def coin_type():
return Coins.FIRO
def __init__(self, coin_settings, network, swap_client=None):
super(FIROInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
def getExchangeName(self, exchange_name):
return 'zcoin'
if "wallet_name" in coin_settings:
raise ValueError(f"Invalid setting for {self.coin_name()}: wallet_name")
def getExchangeName(self, exchange_name: str) -> str:
return "zcoin"
def initialiseWallet(self, key, restore_time: int = -1):
def initialiseWallet(self, key):
# load with -hdseed= parameter
pass
def checkWallets(self) -> int:
return 1
def encryptWallet(self, password: str, check_seed: bool = True):
# Watchonly wallets are not encrypted
# Firo shuts down after encryptwallet
seed_id_before: str = self.getWalletSeedID() if check_seed else "Not found"
self.rpc_wallet("encryptwallet", [password])
if check_seed is False or seed_id_before == "Not found":
return
seed_id_after: str = self.getWalletSeedID()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
def getNewAddress(self, use_segwit, label="swap_receive"):
return self.rpc("getnewaddress", [label])
# addr_plain = self.rpc('getnewaddress', [label])
# return self.rpc('addwitnessaddress', [addr_plain])
def getNewAddress(self, use_segwit, label='swap_receive'):
return self.rpc_callback('getnewaddress', [label])
# addr_plain = self.rpc_callback('getnewaddress', [label])
# return self.rpc_callback('addwitnessaddress', [addr_plain])
def decodeAddress(self, address):
return decodeAddress(address)[1:]
def encodeSegwitAddress(self, script):
raise ValueError("TODO")
raise ValueError('TODO')
def decodeSegwitAddress(self, addr):
raise ValueError("TODO")
raise ValueError('TODO')
def isWatchOnlyAddress(self, address):
addr_info = self.rpc("validateaddress", [address])
return addr_info["iswatchonly"]
addr_info = self.rpc_callback('validateaddress', [address])
return addr_info['iswatchonly']
def isAddressMine(self, address: str, or_watch_only: bool = False) -> bool:
addr_info = self.rpc("validateaddress", [address])
addr_info = self.rpc_callback('validateaddress', [address])
if not or_watch_only:
return addr_info["ismine"]
return addr_info["ismine"] or addr_info["iswatchonly"]
return addr_info['ismine']
return addr_info['ismine'] or addr_info['iswatchonly']
def getSCLockScriptAddress(self, lock_script: bytes) -> str:
def getSCLockScriptAddress(self, lock_script):
lock_tx_dest = self.getScriptDest(lock_script)
address = self.encodeScriptDest(lock_tx_dest)
if not self.isAddressMine(address, or_watch_only=True):
# Expects P2WSH nested in BIP16_P2SH
self.rpc("importaddress", [lock_tx_dest.hex(), "bid lock", False, True])
ro = self.rpc_callback('importaddress', [lock_tx_dest.hex(), 'bid lock', False, True])
addr_info = self.rpc_callback('validateaddress', [address])
return address
def getLockTxHeight(
self,
txid,
dest_address,
bid_amount,
rescan_from,
find_index: bool = False,
vout: int = -1,
):
def getLockTxHeightFiro(self, txid, lock_script, bid_amount, rescan_from, find_index=False):
# Add watchonly address and rescan if required
lock_tx_dest = self.getScriptDest(lock_script)
dest_address = self.encodeScriptDest(lock_tx_dest)
if not self.isAddressMine(dest_address, or_watch_only=True):
self.importWatchOnlyAddress(dest_address, "bid")
self._log.info(
"Imported watch-only addr: {}".format(self._log.addr(dest_address))
)
self._log.info(
"Rescanning {} chain from height: {}".format(
self.coin_name(), rescan_from
)
)
self.rescanBlockchainForAddress(rescan_from, dest_address)
self.rpc_callback('importaddress', [lock_tx_dest.hex(), 'bid lock', False, True])
self._log.info('Imported watch-only addr: {}'.format(dest_address))
self._log.info('Rescanning {} chain from height: {}'.format(self.coin_name(), rescan_from))
self.rpc_callback('rescanblockchain', [rescan_from])
return_txid = True if txid is None else False
if txid is None:
txns = self.rpc(
"listunspent",
[
0,
9999999,
[
dest_address,
],
],
)
txns = self.rpc_callback('listunspent', [0, 9999999, [dest_address, ]])
for tx in txns:
if self.make_int(tx["amount"]) == bid_amount:
txid = bytes.fromhex(tx["txid"])
if self.make_int(tx['amount']) == bid_amount:
txid = bytes.fromhex(tx['txid'])
break
if txid is None:
return None
try:
tx = self.rpc("gettransaction", [txid.hex()])
tx = self.rpc_callback('gettransaction', [txid.hex()])
block_height = 0
if "blockhash" in tx:
block_header = self.rpc("getblockheader", [tx["blockhash"]])
block_height = block_header["height"]
if 'blockhash' in tx:
block_header = self.rpc_callback('getblockheader', [tx['blockhash']])
block_height = block_header['height']
rv = {
"depth": 0 if "confirmations" not in tx else tx["confirmations"],
"height": block_height,
}
'depth': 0 if 'confirmations' not in tx else tx['confirmations'],
'height': block_height}
except Exception as e:
self._log.debug(
"getLockTxHeight gettransaction failed: %s, %s", txid.hex(), str(e)
)
self._log.debug('getLockTxHeight gettransaction failed: %s, %s', txid.hex(), str(e))
return None
if find_index:
tx_obj = self.rpc("decoderawtransaction", [tx["hex"]])
rv["index"] = find_vout_for_address_from_txobj(tx_obj, dest_address)
tx_obj = self.rpc_callback('decoderawtransaction', [tx['hex']])
rv['index'] = find_vout_for_address_from_txobj(tx_obj, dest_address)
if return_txid:
rv["txid"] = txid.hex()
rv['txid'] = txid.hex()
return rv
def createSCLockTx(
self, value: int, script: bytearray, vkbv: bytes = None
) -> bytes:
def createSCLockTx(self, value: int, script: bytearray, vkbv: bytes = None) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vout.append(self.txoType()(value, self.getScriptDest(script)))
@@ -197,278 +130,80 @@ class FIROInterface(BTCInterface):
return self.fundTx(tx_bytes, feerate)
def signTxWithWallet(self, tx):
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
rv = self.rpc_callback('signrawtransaction', [tx.hex()])
return bytes.fromhex(rv['hex'])
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
def createRawFundedTransaction(self, addr_to: str, amount: int, sub_fee: bool = False, lock_unspents: bool = True) -> str:
txn = self.rpc_callback('createrawtransaction', [[], {addr_to: self.format_amount(amount)}])
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
self._log.debug(f'Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}')
options = {
"lockUnspents": lock_unspents,
"feeRate": fee_rate,
'lockUnspents': lock_unspents,
'feeRate': fee_rate,
}
if sub_fee:
options["subtractFeeFromOutputs"] = [
0,
]
return self.rpc("fundrawtransaction", [txn, options])["hex"]
options['subtractFeeFromOutputs'] = [0,]
return self.rpc_callback('fundrawtransaction', [txn, options])['hex']
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
return self.rpc_callback('signrawtransaction', [txn_funded])['hex']
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getScriptDest(self, script: bytearray) -> bytearray:
# P2SH
# P2WSH nested in BIP16_P2SH
script_hash = hash160(script)
assert len(script_hash) == 20
script_hash = hashlib.sha256(script).digest()
assert len(script_hash) == 32
script_hash_hash = hash160(script_hash)
assert len(script_hash_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
return CScript([OP_HASH160, script_hash_hash, OP_EQUAL])
def getSeedHash(self, seed: bytes) -> bytes:
def getSeedHash(self, seed) -> bytes:
return hash160(seed)[::-1]
def encodeScriptDest(self, script_dest: bytes) -> str:
def encodeScriptDest(self, script):
# Extract hash from script
script_hash = script_dest[2:-1]
script_hash = script[2:-1]
return self.sh_to_address(script_hash)
def getDestForScriptHash(self, script_hash):
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getScriptScriptSig(self, script):
return CScript([OP_0, hashlib.sha256(script).digest()])
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee]
return self.rpc("sendtoaddress", params)
params = [addr_to, value, '', '', subfee]
return self.rpc_callback('sendtoaddress', params)
def getWalletSeedID(self):
return self.rpc("getwalletinfo")["hdmasterkeyid"]
return self.rpc_callback('getwalletinfo')['hdmasterkeyid']
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
return self.make_int(self.rpc_callback('getwalletinfo')['balance'])
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
self._log.info(f'BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}.')
return pay_fee
def signTxWithKey(self, tx: bytes, key: bytes) -> bytes:
key_wif = self.encodeKey(key)
rv = self.rpc(
"signrawtransaction",
[
tx.hex(),
[],
[
key_wif,
],
],
)
return bytes.fromhex(rv["hex"])
rv = self.rpc_callback('signrawtransaction', [tx.hex(), [], [key_wif, ]])
return bytes.fromhex(rv['hex'])
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
rv = self.rpc_callback('gettransaction', [txid_hex])
except Exception as ex:
self._log.debug('findTxnByHash getrawtransaction failed: {}'.format(txid_hex))
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
if 'confirmations' in rv and rv['confirmations'] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv['blockhash'])['height']
return {'txid': txid_hex, 'amount': 0, 'height': block_height}
return None
def getProofOfFunds(self, amount_for, extra_commit_bytes):
# TODO: Lock unspent and use same output/s to fund bid
unspents_by_addr = dict()
unspents = self.rpc("listunspent")
for u in unspents:
if u["spendable"] is not True:
continue
if u["address"] not in unspents_by_addr:
unspents_by_addr[u["address"]] = {"total": 0, "utxos": []}
utxo_amount: int = self.make_int(u["amount"], r=1)
unspents_by_addr[u["address"]]["total"] += utxo_amount
unspents_by_addr[u["address"]]["utxos"].append(
(utxo_amount, u["txid"], u["vout"])
)
max_utxos: int = 4
viable_addrs = []
for addr, data in unspents_by_addr.items():
if data["total"] >= amount_for:
# Sort from largest to smallest amount
sorted_utxos = sorted(data["utxos"], key=lambda x: x[0])
# Max outputs required to reach amount_for
utxos_req: int = 0
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
utxos_req += 1
if sum_value >= amount_for:
break
if utxos_req <= max_utxos:
viable_addrs.append(addr)
continue
ensure(
len(viable_addrs) > 0, "Could not find address with enough funds for proof"
)
sign_for_addr: str = random.choice(viable_addrs)
self._log.debug("sign_for_addr %s", sign_for_addr)
prove_utxos = []
sorted_utxos = sorted(
unspents_by_addr[sign_for_addr]["utxos"], key=lambda x: x[0]
)
hasher = hashlib.sha256()
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
outpoint = (bytes.fromhex(utxo[1]), utxo[2])
prove_utxos.append(outpoint)
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
if sum_value >= amount_for:
break
utxos_hash = hasher.digest()
if (
self.using_segwit()
): # TODO: Use isSegwitAddress when scantxoutset can use combo
# 'Address does not refer to key' for non p2pkh
pkh = self.decodeAddress(sign_for_addr)
sign_for_addr = self.pkh_to_address(pkh)
self._log.debug("sign_for_addr converted %s", sign_for_addr)
signature = self.rpc(
"signmessage",
[
sign_for_addr,
sign_for_addr
+ "_swap_proof_"
+ utxos_hash.hex()
+ extra_commit_bytes.hex(),
],
)
return (sign_for_addr, signature, prove_utxos)
def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes):
hasher = hashlib.sha256()
sum_value: int = 0
for outpoint in utxos:
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
utxos_hash = hasher.digest()
passed = self.verifyMessage(
address,
address + "_swap_proof_" + utxos_hash.hex() + extra_commit_bytes.hex(),
signature,
)
ensure(passed is True, "Proof of funds signature invalid")
if self.using_segwit():
address = self.encodeSegwitAddress(decodeAddress(address)[1:])
sum_value: int = 0
for outpoint in utxos:
txout = self.rpc("gettxout", [outpoint[0].hex(), outpoint[1]])
sum_value += self.make_int(txout["value"])
return sum_value
def rescanBlockchainForAddress(self, height_start: int, addr_find: str):
# Very ugly workaround for missing `rescanblockchain` rpc command
chain_blocks: int = self.getChainHeight()
current_height: int = chain_blocks
block_hash = self.rpc("getblockhash", [current_height])
script_hash: bytes = self.decodeAddress(addr_find)
find_scriptPubKey = self.getDestForScriptHash(script_hash)
while current_height > height_start:
block_hash = self.rpc("getblockhash", [current_height])
block = self.rpc("getblock", [block_hash, False])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
for tx in decoded_block.vtx:
for txo in tx.vout:
if txo.scriptPubKey == find_scriptPubKey:
tx.rehash()
txid = i2b(tx.sha256)
self._log.info(
"Found output to addr: {} in tx {} in block {}".format(
addr_find, txid.hex(), block_hash
)
)
self._log.info(
"rescanblockchain hack invalidateblock {}".format(
block_hash
)
)
self.rpc("invalidateblock", [block_hash])
self.rpc("reconsiderblock", [block_hash])
return
current_height -= 1
def getBlockWithTxns(self, block_hash: str):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_hex = tx.serialize_with_witness().hex()
tx_dec = self.rpc("decoderawtransaction", [tx_hex])
if "hex" not in tx_dec:
tx_dec["hex"] = tx_hex
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
}
return block_rv

View File

@@ -1,161 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2020 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins, chainparams
from basicswap.chainparams import Coins
class LTCInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.LTC
def __init__(self, coin_settings, network, swap_client=None):
super(LTCInterface, self).__init__(coin_settings, network, swap_client)
self._rpc_wallet_mweb = coin_settings.get("mweb_wallet_name", "mweb")
self.rpc_wallet_mweb = make_rpc_func(
self._rpcport,
self._rpcauth,
host=self._rpc_host,
wallet=self._rpc_wallet_mweb,
)
def getNewMwebAddress(self, use_segwit=False, label="swap_receive") -> str:
return self.rpc_wallet_mweb("getnewaddress", [label, "mweb"])
def getNewStealthAddress(self, label=""):
return self.getNewMwebAddress(False, label)
def withdrawCoin(self, value, type_from: str, addr_to: str, subfee: bool) -> str:
params = [addr_to, value, "", "", subfee, True, self._conf_target]
if type_from == "mweb":
return self.rpc_wallet_mweb("sendtoaddress", params)
return self.rpc_wallet("sendtoaddress", params)
def createUTXO(self, value_sats: int):
# Create a new address and send value_sats to it
spendable_balance = self.getSpendableBalance()
if spendable_balance < value_sats:
raise ValueError("Balance too low")
address = self.getNewAddress(self._use_segwit, "create_utxo")
return (
self.withdrawCoin(self.format_amount(value_sats), "plain", address, False),
address,
)
def getWalletInfo(self):
rv = super(LTCInterface, self).getWalletInfo()
mweb_info = self.rpc_wallet_mweb("getwalletinfo")
rv["mweb_balance"] = mweb_info["balance"]
rv["mweb_unconfirmed"] = mweb_info["unconfirmed_balance"]
rv["mweb_immature"] = mweb_info["immature_balance"]
return rv
def getUnspentsByAddr(self):
unspent_addr = dict()
unspent = self.rpc_wallet("listunspent")
for u in unspent:
if u.get("spendable", False) is False:
continue
if u.get("solvable", False) is False: # Filter out mweb outputs
continue
if "address" not in u:
continue
if "desc" in u:
desc = u["desc"]
if self.using_segwit:
if self.use_p2shp2wsh():
if not desc.startswith("sh(wpkh"):
continue
else:
if not desc.startswith("wpkh"):
continue
else:
if not desc.startswith("pkh"):
continue
unspent_addr[u["address"]] = unspent_addr.get(
u["address"], 0
) + self.make_int(u["amount"], r=1)
return unspent_addr
class LTCInterfaceMWEB(LTCInterface):
def interface_type(self) -> int:
return Coins.LTC_MWEB
def __init__(self, coin_settings, network, swap_client=None):
super(LTCInterfaceMWEB, self).__init__(coin_settings, network, swap_client)
self._rpc_wallet = coin_settings.get("mweb_wallet_name", "mweb")
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host, wallet=self._rpc_wallet
)
self.rpc_wallet_watch = self.rpc_wallet
def chainparams(self):
return chainparams[Coins.LTC]
def chainparams_network(self):
return chainparams[Coins.LTC][self._network]
def coin_name(self) -> str:
coin_chainparams = chainparams[Coins.LTC]
return coin_chainparams["name"].capitalize() + " MWEB"
def ticker(self) -> str:
ticker = chainparams[Coins.LTC]["ticker"]
if self._network == "testnet":
ticker = "t" + ticker
elif self._network == "regtest":
ticker = "rt" + ticker
return ticker + "_MWEB"
def getNewAddress(self, use_segwit=False, label="swap_receive") -> str:
return self.getNewMwebAddress()
def has_mweb_wallet(self) -> bool:
return "mweb" in self.rpc("listwallets")
def init_wallet(self, password=None):
# If system is encrypted mweb wallet will be created at first unlock
self._log.info("init_wallet - {}".format(self.ticker()))
self._log.info(f"Creating wallet {self._rpc_wallet} for {self.coin_name()}.")
# wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors, load_on_startup
self.rpc("createwallet", ["mweb", False, True, password, False, False, True])
if password is not None:
# Max timeout value, ~3 years
self.rpc_wallet("walletpassphrase", [password, 100000000])
if self.getWalletSeedID() == "Not found":
self._sc.initialiseWallet(self.interface_type())
# Workaround to trigger mweb_spk_man->LoadMWEBKeychain()
self.rpc("unloadwallet", ["mweb"])
self.rpc("loadwallet", ["mweb"])
if password is not None:
self.rpc_wallet("walletpassphrase", [password, 100000000])
self.rpc_wallet("keypoolrefill")
def unlockWallet(self, password: str, check_seed: bool = True) -> None:
if password == "":
return
self._log.info("unlockWallet - {}".format(self.ticker()))
if not self.has_mweb_wallet():
self.init_wallet(password)
else:
# Max timeout value, ~3 years
self.rpc_wallet("walletpassphrase", [password, 100000000])
if check_seed:
self._sc.checkWalletSeed(self.coin_type())

View File

@@ -1,999 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import random
import hashlib
from io import BytesIO
from coincurve.keys import (
PublicKey,
PrivateKey,
)
from basicswap.interface.btc import (
BTCInterface,
extractScriptLockRefundScriptValues,
findOutput,
find_vout_for_address_from_txobj,
)
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins
from basicswap.contrib.mnemonic import Mnemonic
from basicswap.interface.contrib.nav_test_framework.mininode import (
CTxIn,
CTxOut,
CBlock,
COutPoint,
CTransaction,
CTxInWitness,
FromHex,
)
from basicswap.util.crypto import hash160
from basicswap.util.address import (
decodeWif,
pubkeyToAddress,
encodeAddress,
)
from basicswap.util import (
b2i,
i2b,
ensure,
)
from basicswap.basicswap_util import (
getVoutByScriptPubKey,
)
from basicswap.interface.contrib.nav_test_framework.script import (
CScript,
OP_0,
OP_EQUAL,
OP_DUP,
OP_HASH160,
OP_EQUALVERIFY,
OP_CHECKSIG,
SIGHASH_ALL,
SegwitVersion1SignatureHash,
)
class NAVInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.NAV
@staticmethod
def txVersion() -> int:
return 3
@staticmethod
def txoType():
return CTxOut
def __init__(self, coin_settings, network, swap_client=None):
super(NAVInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
if "wallet_name" in coin_settings:
raise ValueError(f"Invalid setting for {self.coin_name()}: wallet_name")
def use_p2shp2wsh(self) -> bool:
# p2sh-p2wsh
return True
def initialiseWallet(self, key, restore_time: int = -1):
# Load with -importmnemonic= parameter
pass
def checkWallets(self) -> int:
return 1
def getWalletSeedID(self):
return self.rpc("getwalletinfo")["hdmasterkeyid"]
def withdrawCoin(self, value, addr_to: str, subfee: bool):
strdzeel = ""
params = [addr_to, value, "", "", strdzeel, subfee]
return self.rpc("sendtoaddress", params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
def signTxWithWallet(self, tx: bytes) -> bytes:
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
def checkExpectedSeed(self, key_hash: str):
try:
rv = self.rpc("dumpmnemonic")
entropy = Mnemonic("english").to_entropy(rv.split(" "))
entropy_hash = self.getAddressHashFromKey(entropy)[::-1].hex()
self._have_checked_seed = True
return entropy_hash == key_hash
except Exception as e:
self._log.warning("checkExpectedSeed failed: {}".format(str(e)))
return False
def getScriptForP2PKH(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2SH-p2wpkh
script = CScript([OP_0, pkh])
script_hash = hash160(script)
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getInputScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
script = CScript([OP_0, pkh])
return bytes((len(script),)) + script
def encodeSegwitAddress(self, pkh: bytes) -> str:
# P2SH-p2wpkh
script = CScript([OP_0, pkh])
script_hash = hash160(script)
assert len(script_hash) == 20
return encodeAddress(
bytes((self.chainparams_network()["script_address"],)) + script_hash
)
def encodeSegwitAddressScript(self, script: bytes) -> str:
if (
len(script) == 23
and script[0] == OP_HASH160
and script[1] == 20
and script[22] == OP_EQUAL
):
script_hash = script[2:22]
return encodeAddress(
bytes((self.chainparams_network()["script_address"],)) + script_hash
)
raise ValueError("Unknown Script")
def loadTx(self, tx_bytes: bytes) -> CTransaction:
# Load tx from bytes to internal representation
tx = CTransaction()
tx.deserialize(BytesIO(tx_bytes))
return tx
def signTx(
self,
key_bytes: bytes,
tx_bytes: bytes,
input_n: int,
prevout_script,
prevout_value: int,
):
tx = self.loadTx(tx_bytes)
sig_hash = SegwitVersion1SignatureHash(
prevout_script, tx, input_n, SIGHASH_ALL, prevout_value
)
eck = PrivateKey(key_bytes)
return eck.sign(sig_hash, hasher=None) + bytes((SIGHASH_ALL,))
def setTxSignature(self, tx_bytes: bytes, stack) -> bytes:
tx = self.loadTx(tx_bytes)
tx.wit.vtxinwit.clear()
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = stack
return tx.serialize_with_witness()
def getProofOfFunds(self, amount_for, extra_commit_bytes):
# TODO: Lock unspent and use same output/s to fund bid
unspents_by_addr = dict()
unspents = self.rpc("listunspent")
for u in unspents:
if u["spendable"] is not True:
continue
if u["address"] not in unspents_by_addr:
unspents_by_addr[u["address"]] = {"total": 0, "utxos": []}
utxo_amount: int = self.make_int(u["amount"], r=1)
unspents_by_addr[u["address"]]["total"] += utxo_amount
unspents_by_addr[u["address"]]["utxos"].append(
(utxo_amount, u["txid"], u["vout"])
)
max_utxos: int = 4
viable_addrs = []
for addr, data in unspents_by_addr.items():
if data["total"] >= amount_for:
# Sort from largest to smallest amount
sorted_utxos = sorted(data["utxos"], key=lambda x: x[0])
# Max outputs required to reach amount_for
utxos_req: int = 0
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
utxos_req += 1
if sum_value >= amount_for:
break
if utxos_req <= max_utxos:
viable_addrs.append(addr)
continue
ensure(
len(viable_addrs) > 0, "Could not find address with enough funds for proof"
)
sign_for_addr: str = random.choice(viable_addrs)
self._log.debug("sign_for_addr %s", sign_for_addr)
prove_utxos = []
sorted_utxos = sorted(
unspents_by_addr[sign_for_addr]["utxos"], key=lambda x: x[0]
)
hasher = hashlib.sha256()
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
outpoint = (bytes.fromhex(utxo[1]), utxo[2])
prove_utxos.append(outpoint)
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
if sum_value >= amount_for:
break
utxos_hash = hasher.digest()
if (
self.using_segwit()
): # TODO: Use isSegwitAddress when scantxoutset can use combo
# 'Address does not refer to key' for non p2pkh
addr_info = self.rpc(
"validateaddress",
[
addr,
],
)
if "isscript" in addr_info and addr_info["isscript"] and "hex" in addr_info:
pkh = bytes.fromhex(addr_info["hex"])[2:]
sign_for_addr = self.pkh_to_address(pkh)
self._log.debug("sign_for_addr converted %s", sign_for_addr)
signature = self.rpc(
"signmessage",
[
sign_for_addr,
sign_for_addr
+ "_swap_proof_"
+ utxos_hash.hex()
+ extra_commit_bytes.hex(),
],
)
return (sign_for_addr, signature, prove_utxos)
def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes):
hasher = hashlib.sha256()
sum_value: int = 0
for outpoint in utxos:
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
utxos_hash = hasher.digest()
passed = self.verifyMessage(
address,
address + "_swap_proof_" + utxos_hash.hex() + extra_commit_bytes.hex(),
signature,
)
ensure(passed is True, "Proof of funds signature invalid")
if self.using_segwit():
address = self.encodeSegwitAddress(self.decodeAddress(address)[1:])
sum_value: int = 0
for outpoint in utxos:
txout = self.rpc("gettxout", [outpoint[0].hex(), outpoint[1]])
sum_value += self.make_int(txout["value"])
return sum_value
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
if sub_fee:
raise ValueError(
"Navcoin fundrawtransaction is missing the subtractFeeFromOutputs parameter"
)
# options['subtractFeeFromOutputs'] = [0,]
fee_rate = self.make_int(fee_rate, r=1)
return self.fundTx(txn, fee_rate, lock_unspents).hex()
def isAddressMine(self, address: str, or_watch_only: bool = False) -> bool:
addr_info = self.rpc("validateaddress", [address])
if not or_watch_only:
return addr_info["ismine"]
return addr_info["ismine"] or addr_info["iswatchonly"]
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
def getBlockchainInfo(self):
rv = self.rpc("getblockchaininfo")
synced = round(rv["verificationprogress"], 3)
if synced >= 0.997:
rv["verificationprogress"] = 1.0
return rv
def encodeScriptDest(self, script_dest: bytes) -> str:
script_hash = script_dest[2:-1] # Extract hash from script
return self.sh_to_address(script_hash)
def encode_p2wsh(self, script: bytes) -> str:
return pubkeyToAddress(self.chainparams_network()["script_address"], script)
def find_prevout_info(self, txn_hex: str, txn_script: bytes):
txjs = self.rpc("decoderawtransaction", [txn_hex])
n = getVoutByScriptPubKey(txjs, self.getScriptDest(txn_script).hex())
return {
"txid": txjs["txid"],
"vout": n,
"scriptPubKey": txjs["vout"][n]["scriptPubKey"]["hex"],
"redeemScript": txn_script.hex(),
"amount": txjs["vout"][n]["value"],
}
def getNewAddress(self, use_segwit: bool, label: str = "swap_receive") -> str:
address: str = self.rpc(
"getnewaddress",
[
label,
],
)
if use_segwit:
return self.rpc(
"addwitnessaddress",
[
address,
],
)
return address
def createRedeemTxn(
self, prevout, output_addr: str, output_value: int, txn_script: bytes
) -> str:
tx = CTransaction()
tx.nVersion = self.txVersion()
prev_txid = b2i(bytes.fromhex(prevout["txid"]))
tx.vin.append(
CTxIn(
COutPoint(prev_txid, prevout["vout"]),
scriptSig=self.getScriptScriptSig(txn_script),
)
)
pkh = self.decodeAddress(output_addr)
script = self.getScriptForPubkeyHash(pkh)
tx.vout.append(self.txoType()(output_value, script))
tx.rehash()
return tx.serialize().hex()
def createRefundTxn(
self,
prevout,
output_addr: str,
output_value: int,
locktime: int,
sequence: int,
txn_script: bytes,
) -> str:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.nLockTime = locktime
prev_txid = b2i(bytes.fromhex(prevout["txid"]))
tx.vin.append(
CTxIn(
COutPoint(prev_txid, prevout["vout"]),
nSequence=sequence,
scriptSig=self.getScriptScriptSig(txn_script),
)
)
pkh = self.decodeAddress(output_addr)
script = self.getScriptForPubkeyHash(pkh)
tx.vout.append(self.txoType()(output_value, script))
tx.rehash()
return tx.serialize().hex()
def getTxSignature(self, tx_hex: str, prevout_data, key_wif: str) -> str:
key = decodeWif(key_wif)
redeem_script = bytes.fromhex(prevout_data["redeemScript"])
sig = self.signTx(
key,
bytes.fromhex(tx_hex),
0,
redeem_script,
self.make_int(prevout_data["amount"]),
)
return sig.hex()
def verifyTxSig(
self,
tx_bytes: bytes,
sig: bytes,
K: bytes,
input_n: int,
prevout_script: bytes,
prevout_value: int,
) -> bool:
tx = self.loadTx(tx_bytes)
sig_hash = SegwitVersion1SignatureHash(
prevout_script, tx, input_n, SIGHASH_ALL, prevout_value
)
pubkey = PublicKey(K)
return pubkey.verify(sig[:-1], sig_hash, hasher=None) # Pop the hashtype byte
def verifyRawTransaction(self, tx_hex: str, prevouts):
# Only checks signature
# verifyrawtransaction
self._log.warning("NAV verifyRawTransaction only checks signature")
inputs_valid: bool = False
validscripts: int = 0
tx_bytes = bytes.fromhex(tx_hex)
tx = self.loadTx(bytes.fromhex(tx_hex))
signature = tx.wit.vtxinwit[0].scriptWitness.stack[0]
pubkey = tx.wit.vtxinwit[0].scriptWitness.stack[1]
input_n: int = 0
prevout_data = prevouts[input_n]
redeem_script = bytes.fromhex(prevout_data["redeemScript"])
prevout_value = self.make_int(prevout_data["amount"])
if self.verifyTxSig(
tx_bytes, signature, pubkey, input_n, redeem_script, prevout_value
):
validscripts += 1
# TODO: validate inputs
inputs_valid = True
return {
"inputs_valid": inputs_valid,
"validscripts": validscripts,
}
def getHTLCSpendTxVSize(self, redeem: bool = True) -> int:
tx_vsize = (
5 # Add a few bytes, sequence in script takes variable amount of bytes
)
tx_vsize += 184 if redeem else 187
return tx_vsize
def getTxid(self, tx) -> bytes:
if isinstance(tx, str):
tx = bytes.fromhex(tx)
if isinstance(tx, bytes):
tx = self.loadTx(tx)
tx.rehash()
return i2b(tx.sha256)
def rescanBlockchainForAddress(self, height_start: int, addr_find: str):
        # Very ugly workaround for the missing `rescanblockchain` RPC command:
        # invalidating then reconsidering the block containing the output forces
        # the wallet to reprocess it.
chain_blocks: int = self.getChainHeight()
current_height: int = chain_blocks
block_hash = self.rpc("getblockhash", [current_height])
script_hash: bytes = self.decodeAddress(addr_find)
find_scriptPubKey = self.getDestForScriptHash(script_hash)
while current_height > height_start:
block_hash = self.rpc("getblockhash", [current_height])
block = self.rpc("getblock", [block_hash, False])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
for tx in decoded_block.vtx:
for txo in tx.vout:
if txo.scriptPubKey == find_scriptPubKey:
tx.rehash()
txid = i2b(tx.sha256)
self._log.info(
"Found output to addr: {} in tx {} in block {}".format(
addr_find, txid.hex(), block_hash
)
)
self._log.info(
"rescanblockchain hack invalidateblock {}".format(
block_hash
)
)
self.rpc("invalidateblock", [block_hash])
self.rpc("reconsiderblock", [block_hash])
return
current_height -= 1
def getLockTxHeight(
self,
txid,
dest_address,
bid_amount,
rescan_from,
find_index: bool = False,
vout: int = -1,
):
# Add watchonly address and rescan if required
if not self.isAddressMine(dest_address, or_watch_only=True):
self.importWatchOnlyAddress(dest_address, "bid")
self._log.info(
"Imported watch-only addr: {}".format(self._log.addr(dest_address))
)
self._log.info(
"Rescanning {} chain from height: {}".format(
self.coin_name(), rescan_from
)
)
self.rescanBlockchainForAddress(rescan_from, dest_address)
return_txid = True if txid is None else False
if txid is None:
txns = self.rpc(
"listunspent",
[
0,
9999999,
[
dest_address,
],
],
)
for tx in txns:
if self.make_int(tx["amount"]) == bid_amount:
txid = bytes.fromhex(tx["txid"])
break
if txid is None:
return None
try:
tx = self.rpc("gettransaction", [txid.hex()])
block_height = 0
if "blockhash" in tx:
block_header = self.rpc("getblockheader", [tx["blockhash"]])
block_height = block_header["height"]
rv = {
"depth": 0 if "confirmations" not in tx else tx["confirmations"],
"height": block_height,
}
except Exception as e:
self._log.debug(
"getLockTxHeight gettransaction failed: %s, %s", txid.hex(), str(e)
)
return None
if find_index:
tx_obj = self.rpc("decoderawtransaction", [tx["hex"]])
rv["index"] = find_vout_for_address_from_txobj(tx_obj, dest_address)
if return_txid:
rv["txid"] = txid.hex()
return rv
def getBlockWithTxns(self, block_hash):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_hex = tx.serialize_with_witness().hex()
tx_dec = self.rpc("decoderawtransaction", [tx_hex])
if "hex" not in tx_dec:
tx_dec["hex"] = tx_hex
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
}
return block_rv
def getScriptScriptSig(self, script: bytes) -> bytes:
return self.getP2SHP2WSHScriptSig(script)
def getScriptDest(self, script):
return self.getP2SHP2WSHDest(script)
def getDestForScriptHash(self, script_hash):
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def pubkey_to_segwit_address(self, pk: bytes) -> str:
pkh = hash160(pk)
script_out = self.getScriptForPubkeyHash(pkh)
return self.encodeSegwitAddressScript(script_out)
def createBLockTx(self, Kbs: bytes, output_amount: int, vkbv=None) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
script_pk = self.getPkDest(Kbs)
tx.vout.append(self.txoType()(output_amount, script_pk))
return tx.serialize()
def spendBLockTx(
self,
chain_b_lock_txid: bytes,
address_to: str,
kbv: bytes,
kbs: bytes,
cb_swap_value: int,
b_fee: int,
restore_height: int,
spend_actual_balance: bool = False,
lock_tx_vout=None,
) -> bytes:
self._log.info("spendBLockTx %s:\n", chain_b_lock_txid.hex())
wtx = self.rpc(
"gettransaction",
[
chain_b_lock_txid.hex(),
],
)
lock_tx = self.loadTx(bytes.fromhex(wtx["hex"]))
Kbs = self.getPubkey(kbs)
script_pk = self.getPkDest(Kbs)
locked_n = findOutput(lock_tx, script_pk)
ensure(locked_n is not None, "Output not found in tx")
pkh_to = self.decodeAddress(address_to)
tx = CTransaction()
tx.nVersion = self.txVersion()
chain_b_lock_txid_int = b2i(chain_b_lock_txid)
script_sig = self.getInputScriptForPubkeyHash(self.getPubkeyHash(Kbs))
tx.vin.append(
CTxIn(
COutPoint(chain_b_lock_txid_int, locked_n),
nSequence=0,
scriptSig=script_sig,
)
)
tx.vout.append(
self.txoType()(cb_swap_value, self.getScriptForPubkeyHash(pkh_to))
)
pay_fee = self.getBLockSpendTxFee(tx, b_fee)
tx.vout[0].nValue = cb_swap_value - pay_fee
b_lock_spend_tx = tx.serialize()
b_lock_spend_tx = self.signTxWithKey(b_lock_spend_tx, kbs, cb_swap_value)
return bytes.fromhex(self.publishTx(b_lock_spend_tx))
def signTxWithKey(self, tx: bytes, key: bytes, prev_amount: int) -> bytes:
Key = self.getPubkey(key)
pkh = self.getPubkeyHash(Key)
script = self.getScriptForP2PKH(pkh)
sig = self.signTx(key, tx, 0, script, prev_amount)
stack = [
sig,
Key,
]
return self.setTxSignature(tx, stack)
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
return None
def createSCLockTx(
self, value: int, script: bytearray, vkbv: bytes = None
) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vout.append(self.txoType()(value, self.getScriptDest(script)))
return tx.serialize()
def fundTx(self, tx_hex: str, feerate: int, lock_unspents: bool = True):
feerate_str = self.format_amount(feerate)
# TODO: unlock unspents if bid cancelled
options = {
"lockUnspents": lock_unspents,
"feeRate": feerate_str,
}
rv = self.rpc("fundrawtransaction", [tx_hex, options])
# Sign transaction then strip witness data to fill scriptsig
rv = self.rpc("signrawtransaction", [rv["hex"]])
tx_signed = self.loadTx(bytes.fromhex(rv["hex"]))
if len(tx_signed.vin) != len(tx_signed.wit.vtxinwit):
raise ValueError("txn has non segwit input")
for witness_data in tx_signed.wit.vtxinwit:
if len(witness_data.scriptWitness.stack) < 2:
raise ValueError("txn has non segwit input")
return tx_signed.serialize_without_witness()
def fundSCLockTx(self, tx_bytes: bytes, feerate, vkbv=None) -> bytes:
tx_funded = self.fundTx(tx_bytes.hex(), feerate)
return tx_funded
def createSCLockRefundTx(
self,
tx_lock_bytes,
script_lock,
Kal,
Kaf,
lock1_value,
csv_val,
tx_fee_rate,
vkbv=None,
):
tx_lock = CTransaction()
tx_lock = self.loadTx(tx_lock_bytes)
output_script = self.getScriptDest(script_lock)
locked_n = findOutput(tx_lock, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock.vout[locked_n].nValue
tx_lock.rehash()
tx_lock_id_int = tx_lock.sha256
refund_script = self.genScriptLockRefundTxScript(Kal, Kaf, csv_val)
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_id_int, locked_n),
nSequence=lock1_value,
scriptSig=self.getScriptScriptSig(script_lock),
)
)
tx.vout.append(self.txoType()(locked_coin, self.getScriptDest(refund_script)))
dummy_witness_stack = self.getScriptLockTxDummyWitness(script_lock)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize(), refund_script, tx.vout[0].nValue
def createSCLockRefundSpendTx(
self,
tx_lock_refund_bytes,
script_lock_refund,
pkh_refund_to,
tx_fee_rate,
vkbv=None,
):
        # Returns the coinA locked coin to the leader.
        # The follower signs the multisig path with a signature encumbered by the leader's coinB spend pubkey.
        # If the leader publishes the decrypted signature, the leader's coinB spend private key is revealed to the follower.
tx_lock_refund = self.loadTx(tx_lock_refund_bytes)
output_script = self.getScriptDest(script_lock_refund)
locked_n = findOutput(tx_lock_refund, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock_refund.vout[locked_n].nValue
tx_lock_refund.rehash()
tx_lock_refund_hash_int = tx_lock_refund.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_refund_hash_int, locked_n),
nSequence=0,
scriptSig=self.getScriptScriptSig(script_lock_refund),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_refund_to))
)
dummy_witness_stack = self.getScriptLockRefundSpendTxDummyWitness(
script_lock_refund
)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundSpendTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
def createSCLockRefundSpendToFTx(
self,
tx_lock_refund_bytes,
script_lock_refund,
pkh_dest,
tx_fee_rate,
vkbv=None,
kbsf=None,
):
# lock refund swipe tx
# Sends the coinA locked coin to the follower
tx_lock_refund = self.loadTx(tx_lock_refund_bytes)
output_script = self.getScriptDest(script_lock_refund)
locked_n = findOutput(tx_lock_refund, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock_refund.vout[locked_n].nValue
A, B, lock2_value, C = extractScriptLockRefundScriptValues(script_lock_refund)
tx_lock_refund.rehash()
tx_lock_refund_hash_int = tx_lock_refund.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_refund_hash_int, locked_n),
nSequence=lock2_value,
scriptSig=self.getScriptScriptSig(script_lock_refund),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_dest))
)
dummy_witness_stack = self.getScriptLockRefundSwipeTxDummyWitness(
script_lock_refund
)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundSpendToFTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
def createSCLockSpendTx(
self, tx_lock_bytes, script_lock, pkh_dest, tx_fee_rate, vkbv=None, fee_info={}
):
tx_lock = self.loadTx(tx_lock_bytes)
output_script = self.getScriptDest(script_lock)
locked_n = findOutput(tx_lock, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock.vout[locked_n].nValue
tx_lock.rehash()
tx_lock_id_int = tx_lock.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_id_int, locked_n),
scriptSig=self.getScriptScriptSig(script_lock),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_dest))
)
dummy_witness_stack = self.getScriptLockTxDummyWitness(script_lock)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
fee_info["fee_paid"] = pay_fee
fee_info["rate_used"] = tx_fee_rate
fee_info["witness_bytes"] = witness_bytes
fee_info["vsize"] = vsize
tx.rehash()
self._log.info(
"createSCLockSpendTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
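
Each of the createSCLock*Tx builders above estimates its fee the same way: attach a dummy witness stack, measure the virtual size, then deduct pay_fee = round(tx_fee_rate * vsize / 1000) from the locked output, with tx_fee_rate expressed per 1000 virtual bytes. A minimal standalone sketch of that arithmetic, with illustrative numbers (the helper name is not part of the interface):

def estimate_pay_fee(tx_fee_rate: int, vsize: int) -> int:
    # tx_fee_rate is per 1000 vbytes, so a 222 vbyte spend at
    # 10000 sat/kvB pays round(10000 * 222 / 1000) = 2220 sats.
    return round(tx_fee_rate * vsize / 1000)

assert estimate_pay_fee(10000, 222) == 2220  # amount deducted from tx.vout[0].nValue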

View File

@@ -2,15 +2,41 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2022 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util import (
make_int,
)
class NMCInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.NMC
def getLockTxHeight(self, txid, dest_address, bid_amount, rescan_from, find_index=False):
self._log.debug('[rm] scantxoutset start') # scantxoutset is slow
ro = self.rpc_callback('scantxoutset', ['start', ['addr({})'.format(dest_address)]]) # TODO: Use combo(address) where possible
self._log.debug('[rm] scantxoutset end')
return_txid = True if txid is None else False
for o in ro['unspents']:
if txid and o['txid'] != txid.hex():
continue
# Verify amount
if make_int(o['amount']) != int(bid_amount):
self._log.warning('Found output to lock tx address of incorrect value: %s, %s', str(o['amount']), o['txid'])
continue
rv = {
'depth': 0,
'height': o['height']}
if o['height'] > 0:
rv['depth'] = ro['height'] - o['height']
if find_index:
rv['index'] = o['vout']
if return_txid:
rv['txid'] = o['txid']
return rv
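
scantxoutset reports the chain height it scanned at alongside each matching unspent, which is why this override can return a confirmation depth without a wallet rescan. A rough sketch of the result shape the code above relies on (values are illustrative, trimmed to the fields used):

ro = {
    "height": 250000,  # chain height at scan time
    "unspents": [
        {"txid": "ab" * 32, "vout": 1, "amount": 1.25, "height": 249990},
    ],
}
o = ro["unspents"][0]
depth = (ro["height"] - o["height"]) if o["height"] > 0 else 0
assert depth == 10  # becomes rv['depth'] in getLockTxHeight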

File diff suppressed because it is too large

View File

@@ -6,7 +6,8 @@
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.contrib.test_framework.messages import CTxOut
from basicswap.contrib.test_framework.messages import (
CTxOut)
class PassthroughBTCInterface(BTCInterface):
@@ -14,5 +15,5 @@ class PassthroughBTCInterface(BTCInterface):
super().__init__(coin_settings, network)
self.txoType = CTxOut
self._network = network
self.blocks_confirmed = coin_settings["blocks_confirmed"]
self.setConfTarget(coin_settings["conf_target"])
self.blocks_confirmed = coin_settings['blocks_confirmed']
self.setConfTarget(coin_settings['conf_target'])

View File

@@ -2,17 +2,19 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022 tecnovert
# Copyright (c) 2024 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from io import BytesIO
from .btc import BTCInterface
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins
from basicswap.util.address import decodeAddress
from .contrib.pivx_test_framework.messages import CBlock, ToHex, FromHex, CTransaction
from .contrib.pivx_test_framework.messages import (
CBlock,
ToHex,
FromHex,
CTransaction)
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
@@ -27,109 +29,58 @@ class PIVXInterface(BTCInterface):
def coin_type():
return Coins.PIVX
def __init__(self, coin_settings, network, swap_client=None):
super(PIVXInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
def encryptWallet(self, password: str, check_seed: bool = True):
# Watchonly wallets are not encrypted
seed_id_before: str = self.getWalletSeedID()
self.rpc_wallet("encryptwallet", [password])
if check_seed is False or seed_id_before == "Not found":
return
seed_id_after: str = self.getWalletSeedID()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
# Workaround for https://github.com/bitcoin/bitcoin/issues/26607
chain_client_settings = self._sc.getChainClientSettings(
self.coin_type()
) # basicswap.json
if chain_client_settings.get("manage_daemon", False) is False:
self._log.warning(
f"{self.ticker()} manage_daemon is false. Can't attempt to fix."
)
return
def signTxWithWallet(self, tx):
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
rv = self.rpc_callback('signrawtransaction', [tx.hex()])
return bytes.fromhex(rv['hex'])
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
def createRawFundedTransaction(self, addr_to: str, amount: int, sub_fee: bool = False, lock_unspents: bool = True) -> str:
txn = self.rpc_callback('createrawtransaction', [[], {addr_to: self.format_amount(amount)}])
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
self._log.debug(f'Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}')
options = {
"lockUnspents": lock_unspents,
"feeRate": fee_rate,
'lockUnspents': lock_unspents,
'feeRate': fee_rate,
}
if sub_fee:
options["subtractFeeFromOutputs"] = [
0,
]
return self.rpc("fundrawtransaction", [txn, options])["hex"]
options['subtractFeeFromOutputs'] = [0,]
return self.rpc_callback('fundrawtransaction', [txn, options])['hex']
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
return self.rpc_callback('signrawtransaction', [txn_funded])['hex']
def decodeAddress(self, address):
return decodeAddress(address)[1:]
def getBlockWithTxns(self, block_hash):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
block = self.rpc_callback('getblock', [block_hash, False])
block_header = self.rpc_callback('getblockheader', [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_dec = self.rpc("decoderawtransaction", [ToHex(tx)])
tx_dec = self.rpc_callback('decoderawtransaction', [ToHex(tx)])
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
'hash': block_hash,
'tx': tx_rv,
'confirmations': block_header['confirmations'],
'height': block_header['height'],
'version': block_header['version'],
'merkleroot': block_header['merkleroot'],
}
return block_rv
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee]
return self.rpc("sendtoaddress", params)
params = [addr_to, value, '', '', subfee]
return self.rpc_callback('sendtoaddress', params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
return self.make_int(self.rpc_callback('getwalletinfo')['balance'])
def loadTx(self, tx_bytes):
# Load tx from bytes to internal representation
@@ -145,35 +96,22 @@ class PIVXInterface(BTCInterface):
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
self._log.info(f'BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}.')
return pay_fee
def signTxWithKey(self, tx: bytes, key: bytes) -> bytes:
key_wif = self.encodeKey(key)
rv = self.rpc(
"signrawtransaction",
[
tx.hex(),
[],
[
key_wif,
],
],
)
return bytes.fromhex(rv["hex"])
rv = self.rpc_callback('signrawtransaction', [tx.hex(), [], [key_wif, ]])
return bytes.fromhex(rv['hex'])
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
rv = self.rpc_callback('gettransaction', [txid_hex])
except Exception as ex:
self._log.debug('findTxnByHash getrawtransaction failed: {}'.format(txid_hex))
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
if 'confirmations' in rv and rv['confirmations'] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv['blockhash'])['height']
return {'txid': txid_hex, 'amount': 0, 'height': block_height}
return None

View File

@@ -0,0 +1,19 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2023 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
class VEILInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.VEIL
@staticmethod
def txVersion() -> int:
return 2

View File

@@ -1,56 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from basicswap.chainparams import WOW_COIN, Coins
from .xmr import XMRInterface
class WOWInterface(XMRInterface):
@staticmethod
def coin_type():
return Coins.WOW
@staticmethod
    def ticker_str() -> str:
return Coins.WOW.name
@staticmethod
def COIN():
return WOW_COIN
@staticmethod
def exp() -> int:
return 11
@staticmethod
def depth_spendable() -> int:
return 4
# below only needed until wow is rebased to monero v0.18.4.0+
def openWallet(self, filename):
params = {"filename": filename}
if self._wallet_password is not None:
params["password"] = self._wallet_password
try:
self.rpc_wallet("open_wallet", params)
except Exception as e:
if "no connection to daemon" in str(e):
self._log.debug(f"{self.coin_name()} {e}")
return # bypass refresh error to allow startup with a busy daemon
try:
# TODO Remove `store` after upstream fix to autosave on close_wallet
self.rpc_wallet("store")
self.rpc_wallet("close_wallet")
self._log.debug(f"Attempt to save and close {self.coin_name()} wallet")
except Exception as e: # noqa: F841
pass
self.rpc_wallet("open_wallet", params)
self._log.debug(f"Reattempt to open {self.coin_name()} wallet")

File diff suppressed because it is too large

File diff suppressed because it is too large

basicswap/messages.proto Normal file
View File

@@ -0,0 +1,152 @@
syntax = "proto3";
package basicswap;
/* Step 1, seller -> network */
message OfferMessage {
uint32 coin_from = 1;
uint32 coin_to = 2;
uint64 amount_from = 3;
uint64 rate = 4;
uint64 min_bid_amount = 5;
uint64 time_valid = 6;
enum LockType {
NOT_SET = 0;
SEQUENCE_LOCK_BLOCKS = 1;
SEQUENCE_LOCK_TIME = 2;
ABS_LOCK_BLOCKS = 3;
ABS_LOCK_TIME = 4;
}
LockType lock_type = 7;
uint32 lock_value = 8;
uint32 swap_type = 9;
/* optional */
string proof_address = 10;
string proof_signature = 11;
bytes pkhash_seller = 12;
bytes secret_hash = 13;
uint64 fee_rate_from = 14;
uint64 fee_rate_to = 15;
uint32 protocol_version = 16;
bool amount_negotiable = 17;
bool rate_negotiable = 18;
}
/* Step 2, buyer -> seller */
message BidMessage {
bytes offer_msg_id = 1;
uint64 time_valid = 2; /* seconds bid is valid for */
uint64 amount = 3; /* amount of amount_from bid is for */
uint64 rate = 4;
bytes pkhash_buyer = 5; /* buyer's address to receive amount_from */
string proof_address = 6;
string proof_signature = 7;
uint32 protocol_version = 8;
}
/* Step 3, seller -> buyer */
message BidAcceptMessage {
bytes bid_msg_id = 1;
bytes initiate_txid = 2;
bytes contract_script = 3;
}
message OfferRevokeMessage {
bytes offer_msg_id = 1;
bytes signature = 2;
}
message BidRejectMessage {
bytes bid_msg_id = 1;
uint32 reject_code = 2;
}
message XmrBidMessage {
/* MSG1L, F -> L */
bytes offer_msg_id = 1;
uint64 time_valid = 2; /* seconds bid is valid for */
uint64 amount = 3; /* amount of amount_from bid is for */
uint64 rate = 4;
bytes pkaf = 5;
bytes kbvf = 6;
bytes kbsf_dleag = 7;
bytes dest_af = 8;
uint32 protocol_version = 9;
}
message XmrSplitMessage {
bytes msg_id = 1;
uint32 msg_type = 2; /* 1 XmrBid, 2 XmrBidAccept */
uint32 sequence = 3;
bytes dleag = 4;
}
message XmrBidAcceptMessage {
bytes bid_msg_id = 1;
bytes pkal = 3;
bytes kbvl = 4;
bytes kbsl_dleag = 5;
/* MSG2F */
bytes a_lock_tx = 6;
bytes a_lock_tx_script = 7;
bytes a_lock_refund_tx = 8;
bytes a_lock_refund_tx_script = 9;
bytes a_lock_refund_spend_tx = 10;
bytes al_lock_refund_tx_sig = 11;
}
message XmrBidLockTxSigsMessage {
/* MSG3L */
bytes bid_msg_id = 1;
bytes af_lock_refund_spend_tx_esig = 2;
bytes af_lock_refund_tx_sig = 3;
}
message XmrBidLockSpendTxMessage {
/* MSG4F */
bytes bid_msg_id = 1;
bytes a_lock_spend_tx = 2;
bytes kal_sig = 3;
}
message XmrBidLockReleaseMessage {
/* MSG5F */
bytes bid_msg_id = 1;
bytes al_lock_spend_tx_esig = 2;
}
message ADSBidIntentMessage {
/* L -> F Sent from bidder, construct a reverse bid */
bytes offer_msg_id = 1;
uint64 time_valid = 2; /* seconds bid is valid for */
uint64 amount_from = 3; /* amount of offer.coin_from bid is for */
uint64 amount_to = 4; /* amount of offer.coin_to bid is for, equivalent to bid.amount */
    uint64 rate = 5;
uint32 protocol_version = 6;
}
message ADSBidIntentAcceptMessage {
/* F -> L Sent from offerer, construct a reverse bid */
bytes bid_msg_id = 1;
bytes pkaf = 2;
bytes kbvf = 3;
bytes kbsf_dleag = 4;
bytes dest_af = 5;
}
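
The dleag fields above (kbsf_dleag, kbsl_dleag) are far larger than a single message payload, which is what XmrSplitMessage's msg_type and sequence fields appear to be for: carrying the rest of the proof in ordered chunks tied to the originating bid (msg_type 1) or bid accept (msg_type 2). A hedged sketch of that chunking using the generated bindings shown below; the helper, chunk size and starting sequence are assumptions, not taken from the code:

from basicswap import messages_pb2

def split_dleag(msg_id: bytes, msg_type: int, dleag: bytes, chunk_size: int = 16000):
    # Hypothetical helper: yield serialized XmrSplitMessage chunks for a
    # dleag proof too large to send in one piece.
    for sequence, o in enumerate(range(0, len(dleag), chunk_size), start=1):
        part = messages_pb2.XmrSplitMessage(
            msg_id=msg_id,
            msg_type=msg_type,  # 1 XmrBid, 2 XmrBidAccept
            sequence=sequence,
            dleag=dleag[o : o + chunk_size],
        )
        yield part.SerializeToString()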

View File

@@ -1,306 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
"""
syntax = "proto3";
0 VARINT int32, int64, uint32, uint64, sint32, sint64, bool, enum
1 I64 fixed64, sfixed64, double
2 LEN string, bytes, embedded messages, packed repeated fields
5 I32 fixed32, sfixed32, float
Fields with default values are not encoded.
When decoding, initialise all fields that are not set from the data.
protobuf's ParseFromString would reset the whole object; from_bytes won't.
"""
from basicswap.util.integer import encode_varint, decode_varint
NPBW_INT = 0
NPBW_BYTES = 2
NPBF_STR = 1
NPBF_BOOL = 2
class NonProtobufClass:
def __init__(self, init_all: bool = True, **kwargs):
for key, value in kwargs.items():
found_field: bool = False
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if field_name == key:
setattr(self, field_name, value)
found_field = True
break
if found_field is False:
raise ValueError(f"Got an unexpected keyword argument '{key}'")
if init_all:
self.init_fields()
def init_fields(self) -> None:
# Set default values for missing fields
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if hasattr(self, field_name):
continue
if wire_type == 0:
setattr(self, field_name, 0)
elif wire_type == 2:
if field_type == 1:
setattr(self, field_name, str())
else:
setattr(self, field_name, bytes())
else:
raise ValueError(f"Unknown wire_type {wire_type}")
def to_bytes(self) -> bytes:
rv = bytes()
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if not hasattr(self, field_name):
continue
field_value = getattr(self, field_name)
tag = (field_num << 3) | wire_type
if wire_type == 0:
if field_value == 0:
continue
rv += encode_varint(tag)
rv += encode_varint(field_value)
elif wire_type == 2:
if len(field_value) == 0:
continue
rv += encode_varint(tag)
if isinstance(field_value, str):
field_value = field_value.encode("utf-8")
rv += encode_varint(len(field_value))
rv += field_value
else:
raise ValueError(f"Unknown wire_type {wire_type}")
return rv
def from_bytes(self, b: bytes, init_all: bool = True) -> None:
max_len: int = len(b)
o: int = 0
while o < max_len:
tag, lv = decode_varint(b, o)
o += lv
wire_type = tag & 7
field_num = tag >> 3
field_name, wire_type_expect, field_type = self._map[field_num]
if wire_type != wire_type_expect:
raise ValueError(
f"Unexpected wire_type {wire_type} for field {field_num}"
)
if wire_type == 0:
field_value, lv = decode_varint(b, o)
o += lv
elif wire_type == 2:
field_len, lv = decode_varint(b, o)
o += lv
field_value = b[o : o + field_len]
o += field_len
if field_type == 1:
field_value = field_value.decode("utf-8")
else:
raise ValueError(f"Unknown wire_type {wire_type}")
setattr(self, field_name, field_value)
if init_all:
self.init_fields()
class OfferMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("coin_from", NPBW_INT, 0),
3: ("coin_to", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("min_bid_amount", NPBW_INT, 0),
7: ("time_valid", NPBW_INT, 0),
8: ("lock_type", NPBW_INT, 0),
9: ("lock_value", NPBW_INT, 0),
10: ("swap_type", NPBW_INT, 0),
11: ("proof_address", NPBW_BYTES, NPBF_STR),
12: ("proof_signature", NPBW_BYTES, NPBF_STR),
13: ("pkhash_seller", NPBW_BYTES, 0),
14: ("secret_hash", NPBW_BYTES, 0),
15: ("fee_rate_from", NPBW_INT, 0),
16: ("fee_rate_to", NPBW_INT, 0),
17: ("amount_negotiable", NPBW_INT, NPBF_BOOL),
18: ("rate_negotiable", NPBW_INT, NPBF_BOOL),
19: ("proof_utxos", NPBW_BYTES, 0),
20: ("auto_accept_type", NPBW_INT, 0),
21: ("message_nets", NPBW_BYTES, NPBF_STR),
}
class BidMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkhash_buyer", NPBW_BYTES, 0),
7: ("proof_address", NPBW_BYTES, NPBF_STR),
8: ("proof_signature", NPBW_BYTES, NPBF_STR),
9: ("proof_utxos", NPBW_BYTES, 0),
10: ("pkhash_buyer_to", NPBW_BYTES, 0),
11: ("message_nets", NPBW_BYTES, NPBF_STR),
}
class BidAcceptMessage(NonProtobufClass):
# Step 3, seller -> buyer
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("initiate_txid", NPBW_BYTES, 0),
3: ("contract_script", NPBW_BYTES, 0),
4: ("pkhash_seller", NPBW_BYTES, 0),
}
class OfferRevokeMessage(NonProtobufClass):
_map = {
1: ("offer_msg_id", NPBW_BYTES, 0),
2: ("signature", NPBW_BYTES, 0),
}
class BidRejectMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("reject_code", NPBW_INT, 0),
}
class XmrBidMessage(NonProtobufClass):
# MSG1L, F -> L
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkaf", NPBW_BYTES, 0),
7: ("kbvf", NPBW_BYTES, 0),
8: ("kbsf_dleag", NPBW_BYTES, 0),
9: ("dest_af", NPBW_BYTES, 0),
10: ("message_nets", NPBW_BYTES, NPBF_STR),
}
class XmrSplitMessage(NonProtobufClass):
_map = {
1: ("msg_id", NPBW_BYTES, 0),
2: ("msg_type", NPBW_INT, 0),
3: ("sequence", NPBW_INT, 0),
4: ("dleag", NPBW_BYTES, 0),
}
class XmrBidAcceptMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkal", NPBW_BYTES, 0),
3: ("kbvl", NPBW_BYTES, 0),
4: ("kbsl_dleag", NPBW_BYTES, 0),
# MSG2F
5: ("a_lock_tx", NPBW_BYTES, 0),
6: ("a_lock_tx_script", NPBW_BYTES, 0),
7: ("a_lock_refund_tx", NPBW_BYTES, 0),
8: ("a_lock_refund_tx_script", NPBW_BYTES, 0),
9: ("a_lock_refund_spend_tx", NPBW_BYTES, 0),
10: ("al_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockTxSigsMessage(NonProtobufClass):
# MSG3L
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("af_lock_refund_spend_tx_esig", NPBW_BYTES, 0),
3: ("af_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockSpendTxMessage(NonProtobufClass):
# MSG4F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("a_lock_spend_tx", NPBW_BYTES, 0),
3: ("kal_sig", NPBW_BYTES, 0),
}
class XmrBidLockReleaseMessage(NonProtobufClass):
# MSG5F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("al_lock_spend_tx_esig", NPBW_BYTES, 0),
}
class ADSBidIntentMessage(NonProtobufClass):
# L -> F Sent from bidder, construct a reverse bid
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("message_nets", NPBW_BYTES, NPBF_STR),
}
class ADSBidIntentAcceptMessage(NonProtobufClass):
# F -> L Sent from offerer, construct a reverse bid
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkaf", NPBW_BYTES, 0),
3: ("kbvf", NPBW_BYTES, 0),
4: ("kbsf_dleag", NPBW_BYTES, 0),
5: ("dest_af", NPBW_BYTES, 0),
}
class ConnectReqMessage(NonProtobufClass):
_map = {
1: ("network_type", NPBW_INT, 0),
2: ("network_data", NPBW_BYTES, 0),
3: ("request_type", NPBW_INT, 0),
4: ("request_data", NPBW_BYTES, 0),
}
class MessagePortalOffer(NonProtobufClass):
_map = {
1: ("network_type_from", NPBW_INT, 0),
2: ("network_type_to", NPBW_INT, 0),
3: ("portal_address_from", NPBW_BYTES, 0),
4: ("portal_address_to", NPBW_BYTES, 0),
5: ("time_valid", NPBW_INT, 0),
6: ("smsg_difficulty", NPBW_INT, 0),
}
class MessagePortalSend(NonProtobufClass):
_map = {
1: ("forward_address", NPBW_BYTES, 0), # pubkey, 33 bytes
2: ("message_bytes", NPBW_BYTES, 0),
}
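
The encoding implemented by to_bytes/from_bytes above is the protobuf wire format the docstring describes: each field is prefixed with a varint tag of (field_num << 3) | wire_type, varint fields carry the value directly, length-delimited fields carry a varint length then the raw bytes, and zero or empty fields are omitted. A small worked example using BidRejectMessage from this module (the 4-byte msg id is illustrative):

msg = BidRejectMessage(bid_msg_id=b"\x01" * 4, reject_code=2)
data = msg.to_bytes()

# Field 1 (bid_msg_id, length-delimited): tag (1 << 3) | 2 = 0x0a, varint
# length 0x04, then the bytes.
# Field 2 (reject_code, varint): tag (2 << 3) | 0 = 0x10, then the value 0x02.
assert data == bytes([0x0A, 0x04, 0x01, 0x01, 0x01, 0x01, 0x10, 0x02])

decoded = BidRejectMessage()
decoded.from_bytes(data)
assert decoded.reject_code == 2 and decoded.bid_msg_id == b"\x01" * 4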

basicswap/messages_pb2.py Normal file
View File

@@ -0,0 +1,51 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: messages.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0emessages.proto\x12\tbasicswap\"\xa6\x04\n\x0cOfferMessage\x12\x11\n\tcoin_from\x18\x01 \x01(\r\x12\x0f\n\x07\x63oin_to\x18\x02 \x01(\r\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x16\n\x0emin_bid_amount\x18\x05 \x01(\x04\x12\x12\n\ntime_valid\x18\x06 \x01(\x04\x12\x33\n\tlock_type\x18\x07 \x01(\x0e\x32 .basicswap.OfferMessage.LockType\x12\x12\n\nlock_value\x18\x08 \x01(\r\x12\x11\n\tswap_type\x18\t \x01(\r\x12\x15\n\rproof_address\x18\n \x01(\t\x12\x17\n\x0fproof_signature\x18\x0b \x01(\t\x12\x15\n\rpkhash_seller\x18\x0c \x01(\x0c\x12\x13\n\x0bsecret_hash\x18\r \x01(\x0c\x12\x15\n\rfee_rate_from\x18\x0e \x01(\x04\x12\x13\n\x0b\x66\x65\x65_rate_to\x18\x0f \x01(\x04\x12\x18\n\x10protocol_version\x18\x10 \x01(\r\x12\x19\n\x11\x61mount_negotiable\x18\x11 \x01(\x08\x12\x17\n\x0frate_negotiable\x18\x12 \x01(\x08\"q\n\x08LockType\x12\x0b\n\x07NOT_SET\x10\x00\x12\x18\n\x14SEQUENCE_LOCK_BLOCKS\x10\x01\x12\x16\n\x12SEQUENCE_LOCK_TIME\x10\x02\x12\x13\n\x0f\x41\x42S_LOCK_BLOCKS\x10\x03\x12\x11\n\rABS_LOCK_TIME\x10\x04\"\xb4\x01\n\nBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x14\n\x0cpkhash_buyer\x18\x05 \x01(\x0c\x12\x15\n\rproof_address\x18\x06 \x01(\t\x12\x17\n\x0fproof_signature\x18\x07 \x01(\t\x12\x18\n\x10protocol_version\x18\x08 \x01(\r\"V\n\x10\x42idAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x15\n\rinitiate_txid\x18\x02 \x01(\x0c\x12\x17\n\x0f\x63ontract_script\x18\x03 \x01(\x0c\"=\n\x12OfferRevokeMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\";\n\x10\x42idRejectMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x13\n\x0breject_code\x18\x02 \x01(\r\"\xb2\x01\n\rXmrBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x0c\n\x04pkaf\x18\x05 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x06 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x07 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x08 \x01(\x0c\x12\x18\n\x10protocol_version\x18\t \x01(\r\"T\n\x0fXmrSplitMessage\x12\x0e\n\x06msg_id\x18\x01 \x01(\x0c\x12\x10\n\x08msg_type\x18\x02 \x01(\r\x12\x10\n\x08sequence\x18\x03 \x01(\r\x12\r\n\x05\x64leag\x18\x04 \x01(\x0c\"\x80\x02\n\x13XmrBidAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkal\x18\x03 \x01(\x0c\x12\x0c\n\x04kbvl\x18\x04 \x01(\x0c\x12\x12\n\nkbsl_dleag\x18\x05 \x01(\x0c\x12\x11\n\ta_lock_tx\x18\x06 \x01(\x0c\x12\x18\n\x10\x61_lock_tx_script\x18\x07 \x01(\x0c\x12\x18\n\x10\x61_lock_refund_tx\x18\x08 \x01(\x0c\x12\x1f\n\x17\x61_lock_refund_tx_script\x18\t \x01(\x0c\x12\x1e\n\x16\x61_lock_refund_spend_tx\x18\n \x01(\x0c\x12\x1d\n\x15\x61l_lock_refund_tx_sig\x18\x0b \x01(\x0c\"r\n\x17XmrBidLockTxSigsMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12$\n\x1c\x61\x66_lock_refund_spend_tx_esig\x18\x02 \x01(\x0c\x12\x1d\n\x15\x61\x66_lock_refund_tx_sig\x18\x03 \x01(\x0c\"X\n\x18XmrBidLockSpendTxMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x17\n\x0f\x61_lock_spend_tx\x18\x02 \x01(\x0c\x12\x0f\n\x07kal_sig\x18\x03 \x01(\x0c\"M\n\x18XmrBidLockReleaseMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x1d\n\x15\x61l_lock_spend_tx_esig\x18\x02 \x01(\x0c\"\x8f\x01\n\x13\x41\x44SBidIntentMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 
\x01(\x04\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x11\n\tamount_to\x18\x04 \x01(\x04\x12\x0c\n\x04rate\x18\x05 \x01(\x04\x12\x18\n\x10protocol_version\x18\x06 \x01(\r\"p\n\x19\x41\x44SBidIntentAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkaf\x18\x02 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x03 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x04 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x05 \x01(\x0c\x62\x06proto3')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'messages_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_OFFERMESSAGE._serialized_start=30
_OFFERMESSAGE._serialized_end=580
_OFFERMESSAGE_LOCKTYPE._serialized_start=467
_OFFERMESSAGE_LOCKTYPE._serialized_end=580
_BIDMESSAGE._serialized_start=583
_BIDMESSAGE._serialized_end=763
_BIDACCEPTMESSAGE._serialized_start=765
_BIDACCEPTMESSAGE._serialized_end=851
_OFFERREVOKEMESSAGE._serialized_start=853
_OFFERREVOKEMESSAGE._serialized_end=914
_BIDREJECTMESSAGE._serialized_start=916
_BIDREJECTMESSAGE._serialized_end=975
_XMRBIDMESSAGE._serialized_start=978
_XMRBIDMESSAGE._serialized_end=1156
_XMRSPLITMESSAGE._serialized_start=1158
_XMRSPLITMESSAGE._serialized_end=1242
_XMRBIDACCEPTMESSAGE._serialized_start=1245
_XMRBIDACCEPTMESSAGE._serialized_end=1501
_XMRBIDLOCKTXSIGSMESSAGE._serialized_start=1503
_XMRBIDLOCKTXSIGSMESSAGE._serialized_end=1617
_XMRBIDLOCKSPENDTXMESSAGE._serialized_start=1619
_XMRBIDLOCKSPENDTXMESSAGE._serialized_end=1707
_XMRBIDLOCKRELEASEMESSAGE._serialized_start=1709
_XMRBIDLOCKRELEASEMESSAGE._serialized_end=1786
_ADSBIDINTENTMESSAGE._serialized_start=1789
_ADSBIDINTENTMESSAGE._serialized_end=1932
_ADSBIDINTENTACCEPTMESSAGE._serialized_start=1934
_ADSBIDINTENTACCEPTMESSAGE._serialized_end=2046
# @@protoc_insertion_point(module_scope)
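
The generated module behaves like any protobuf Python binding; a brief usage sketch (the field values are placeholders, not meaningful coin ids or rates):

from basicswap import messages_pb2

offer = messages_pb2.OfferMessage()
offer.coin_from = 1  # placeholder coin ids
offer.coin_to = 6
offer.amount_from = 100000000
offer.rate = 250000000000
offer.lock_type = messages_pb2.OfferMessage.SEQUENCE_LOCK_TIME
offer.lock_value = 2840

data = offer.SerializeToString()  # bytes sent to the counterparty
decoded = messages_pb2.OfferMessage()
decoded.ParseFromString(data)
assert decoded.rate == offer.rate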

View File

@@ -5,25 +5,26 @@
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
"""
Message 2 bytes msg_class, 4 bytes length, [ 2 bytes msg_type, payload ]
'''
Message 2 bytes msg_class, 4 bytes length, [ 2 bytes msg_type, payload ]
Handshake procedure:
node0 connecting to node1
node0 send_handshake
node1 process_handshake
node1 send_ping - With a version field
node0 recv_ping
Both nodes are initialised
Handshake procedure:
node0 connecting to node1
node0 send_handshake
node1 process_handshake
node1 send_ping - With a version field
node0 recv_ping
Both nodes are initialised
XChaCha20_Poly1305 mac is 16 bytes
"""
XChaCha20_Poly1305 mac is 16 bytes
'''
import time
import queue
import random
import select
import socket
import struct
import hashlib
import logging
import secrets
@@ -36,12 +37,11 @@ from Crypto.Cipher import ChaCha20_Poly1305 # TODO: Add to libsecp256k1/coincur
from coincurve.keys import PrivateKey, PublicKey
from basicswap.contrib.rfc6979 import (
rfc6979_hmac_sha256_initialize,
rfc6979_hmac_sha256_generate,
)
rfc6979_hmac_sha256_generate)
START_TOKEN = 0xABCD
MSG_START_TOKEN = START_TOKEN.to_bytes(2, "big")
START_TOKEN = 0xabcd
MSG_START_TOKEN = struct.pack('>H', START_TOKEN)
MSG_MAX_SIZE = 0x200000 # 2MB
@@ -64,71 +64,49 @@ class NetMessageTypes(IntEnum):
return value in cls._value2member_map_
"""
'''
class NetMessage:
def __init__(self):
self._msg_class = None # 2 bytes
self._len = None # 4 bytes
self._msg_type = None # 2 bytes
"""
'''
# Ensure handshake keys are not reused by including the time in the msg, mac and key hash
# Verify timestamp is not too old
# Add keys to the db to catch concurrent attempts; records can be cleared periodically, and the timestamp should catch older replay attempts
class MsgHandshake:
__slots__ = ("_timestamp", "_ephem_pk", "_ct", "_mac")
__slots__ = ('_timestamp', '_ephem_pk', '_ct', '_mac')
def __init__(self):
pass
def encode_aad(self): # Additional Authenticated Data
return (
int(NetMessageTypes.HANDSHAKE).to_bytes(2, "big")
+ self._timestamp.to_bytes(8, "big")
+ self._ephem_pk
)
return struct.pack('>H', NetMessageTypes.HANDSHAKE) + \
struct.pack('>Q', self._timestamp) + \
self._ephem_pk
def encode(self):
return self.encode_aad() + self._ct + self._mac
def decode(self, msg_mv):
o = 2
self._timestamp = int.from_bytes(msg_mv[o : o + 8], "big")
self._timestamp = struct.unpack('>Q', msg_mv[o: o + 8])[0]
o += 8
self._ephem_pk = bytes(msg_mv[o : o + 33])
self._ephem_pk = bytes(msg_mv[o: o + 33])
o += 33
self._ct = bytes(msg_mv[o:-16])
self._ct = bytes(msg_mv[o: -16])
self._mac = bytes(msg_mv[-16:])
class Peer:
__slots__ = (
"_mx",
"_pubkey",
"_address",
"_socket",
"_version",
"_ready",
"_incoming",
"_connected_at",
"_last_received_at",
"_bytes_sent",
"_bytes_received",
"_receiving_length",
"_receiving_buffer",
"_recv_messages",
"_misbehaving_score",
"_ke",
"_km",
"_dir",
"_sent_nonce",
"_recv_nonce",
"_last_handshake_at",
"_ping_nonce",
"_last_ping_at",
"_last_ping_rtt",
)
'_mx', '_pubkey', '_address', '_socket', '_version', '_ready', '_incoming',
'_connected_at', '_last_received_at', '_bytes_sent', '_bytes_received',
'_receiving_length', '_receiving_buffer', '_recv_messages', '_misbehaving_score',
'_ke', '_km', '_dir', '_sent_nonce', '_recv_nonce', '_last_handshake_at',
'_ping_nonce', '_last_ping_at', '_last_ping_rtt')
def __init__(self, address, socket, pubkey):
self._mx = threading.Lock()
@@ -164,16 +142,14 @@ def listen_thread(cls):
max_bytes = 0x10000
while cls._running:
# logging.info('[rm] network loop %d', cls._running)
readable, writable, errored = select.select(
cls._read_sockets, cls._write_sockets, cls._error_sockets, timeout
)
readable, writable, errored = select.select(cls._read_sockets, cls._write_sockets, cls._error_sockets, timeout)
cls._mx.acquire()
try:
disconnected_peers = []
for s in readable:
if s == cls._socket:
peer_socket, address = cls._socket.accept()
logging.info("Connection from %s", address)
logging.info('Connection from %s', address)
new_peer = Peer(address, peer_socket, None)
new_peer._incoming = True
cls._peers.append(new_peer)
@@ -185,12 +161,12 @@ def listen_thread(cls):
try:
bytes_recv = s.recv(max_bytes, socket.MSG_DONTWAIT)
except socket.error as se:
if se.args[0] not in (socket.EWOULDBLOCK,):
logging.error("Receive error %s", str(se))
if se.args[0] not in (socket.EWOULDBLOCK, ):
logging.error('Receive error %s', str(se))
disconnected_peers.append(peer)
continue
except Exception as e:
logging.error("Receive error %s", str(e))
logging.error('Receive error %s', str(e))
disconnected_peers.append(peer)
continue
@@ -200,7 +176,7 @@ def listen_thread(cls):
cls.receive_bytes(peer, bytes_recv)
for s in errored:
logging.warning("Socket error")
logging.warning('Socket error')
for peer in disconnected_peers:
cls.disconnect(peer)
@@ -218,9 +194,7 @@ def msg_thread(cls):
try:
now_us = time.time_ns() // 1000
if peer._ready is True:
if (
now_us - peer._last_ping_at >= 5000000
): # 5 seconds TODO: Make variable
if now_us - peer._last_ping_at >= 5000000: # 5 seconds TODO: Make variable
cls.send_ping(peer)
msg = peer._recv_messages.get(False)
cls.process_message(peer, msg)
@@ -228,7 +202,7 @@ def msg_thread(cls):
except queue.Empty:
pass
except Exception as e:
logging.warning("process message error %s", str(e))
logging.warning('process message error %s', str(e))
if cls._sc.debug:
logging.error(traceback.format_exc())
@@ -238,24 +212,9 @@ def msg_thread(cls):
class Network:
__slots__ = (
"_p2p_host",
"_p2p_port",
"_network_key",
"_network_pubkey",
"_sc",
"_peers",
"_max_connections",
"_running",
"_network_thread",
"_msg_thread",
"_mx",
"_socket",
"_read_sockets",
"_write_sockets",
"_error_sockets",
"_csprng",
"_seen_ephem_keys",
)
'_p2p_host', '_p2p_port', '_network_key', '_network_pubkey',
'_sc', '_peers', '_max_connections', '_running', '_network_thread', '_msg_thread',
'_mx', '_socket', '_read_sockets', '_write_sockets', '_error_sockets', '_csprng', '_seen_ephem_keys')
def __init__(self, p2p_host, p2p_port, network_key, swap_client):
self._p2p_host = p2p_host
@@ -320,13 +279,7 @@ class Network:
self._mx.release()
def add_connection(self, host, port, peer_pubkey):
self._sc.log.info(
"Connecting from %s to %s at %s %d",
self._network_pubkey.hex(),
peer_pubkey.hex(),
host,
port,
)
self._sc.log.info('Connecting from %s to %s at %s %d', self._network_pubkey.hex(), peer_pubkey.hex(), host, port)
self._mx.acquire()
try:
address = (host, port)
@@ -342,7 +295,7 @@ class Network:
self.send_handshake(peer)
def disconnect(self, peer):
self._sc.log.info("Closing peer socket %s", peer._address)
self._sc.log.info('Closing peer socket %s', peer._address)
self._read_sockets.pop(self._read_sockets.index(peer._socket))
self._error_sockets.pop(self._error_sockets.index(peer._socket))
peer.close()
@@ -353,11 +306,7 @@ class Network:
used = self._seen_ephem_keys.get(ephem_pk)
if used:
raise ValueError(
"Handshake ephem_pk reused %s peer %s",
"for" if direction == 1 else "by",
used[0],
)
raise ValueError('Handshake ephem_pk reused %s peer %s', 'for' if direction == 1 else 'by', used[0])
self._seen_ephem_keys[ephem_pk] = (peer._address, timestamp)
@@ -365,14 +314,12 @@ class Network:
self._seen_ephem_keys.popitem(last=False)
def send_handshake(self, peer):
self._sc.log.debug("send_handshake %s", peer._address)
self._sc.log.debug('send_handshake %s', peer._address)
peer._mx.acquire()
try:
# TODO: Drain peer._recv_messages
if not peer._recv_messages.empty():
self._sc.log.warning(
"send_handshake %s - Receive queue dumped.", peer._address
)
self._sc.log.warning('send_handshake %s - Receive queue dumped.', peer._address)
while not peer._recv_messages.empty():
peer._recv_messages.get(False)
@@ -386,7 +333,7 @@ class Network:
ss = k.ecdh(peer._pubkey)
hashed = hashlib.sha512(ss + msg._timestamp.to_bytes(8, "big")).digest()
hashed = hashlib.sha512(ss + struct.pack('>Q', msg._timestamp)).digest()
peer._ke = hashed[:32]
peer._km = hashed[32:]
@@ -415,13 +362,11 @@ class Network:
peer._mx.release()
def process_handshake(self, peer, msg_mv):
self._sc.log.debug("process_handshake %s", peer._address)
self._sc.log.debug('process_handshake %s', peer._address)
# TODO: Drain peer._recv_messages
if not peer._recv_messages.empty():
self._sc.log.warning(
"process_handshake %s - Receive queue dumped.", peer._address
)
self._sc.log.warning('process_handshake %s - Receive queue dumped.', peer._address)
while not peer._recv_messages.empty():
peer._recv_messages.get(False)
@@ -431,19 +376,17 @@ class Network:
try:
now = int(time.time())
if now - peer._last_handshake_at < 30:
raise ValueError("Too many handshakes from peer %s", peer._address)
raise ValueError('Too many handshakes from peer %s', peer._address)
if abs(msg._timestamp - now) > TIMESTAMP_LEEWAY:
raise ValueError("Bad handshake timestamp from peer %s", peer._address)
raise ValueError('Bad handshake timestamp from peer %s', peer._address)
self.check_handshake_ephem_key(
peer, msg._timestamp, msg._ephem_pk, direction=2
)
self.check_handshake_ephem_key(peer, msg._timestamp, msg._ephem_pk, direction=2)
nk = PrivateKey(self._network_key)
ss = nk.ecdh(msg._ephem_pk)
hashed = hashlib.sha512(ss + msg._timestamp.to_bytes(8, "big")).digest()
hashed = hashlib.sha512(ss + struct.pack('>Q', msg._timestamp)).digest()
peer._ke = hashed[:32]
peer._km = hashed[32:]
@@ -453,9 +396,7 @@ class Network:
aad += nonce
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(aad)
plaintext = cipher.decrypt_and_verify(
msg._ct, msg._mac
) # Will raise error if mac doesn't match
plaintext = cipher.decrypt_and_verify(msg._ct, msg._mac) # Will raise error if mac doesn't match
peer._version = plaintext[:6]
sig = plaintext[6:]
@@ -474,30 +415,26 @@ class Network:
except Exception as e:
# TODO: misbehaving
self._sc.log.debug("[rm] process_handshake %s", str(e))
self._sc.log.debug('[rm] process_handshake %s', str(e))
def process_ping(self, peer, msg_mv):
nonce = peer._recv_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_mv[0:2])
cipher.update(msg_mv[0: 2])
cipher.update(nonce)
mac = msg_mv[-16:]
plaintext = cipher.decrypt_and_verify(msg_mv[2:-16], mac)
plaintext = cipher.decrypt_and_verify(msg_mv[2: -16], mac)
ping_nonce = int.from_bytes(plaintext[:4], "big")
ping_nonce = struct.unpack('>I', plaintext[:4])[0]
# Version is added to a ping following a handshake message
if len(plaintext) >= 10:
peer._ready = True
version = plaintext[4:10]
version = plaintext[4: 10]
if peer._version is None:
peer._version = version
self._sc.log.debug(
"Set version from ping %s, %s",
peer._pubkey.hex(),
peer._version.hex(),
)
self._sc.log.debug('Set version from ping %s, %s', peer._pubkey.hex(), peer._version.hex())
peer._recv_nonce = hashlib.sha256(nonce + mac).digest()
@@ -507,32 +444,32 @@ class Network:
nonce = peer._recv_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_mv[0:2])
cipher.update(msg_mv[0: 2])
cipher.update(nonce)
mac = msg_mv[-16:]
plaintext = cipher.decrypt_and_verify(msg_mv[2:-16], mac)
plaintext = cipher.decrypt_and_verify(msg_mv[2: -16], mac)
pong_nonce = int.from_bytes(plaintext[:4], "big")
pong_nonce = struct.unpack('>I', plaintext[:4])[0]
if pong_nonce == peer._ping_nonce:
peer._last_ping_rtt = (time.time_ns() // 1000) - peer._last_ping_at
else:
self._sc.log.debug("Pong received out of order %s", peer._address)
self._sc.log.debug('Pong received out of order %s', peer._address)
peer._recv_nonce = hashlib.sha256(nonce + mac).digest()
def send_ping(self, peer):
ping_nonce = random.getrandbits(32)
msg_bytes = int(NetMessageTypes.PING).to_bytes(2, "big")
msg_bytes = struct.pack('>H', NetMessageTypes.PING)
nonce = peer._sent_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_bytes)
cipher.update(nonce)
payload = ping_nonce.to_bytes(4, "big")
payload = struct.pack('>I', ping_nonce)
if peer._last_ping_at == 0:
payload += self._sc._version
ct, mac = cipher.encrypt_and_digest(payload)
@@ -547,14 +484,14 @@ class Network:
self.send_msg(peer, msg_bytes)
def send_pong(self, peer, ping_nonce):
msg_bytes = int(NetMessageTypes.PONG).to_bytes(2, "big")
msg_bytes = struct.pack('>H', NetMessageTypes.PONG)
nonce = peer._sent_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_bytes)
cipher.update(nonce)
payload = ping_nonce.to_bytes(4, "big")
payload = struct.pack('>I', ping_nonce)
ct, mac = cipher.encrypt_and_digest(payload)
msg_bytes += ct + mac
@@ -566,21 +503,19 @@ class Network:
msg_encoded = msg if isinstance(msg, bytes) else msg.encode()
len_encoded = len(msg_encoded)
msg_packed = (
bytearray(MSG_START_TOKEN) + len_encoded.to_bytes(4, "big") + msg_encoded
)
msg_packed = bytearray(MSG_START_TOKEN) + struct.pack('>I', len_encoded) + msg_encoded
peer._socket.sendall(msg_packed)
peer._bytes_sent += len_encoded
def process_message(self, peer, msg_bytes):
logging.info("[rm] process_message %s len %d", peer._address, len(msg_bytes))
logging.info('[rm] process_message %s len %d', peer._address, len(msg_bytes))
peer._mx.acquire()
try:
mv = memoryview(msg_bytes)
o = 0
msg_type = int.from_bytes(mv[o : o + 2], "big")
msg_type = struct.unpack('>H', mv[o: o + 2])[0]
if msg_type == NetMessageTypes.HANDSHAKE:
self.process_handshake(peer, mv)
elif msg_type == NetMessageTypes.PING:
@@ -588,7 +523,7 @@ class Network:
elif msg_type == NetMessageTypes.PONG:
self.process_pong(peer, mv)
else:
self._sc.log.debug("Unknown message type %d", msg_type)
self._sc.log.debug('Unknown message type %d', msg_type)
finally:
peer._mx.release()
@@ -599,6 +534,7 @@ class Network:
peer._last_received_at = time.time()
peer._bytes_received += len_received
invalid_msg = False
mv = memoryview(bytes_recv)
o = 0
@@ -606,34 +542,34 @@ class Network:
while o < len_received:
if peer._receiving_length == 0:
if len(bytes_recv) < MSG_HEADER_LEN:
raise ValueError("Msg too short")
raise ValueError('Msg too short')
if mv[o : o + 2] != MSG_START_TOKEN:
raise ValueError("Invalid start token")
if mv[o: o + 2] != MSG_START_TOKEN:
raise ValueError('Invalid start token')
o += 2
msg_len = int.from_bytes(mv[o : o + 4], "big")
msg_len = struct.unpack('>I', mv[o: o + 4])[0]
o += 4
if msg_len < 2 or msg_len > MSG_MAX_SIZE:
raise ValueError("Invalid data length")
raise ValueError('Invalid data length')
# Precheck msg_type
msg_type = int.from_bytes(mv[o : o + 2], "big")
msg_type = struct.unpack('>H', mv[o: o + 2])[0]
# o += 2 # Don't inc offset, msg includes type
if not NetMessageTypes.has_value(msg_type):
raise ValueError("Invalid msg type")
raise ValueError('Invalid msg type')
peer._receiving_length = msg_len
len_pkt = len_received - o
len_pkt = (len_received - o)
nc = msg_len if len_pkt > msg_len else len_pkt
peer._receiving_buffer = mv[o : o + nc]
peer._receiving_buffer = mv[o: o + nc]
o += nc
else:
len_to_go = peer._receiving_length - len(peer._receiving_buffer)
len_pkt = len_received - o
len_pkt = (len_received - o)
nc = len_to_go if len_pkt > len_to_go else len_pkt
peer._receiving_buffer = mv[o : o + nc]
peer._receiving_buffer = mv[o: o + nc]
o += nc
if len(peer._receiving_buffer) == peer._receiving_length:
peer._recv_messages.put(peer._receiving_buffer)
@@ -641,13 +577,11 @@ class Network:
except Exception as e:
if self._sc.debug:
self._sc.log.error(
"Invalid message received from %s %s", peer._address, str(e)
)
self._sc.log.error('Invalid message received from %s %s', peer._address, str(e))
# TODO: misbehaving
def test_onion(self, path):
self._sc.log.debug("test_onion packet")
self._sc.log.debug('test_onion packet')
def get_info(self):
rv = {}
@@ -656,14 +590,14 @@ class Network:
with self._mx:
for peer in self._peers:
peer_info = {
"pubkey": "Unknown" if not peer._pubkey else peer._pubkey.hex(),
"address": "{}:{}".format(peer._address[0], peer._address[1]),
"bytessent": peer._bytes_sent,
"bytesrecv": peer._bytes_received,
"ready": peer._ready,
"incoming": peer._incoming,
'pubkey': 'Unknown' if not peer._pubkey else peer._pubkey.hex(),
'address': '{}:{}'.format(peer._address[0], peer._address[1]),
'bytessent': peer._bytes_sent,
'bytesrecv': peer._bytes_received,
'ready': peer._ready,
'incoming': peer._incoming,
}
peers.append(peer_info)
rv["peers"] = peers
rv['peers'] = peers
return rv
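
Both sides of this diff keep the framing described in the module docstring: a 2-byte start token (0xABCD), a 4-byte big-endian length, then the message itself starting with a 2-byte type. A minimal standalone sketch of packing and splitting that frame; it omits the ChaCha20_Poly1305 layer applied to real payloads, and the type value 3 is illustrative rather than a real NetMessageTypes member:

import struct

START_TOKEN = 0xABCD

def pack_frame(msg_type: int, payload: bytes) -> bytes:
    body = struct.pack(">H", msg_type) + payload
    return struct.pack(">H", START_TOKEN) + struct.pack(">I", len(body)) + body

def unpack_frame(frame: bytes):
    token, body_len = struct.unpack(">HI", frame[:6])  # 2 byte token + 4 byte length
    if token != START_TOKEN:
        raise ValueError("Invalid start token")
    body = frame[6:6 + body_len]
    return struct.unpack(">H", body[:2])[0], bytes(body[2:])

frame = pack_frame(3, b"\x00" * 20)
assert unpack_frame(frame) == (3, b"\x00" * 20)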

Some files were not shown because too many files have changed in this diff