2 Commits

Author SHA1 Message Date
tecnovert
f57888c455 preparescript: Add temporary pivx release. 2022-08-18 00:18:50 +02:00
tecnovert
e1528c9d63 coins: Add PIVX
No CSV or segwit.
sethdseed requires a fully synced chain; manual intervention is required to set a key derived from the master mnemonic.
Requires a pivxd version with a backported scantxoutset command.
2022-08-17 01:05:32 +02:00
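A rough sketch of the manual step described above (illustrative only: the RPC port, credentials, WIF value and descriptor are placeholder assumptions, and the callrpc helper from basicswap.rpc is used as it appears elsewhere in this changeset):

    from basicswap.rpc import callrpc

    PIVX_RPC_PORT = 51473               # placeholder: assumed pivxd RPC port
    RPC_AUTH = "rpcuser:rpcpassword"    # placeholder credentials

    # WIF key derived externally from the master mnemonic (placeholder value).
    seed_wif = "..."

    # sethdseed is only accepted once the chain is fully synced.
    info = callrpc(PIVX_RPC_PORT, RPC_AUTH, "getblockchaininfo")
    if info["blocks"] >= info["headers"]:
        callrpc(PIVX_RPC_PORT, RPC_AUTH, "sethdseed", [True, seed_wif])

    # The backported scantxoutset command lets the swap logic locate lock tx
    # outputs without importing addresses into the wallet (placeholder descriptor).
    utxos = callrpc(PIVX_RPC_PORT, RPC_AUTH, "scantxoutset", ["start", ["addr(D...)"]])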
410 changed files with 17777 additions and 326602 deletions

View File

@@ -3,11 +3,12 @@ container:
lint_task:
setup_script:
- pip install flake8 codespell
- pip install flake8
- pip install codespell
script:
- flake8 --version
- flake8 --ignore=E203,E501,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,.eggs,.tox,bin/install_certifi.py
- codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py,*basicswap/static
- PYTHONWARNINGS="ignore" flake8 --ignore=E501,F841,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,messages_pb2.py,.eggs,.tox,bin/install_certifi.py
- codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py
test_task:
environment:
@@ -16,21 +17,25 @@ test_task:
- BIN_DIR: /tmp/cached_bin
- PARTICL_BINDIR: ${BIN_DIR}/particl
- BITCOIN_BINDIR: ${BIN_DIR}/bitcoin
- BITCOINCASH_BINDIR: ${BIN_DIR}/bitcoincash
- LITECOIN_BINDIR: ${BIN_DIR}/litecoin
- XMR_BINDIR: ${BIN_DIR}/monero
setup_script:
- apt-get update
- apt-get install -y python3-pip pkg-config
- apt-get install -y wget python3-pip gnupg unzip protobuf-compiler automake libtool pkg-config
- pip install tox pytest
- pip install .
- python3 setup.py install
- wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_v0.1.zip
- unzip -d coincurve-anonswap coincurve-anonswap.zip
- mv ./coincurve-anonswap/*/{.,}* ./coincurve-anonswap || true
- cd coincurve-anonswap
- python3 setup.py install --force
bins_cache:
folder: /tmp/cached_bin
reupload_on_changes: false
fingerprint_script:
- basicswap-prepare -v
populate_script:
- basicswap-prepare --bindir=/tmp/cached_bin --preparebinonly --withcoins=particl,bitcoin,bitcoincash,litecoin,monero
- basicswap-prepare --bindir=/tmp/cached_bin --preparebinonly --withcoins=particl,bitcoin,litecoin,monero
script:
- cd "${CIRRUS_WORKING_DIR}"
- export DATADIRS="${TEST_DIR}"
@@ -42,4 +47,3 @@ test_task:
- pytest tests/basicswap/test_other.py
- pytest tests/basicswap/test_run.py
- pytest tests/basicswap/test_reload.py
- pytest tests/basicswap/test_btc_xmr.py -k 'test_01_a or test_01_b or test_02_a or test_02_b'

Binary file not shown (image, 230 KiB).

View File

@@ -1,11 +0,0 @@
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
open-pull-requests-limit: 20
target-branch: "dev"

View File

@@ -1,120 +0,0 @@
name: ci
on: [push, pull_request]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
BIN_DIR: /tmp/cached_bin
TEST_RELOAD_PATH: /tmp/test_basicswap
BSX_SELENIUM_DRIVER: firefox-ci
XMR_RPC_USER: xmr_user
XMR_RPC_PWD: xmr_pwd
jobs:
ci:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.12"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
if [ $(dpkg-query -W -f='${Status}' firefox 2>/dev/null | grep -c "ok installed") -eq 0 ]; then
install -d -m 0755 /etc/apt/keyrings
wget -q https://packages.mozilla.org/apt/repo-signing-key.gpg -O- | sudo tee /etc/apt/keyrings/packages.mozilla.org.asc > /dev/null
echo "deb [signed-by=/etc/apt/keyrings/packages.mozilla.org.asc] https://packages.mozilla.org/apt mozilla main" | sudo tee -a /etc/apt/sources.list.d/mozilla.list > /dev/null
echo "Package: *" | sudo tee /etc/apt/preferences.d/mozilla
echo "Pin: origin packages.mozilla.org" | sudo tee -a /etc/apt/preferences.d/mozilla
echo "Pin-Priority: 1000" | sudo tee -a /etc/apt/preferences.d/mozilla
sudo apt-get update
sudo apt-get install -y firefox
fi
python -m pip install --upgrade pip
pip install python-gnupg
pip install -e .[dev]
pip install -r requirements.txt --require-hashes
- name: Install
run: |
pip install .
# Print the core versions to a file for caching
basicswap-prepare --version --withcoins=bitcoin | tail -n +2 > core_versions.txt
cat core_versions.txt
- name: Run flake8
run: |
flake8 --ignore=E203,E501,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,.eggs,.tox,bin/install_certifi.py
- name: Run codespell
run: |
codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py,*basicswap/static
- name: Run black
run: |
black --check --diff --exclude="contrib" .
- name: Run test_other
run: |
pytest tests/basicswap/test_other.py
- name: Cache coin cores
id: cache-cores
uses: actions/cache@v3
env:
cache-name: cache-cores
with:
path: /tmp/cached_bin
key: cores-${{ runner.os }}-${{ hashFiles('**/core_versions.txt') }}
- if: ${{ steps.cache-cores.outputs.cache-hit != 'true' }}
name: Running basicswap-prepare
run: |
basicswap-prepare --bindir="$BIN_DIR" --preparebinonly --withcoins=particl,bitcoin,monero
- name: Run test_prepare
run: |
export PYTHONPATH=$(pwd)
export TEST_BIN_PATH="$BIN_DIR"
export TEST_PATH=/tmp/test_prepare
pytest tests/basicswap/extended/test_prepare.py
- name: Run test_xmr
run: |
export PYTHONPATH=$(pwd)
export PARTICL_BINDIR="$BIN_DIR/particl"
export BITCOIN_BINDIR="$BIN_DIR/bitcoin"
export XMR_BINDIR="$BIN_DIR/monero"
pytest tests/basicswap/test_btc_xmr.py::TestBTC -k "test_003_api or test_02_a_leader_recover_a_lock_tx"
- name: Run test_encrypted_xmr_reload
run: |
export PYTHONPATH=$(pwd)
export TEST_PATH=${TEST_RELOAD_PATH}
mkdir -p ${TEST_PATH}/bin
cp -r $BIN_DIR/* ${TEST_PATH}/bin/
pytest tests/basicswap/extended/test_encrypted_xmr_reload.py
- name: Run selenium tests
run: |
export TEST_PATH=/tmp/test_persistent
mkdir -p ${TEST_PATH}/bin
cp -r $BIN_DIR/* ${TEST_PATH}/bin/
export PYTHONPATH=$(pwd)
python tests/basicswap/extended/test_xmr_persistent.py > /tmp/log.txt 2>&1 & TEST_NETWORK_PID=$!
echo "Starting test_xmr_persistent, PID $TEST_NETWORK_PID"
i=0
until curl -s -f -o /dev/null "http://localhost:12701/json/coins"
do
tail -n 1 /tmp/log.txt
sleep 2
((++i))
if [ $i -ge 60 ]; then
echo "Timed out waiting for test_xmr_persistent, PID $TEST_NETWORK_PID"
kill $TEST_NETWORK_PID
(exit 1) # Fail test
break
fi
done
echo "Running test_settings.py"
python tests/basicswap/selenium/test_settings.py
echo "Running test_swap_direction.py"
python tests/basicswap/selenium/test_swap_direction.py
kill $TEST_NETWORK_PID

10
.gitignore vendored
View File

@@ -1,5 +1,4 @@
old/
build/
*.pyc
__pycache__
/dist/
@@ -8,13 +7,4 @@ __pycache__
/*.eggs
.tox
.eggs
.ruff_cache
.pytest_cache
*~
# geckodriver.log
*.log
docker/.env
# vscode dev container settings
compose-dev.yaml

60
.travis.yml Normal file
View File

@@ -0,0 +1,60 @@
dist: bionic
os: linux
language: python
python: '3.7'
stages:
- lint
- test
env:
global:
- TEST_DIR=${HOME}/test_basicswap2
- TEST_RELOAD_PATH=~/test_basicswap1
- BIN_DIR=~/cached_bin
- PARTICL_BINDIR=${BIN_DIR}/particl
- BITCOIN_BINDIR=${BIN_DIR}/bitcoin
- LITECOIN_BINDIR=${BIN_DIR}/litecoin
- XMR_BINDIR=${BIN_DIR}/monero
cache:
directories:
- "$BIN_DIR"
before_install:
- sudo apt-get install -y wget python3-pip gnupg unzip protobuf-compiler automake libtool pkg-config
install:
- travis_retry pip install tox pytest
before_script:
- wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_v0.1.zip
- unzip -d coincurve-anonswap coincurve-anonswap.zip
- mv ./coincurve-anonswap/*/{.,}* ./coincurve-anonswap || true
- cd coincurve-anonswap
- python3 setup.py install --force
script:
- cd $TRAVIS_BUILD_DIR
- python3 setup.py install
- basicswap-prepare --bindir=${BIN_DIR} --preparebinonly --withcoins=particl,bitcoin,litecoin,monero
- export DATADIRS="${TEST_DIR}"
- mkdir -p "${DATADIRS}/bin"
- cp -r ${BIN_DIR} "${DATADIRS}/bin"
- mkdir -p "${TEST_RELOAD_PATH}/bin"
- cp -r ${BIN_DIR} "${TEST_RELOAD_PATH}/bin"
- # tox
- pytest tests/basicswap/test_xmr.py
- pytest tests/basicswap/test_xmr_reload.py
- pytest tests/basicswap/test_xmr_bids_offline.py
after_success:
- echo "End test"
jobs:
include:
- stage: lint
env:
cache: false
install:
- travis_retry pip install flake8==3.7.0
- travis_retry pip install codespell==1.15.0
before_script:
script:
- PYTHONWARNINGS="ignore" flake8 --ignore=E501,F841,W503 --exclude=basicswap/contrib,basicswap/interface/contrib,messages_pb2.py,.eggs,.tox,bin/install_certifi.py
- codespell --check-filenames --disable-colors --quiet-level=7 --ignore-words=tests/lint/spelling.ignore-words.txt -S .git,.eggs,.tox,pgp,*.pyc,*basicswap/contrib,*basicswap/interface/contrib,*mnemonics.py,bin/install_certifi.py
after_success:
- echo "End lint"
- stage: test
env:

View File

@@ -5,15 +5,30 @@ ENV LANG=C.UTF-8 \
DATADIRS="/coindata"
RUN apt-get update; \
apt-get install -y --no-install-recommends \
python3-pip libpython3-dev gnupg pkg-config gcc libc-dev gosu tzdata cmake ninja-build;
apt-get install -y wget python3-pip gnupg unzip make g++ autoconf automake libtool pkg-config gosu tzdata;
# Must install protoc directly as latest package is only on 3.12
RUN wget -O protobuf_src.tar.gz https://github.com/protocolbuffers/protobuf/releases/download/v21.1/protobuf-python-4.21.1.tar.gz && \
tar xvf protobuf_src.tar.gz && \
cd protobuf-3.21.1 && \
./configure --prefix=/usr && \
make -j$(nproc) install && \
ldconfig
ARG COINCURVE_VERSION=v0.1
RUN wget -O coincurve-anonswap.zip https://github.com/tecnovert/coincurve/archive/refs/tags/anonswap_$COINCURVE_VERSION.zip && \
unzip coincurve-anonswap.zip && \
mv ./coincurve-anonswap_$COINCURVE_VERSION ./coincurve-anonswap && \
cd coincurve-anonswap && \
python3 setup.py install --force
# Install requirements first so as to skip in subsequent rebuilds
COPY ./requirements.txt requirements.txt
RUN pip3 install -r requirements.txt --require-hashes
RUN pip3 install -r requirements.txt
COPY . basicswap-master
RUN cd basicswap-master; \
protoc -I=basicswap --python_out=basicswap basicswap/messages.proto; \
pip3 install .;
RUN useradd -ms /bin/bash swap_user && \

View File

@@ -1,5 +1,5 @@
The MIT License (MIT)
Copyright (c) 2019-2024 tecnovert
Copyright (c) 2019 tecnovert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

5
MANIFEST.in Normal file
View File

@@ -0,0 +1,5 @@
include *.md LICENSE
recursive-include doc *
recursive-include basicswap/templates *
recursive-include basicswap/static *

161
README.md
View File

@@ -1,157 +1,20 @@
# BasicSwap DEX (BSX)
![BasicswapDEX Preview](.github-readme/basicswap_header.jpg)
# Simple Atomic Swap Network - Proof of Concept
**[Official Website](https://basicswapdex.com)** | **[News](https://particl.news)** | **[Tutorials](https://academy.particl.io)** | **[Chat]( https://matrix.to/#/#basicswap:matrix.org )**
## Overview
Table of Contents
Simple atomic swap experiment, doesn't have many interesting features yet.
Not ready for real world use.
* [About](#about)
* [Features](#features)
* [Available Assets](#available-assets)
* [Participate](#participate)
* [Tutorials](#tutorials)
* [License](#license)
Uses Particl secure messaging and Decred style atomic swaps.
## About
The Particl node is used to hold the keys and sign for the swap transactions.
Other nodes can be run in pruned mode.
A node must be run for each coin type traded.
In the future it should be possible to use data from explorers instead of running a node.
**BasicSwap** is the world's most secure and decentralized DEX. It facilitates cross-chain atomic swaps by enabling peers to interact directly with each other within a free and open environment without central points of failure.
## Currently a work in progress
This DEX is fully non-custodial and features a decentralized order book, letting you create or accept swap offers without any fees, counterparties, or the need for accounts.
Not ready for real-world use.
Built as a low-friction, highly secure solution to the frequent losses of funds on centralized exchanges (e.g., FTX, BitFinex, MtGox), **BasicSwap** aims to provide more reliable and secure cryptocurrency trading conditions for everyone.
**BasicSwap** is currently in active development by the community. While it already offers some of the essential trading features you'd expect from an exchange, more features and quality-of-life improvements are being worked on with the goal to provide a smoother user experience.
## Features
* **True cross-chain support** — Swap cryptocurrencies that live on entirely different blockchain environments, like Bitcoin and Monero.
* **Decentralized order book** — Make or take swap offers on a completely decentralized order book system.
* **No third-party or middleman** — Trade crypto with no intermediaries, completely eliminating central points of failure.
* **No trading fees** — Only pay the typical cryptocurrency network fee.
* **Superior financial privacy** — Protect your financial information from unauthorized access with BasicSwap's privacy-conscious technology.
* **Full Monero support** — Swap Monero with a variety of other cryptocurrencies like Bitcoin or Particl. No wrapped assets or layer-2 involved.
* **User-friendly interface** — Enjoy all these features within a user-friendly and intuitive interface that handles all the complicated parts for you.
## Under the Hood
**BasicSwap** can best be understood as a decentralized version of the SWIFT messaging network: it provides a decentralized messaging protocol that allows peers to connect directly with each other to execute atomic swaps, without central points of failure and using official core wallets (Bitcoin Core, Litecoin Core, etc.).
**BasicSwap** does not process, initiate, or execute swaps; it merely enables peers to communicate with each other and exchange the required information to simplify the process of using atomic swaps on the respective blockchains of the coins being swapped.
In essence, **BasicSwap** operates merely as a decentralized messaging protocol supplemented by a user-friendly interface.
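As a rough illustration of that messaging role (not BasicSwap's actual offer handling; the port, addresses and payload below are placeholder assumptions), a peer could publish and poll for messages through a local Particl node's SMSG RPC commands:

    from basicswap.rpc import callrpc

    PART_RPC_PORT = 51735             # placeholder: assumed Particl RPC port
    RPC_AUTH = "rpcuser:rpcpassword"  # placeholder credentials

    offer_payload = "serialized offer data"  # placeholder payload

    # Broadcast the payload over SMSG from one placeholder address to another.
    callrpc(PART_RPC_PORT, RPC_AUTH, "smsgsend",
            ["PfromAddr...", "PtoAddr...", offer_payload])

    # Poll for unread SMSG messages, e.g. incoming bids on the offer.
    inbox = callrpc(PART_RPC_PORT, RPC_AUTH, "smsginbox", ["unread"])
    for msg in inbox.get("messages", []):
        print(msg["from"], msg["text"])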
## Available Assets
BasicSwap is compatible with the following digital assets.
<table>
<tr>
<td><strong>Coin Name</strong>
</td>
<td><strong>Ticker</strong>
</td>
</tr>
<tr>
<td>Bitcoin
</td>
<td>BTC
</td>
</tr>
<tr>
<td>Monero
</td>
<td>XMR
</td>
</tr>
<tr>
<td>Bitcoin Cash
</td>
<td>BCH
</td>
</tr>
<tr>
<td>Dash
</td>
<td>DASH
</td>
</tr>
<tr>
<td>Litecoin
</td>
<td>LTC
</td>
</tr>
<tr>
<td>Firo
</td>
<td>FIRO
</td>
</tr>
<tr>
<td>PIVX
</td>
<td>PIVX
</td>
</tr>
<tr>
<td>Decred
</td>
<td>DCR
</td>
</tr>
<tr>
<td>Wownero
</td>
<td>WOW
</td>
</tr>
<tr>
<td>Particl
</td>
<td>PART
</td>
</tr>
<tr>
<td>Dogecoin
</td>
<td>DOGE
</td>
</tr>
<tr>
<td>Namecoin
</td>
<td>NMC
</td>
</tr>
</table>
If you'd like to add a cryptocurrency to BasicSwap, refer to how other cryptocurrencies have been integrated into the DEX by following [this link](https://academy.particl.io/en/latest/basicswap-guides/basicswapguides_apply.html).
# Participate
### Chats
* **For support**, join the community on [#basicswap:matrix.org](https://matrix.to/#/#basicswap:matrix.org) using a Matrix client.
[![Twitter Follow](https://img.shields.io/twitter/follow/BasicSwapDEX?label=follow%20us&style=social)](http://twitter.com/BasicSwapDEX)
### Documentation, installation
Follow the guides on [Particl Academy](https://academy.particl.io) for tutorials and guides on how BasicSwap works.
* [Download BasicSwapDEX](https://github.com/basicswap/basicswap/tree/master/doc)
#### Community chat support
* [Matrix](https://matrix.to/#/#basicswap:matrix.org)
# Tutorials
You can find a wide variety of tutorials and step-by-step guides about BasicSwap on the [Particl Academy](https://academy.particl.io) or on Particl's YouTube channel.
If you encounter an issue or try to accomplish something not mentioned in any of the tutorials included in the links above, please join the community chat support channel; you'll be sure to find help and support from current contributors there!
# License
BasicSwap is released under the MIT software license.
Discuss development and help with testing in the matrix channel [#basicswap:matrix.org](https://riot.im/app/#/room/#basicswap:matrix.org)

View File

@@ -1,3 +1,3 @@
name = "basicswap"
__version__ = "0.14.6"
__version__ = "0.11.36"

View File

@@ -1,37 +1,27 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2019-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import logging
import os
import random
import shlex
import socket
import socks
import subprocess
import sys
import threading
import time
import traceback
import socket
import urllib
import logging
import threading
import subprocess
from sockshandler import SocksiPyHandler
import basicswap.config as cfg
import basicswap.contrib.segwit_addr as segwit_addr
from .db import (
DBMethods,
)
from .rpc import (
callrpc,
)
from .util import (
TemporaryError,
)
from .util.logging import (
BSXLogger,
)
from .chainparams import (
Coins,
chainparams,
@@ -42,81 +32,53 @@ def getaddrinfo_tor(*args):
return [(socket.AF_INET, socket.SOCK_STREAM, 6, "", (args[0], args[1]))]
class BaseApp(DBMethods):
def __init__(self, data_dir, settings, chain, log_name="BasicSwap"):
self.fp = None
class BaseApp:
def __init__(self, fp, data_dir, settings, chain, log_name='BasicSwap'):
self.log_name = log_name
self.fp = fp
self.is_running = True
self.fail_code = 0
self.mock_time_offset = 0
self.data_dir = data_dir
self.chain = chain
self.settings = settings
self.coin_clients = {}
self.coin_interfaces = {}
self.mxDB = threading.Lock()
self.debug = self.settings.get("debug", False)
self.mxDB = threading.RLock()
self.debug = self.settings.get('debug', False)
self.delay_event = threading.Event()
self.chainstate_delay_event = threading.Event()
self._network = None
self.prepareLogging()
self.log.info(f"Network: {self.chain}")
self.log.info('Network: {}'.format(self.chain))
self.use_tor_proxy = self.settings.get("use_tor", False)
self.tor_proxy_host = self.settings.get("tor_proxy_host", "127.0.0.1")
self.tor_proxy_port = self.settings.get("tor_proxy_port", 9050)
self.tor_control_password = self.settings.get("tor_control_password", None)
self.tor_control_port = self.settings.get("tor_control_port", 9051)
self.use_tor_proxy = self.settings.get('use_tor', False)
self.tor_proxy_host = self.settings.get('tor_proxy_host', '127.0.0.1')
self.tor_proxy_port = self.settings.get('tor_proxy_port', 9050)
self.tor_control_password = self.settings.get('tor_control_password', None)
self.tor_control_port = self.settings.get('tor_control_port', 9051)
self.default_socket = socket.socket
self.default_socket_timeout = socket.getdefaulttimeout()
self.default_socket_getaddrinfo = socket.getaddrinfo
self._force_db_upgrade = False
def __del__(self):
if self.fp:
self.fp.close()
def stopRunning(self, with_code=0):
self.fail_code = with_code
# Wait for lock to shutdown gracefully.
if self.mxDB.acquire(timeout=5):
self.chainstate_delay_event.set()
with self.mxDB:
self.is_running = False
self.delay_event.set()
self.mxDB.release()
else:
# Waiting for lock timed out, stop anyway
self.chainstate_delay_event.set()
self.delay_event.set()
def openLogFile(self):
self.fp = open(os.path.join(self.data_dir, "basicswap.log"), "a")
def prepareLogging(self):
logging.setLoggerClass(BSXLogger)
self.log = logging.getLogger(self.log_name)
self.log.propagate = False
self.openLogFile()
# Remove any existing handlers
self.log.handlers = []
formatter = logging.Formatter(
"%(asctime)s %(levelname)s : %(message)s", "%Y-%m-%d %H:%M:%S"
)
stream_stdout = logging.StreamHandler(sys.stdout)
if self.log_name != "BasicSwap":
stream_stdout.setFormatter(
logging.Formatter(
"%(asctime)s %(name)s %(levelname)s : %(message)s",
"%Y-%m-%d %H:%M:%S",
)
)
formatter = logging.Formatter('%(asctime)s %(levelname)s : %(message)s')
stream_stdout = logging.StreamHandler()
if self.log_name != 'BasicSwap':
stream_stdout.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s : %(message)s'))
else:
stream_stdout.setFormatter(formatter)
self.log_formatter = formatter
stream_fp = logging.StreamHandler(self.fp)
stream_fp.setFormatter(formatter)
@@ -126,126 +88,97 @@ class BaseApp(DBMethods):
def getChainClientSettings(self, coin):
try:
return self.settings["chainclients"][chainparams[coin]["name"]]
return self.settings['chainclients'][chainparams[coin]['name']]
except Exception:
return {}
def setDaemonPID(self, name, pid) -> None:
def setDaemonPID(self, name, pid):
if isinstance(name, Coins):
self.coin_clients[name]["pid"] = pid
self.coin_clients[name]['pid'] = pid
return
for c, v in self.coin_clients.items():
if v["name"] == name:
v["pid"] = pid
if v['name'] == name:
v['pid'] = pid
def getChainDatadirPath(self, coin) -> str:
datadir = self.coin_clients[coin]["datadir"]
testnet_name = (
""
if self.chain == "mainnet"
else chainparams[coin][self.chain].get("name", self.chain)
)
def getChainDatadirPath(self, coin):
datadir = self.coin_clients[coin]['datadir']
testnet_name = '' if self.chain == 'mainnet' else chainparams[coin][self.chain].get('name', self.chain)
return os.path.join(datadir, testnet_name)
def getCoinIdFromName(self, coin_name: str):
def getCoinIdFromName(self, coin_name):
for c, params in chainparams.items():
if coin_name.lower() == params["name"].lower():
if coin_name.lower() == params['name'].lower():
return c
raise ValueError(f"Unknown coin: {coin_name}")
raise ValueError('Unknown coin: {}'.format(coin_name))
def encodeSegwit(self, coin_type, raw):
return segwit_addr.encode(chainparams[coin_type][self.chain]['hrp'], 0, raw)
def decodeSegwit(self, coin_type, addr):
return bytes(segwit_addr.decode(chainparams[coin_type][self.chain]['hrp'], addr)[1])
def callrpc(self, method, params=[], wallet=None):
cc = self.coin_clients[Coins.PART]
return callrpc(
cc["rpcport"], cc["rpcauth"], method, params, wallet, cc["rpchost"]
)
return callrpc(cc['rpcport'], cc['rpcauth'], method, params, wallet, cc['rpchost'])
def callcoinrpc(self, coin, method, params=[], wallet=None):
cc = self.coin_clients[coin]
return callrpc(
cc["rpcport"], cc["rpcauth"], method, params, wallet, cc["rpchost"]
)
return callrpc(cc['rpcport'], cc['rpcauth'], method, params, wallet, cc['rpchost'])
def calltx(self, cmd):
bindir = self.coin_clients[Coins.PART]['bindir']
args = [os.path.join(bindir, cfg.PARTICL_TX), ]
if self.chain != 'mainnet':
args.append('-' + self.chain)
args += shlex.split(cmd)
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate()
if len(out[1]) > 0:
raise ValueError('TX error ' + str(out[1]))
return out[0].decode('utf-8').strip()
def callcoincli(self, coin_type, params, wallet=None, timeout=None):
bindir = self.coin_clients[coin_type]["bindir"]
datadir = self.coin_clients[coin_type]["datadir"]
cli_bin: str = chainparams[coin_type].get(
"cli_binname", chainparams[coin_type]["name"] + "-cli"
)
command_cli = os.path.join(
bindir, cli_bin + (".exe" if os.name == "nt" else "")
)
args = [
command_cli,
]
if self.chain != "mainnet":
args.append("-" + self.chain)
args.append("-datadir=" + datadir)
bindir = self.coin_clients[coin_type]['bindir']
datadir = self.coin_clients[coin_type]['datadir']
command_cli = os.path.join(bindir, chainparams[coin_type]['name'] + '-cli' + ('.exe' if os.name == 'nt' else ''))
args = [command_cli, ]
if self.chain != 'mainnet':
args.append('-' + self.chain)
args.append('-datadir=' + datadir)
args += shlex.split(params)
p = subprocess.Popen(
args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out = p.communicate(timeout=timeout)
if len(out[1]) > 0:
raise ValueError("CLI error " + str(out[1]))
return out[0].decode("utf-8").strip()
raise ValueError('CLI error ' + str(out[1]))
return out[0].decode('utf-8').strip()
def is_transient_error(self, ex) -> bool:
def is_transient_error(self, ex):
if isinstance(ex, TemporaryError):
return True
str_error = str(ex).lower()
return "read timed out" in str_error or "no connection to daemon" in str_error
return 'read timed out' in str_error or 'no connection to daemon' in str_error
def setConnectionParameters(self, timeout=120):
opener = urllib.request.build_opener()
opener.addheaders = [("User-agent", "Mozilla/5.0")]
opener.addheaders = [('User-agent', 'Mozilla/5.0')]
urllib.request.install_opener(opener)
if self.use_tor_proxy:
socks.setdefaultproxy(
socks.PROXY_TYPE_SOCKS5,
self.tor_proxy_host,
self.tor_proxy_port,
rdns=True,
)
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, self.tor_proxy_host, self.tor_proxy_port, rdns=True)
socket.socket = socks.socksocket
socket.getaddrinfo = (
getaddrinfo_tor # Without this accessing .onion links would fail
)
socket.getaddrinfo = getaddrinfo_tor # Without this accessing .onion links would fail
socket.setdefaulttimeout(timeout)
def popConnectionParameters(self) -> None:
def popConnectionParameters(self):
if self.use_tor_proxy:
socket.socket = self.default_socket
socket.getaddrinfo = self.default_socket_getaddrinfo
socket.setdefaulttimeout(self.default_socket_timeout)
def readURL(self, url: str, timeout: int = 120, headers={}) -> bytes:
open_handler = None
if self.use_tor_proxy:
open_handler = SocksiPyHandler(
socks.PROXY_TYPE_SOCKS5, self.tor_proxy_host, self.tor_proxy_port
)
opener = (
urllib.request.build_opener(open_handler)
if self.use_tor_proxy
else urllib.request.build_opener()
)
if headers is None:
opener.addheaders = [("User-agent", "Mozilla/5.0")]
request = urllib.request.Request(url, headers=headers)
return opener.open(request, timeout=timeout).read()
def logException(self, message) -> None:
self.log.error(message)
if self.debug:
self.log.error(traceback.format_exc())
def torControl(self, query):
try:
command = 'AUTHENTICATE "{}"\r\n{}\r\nQUIT\r\n'.format(
self.tor_control_password, query
).encode("utf-8")
command = 'AUTHENTICATE "{}"\r\n{}\r\nQUIT\r\n'.format(self.tor_control_password, query).encode('utf-8')
c = socket.create_connection((self.tor_proxy_host, self.tor_control_port))
c.send(command)
response = bytearray()
@@ -257,37 +190,5 @@ class BaseApp(DBMethods):
c.close()
return response
except Exception as e:
self.log.error(f"torControl {e}")
self.log.error(f'torControl {e}')
return
def getTime(self) -> int:
return int(time.time()) + self.mock_time_offset
def setMockTimeOffset(self, new_offset: int) -> None:
self.log.warning(f"Setting mocktime to {new_offset}")
self.mock_time_offset = new_offset
def get_int_setting(self, name: str, default_v: int, min_v: int, max_v) -> int:
value: int = self.settings.get(name, default_v)
if value < min_v:
self.log.warning(f"Setting {name} to {min_v}")
value = min_v
if value > max_v:
self.log.warning(f"Setting {name} to {max_v}")
value = max_v
return value
def get_delay_event_seconds(self):
if self.min_delay_event == self.max_delay_event:
return self.min_delay_event
return random.randrange(self.min_delay_event, self.max_delay_event)
def get_short_delay_event_seconds(self):
if self.min_delay_event_short == self.max_delay_event_short:
return self.min_delay_event_short
return random.randrange(self.min_delay_event_short, self.max_delay_event_short)
def get_delay_retry_seconds(self):
if self.min_delay_retry == self.max_delay_retry:
return self.min_delay_retry
return random.randrange(self.min_delay_retry, self.max_delay_retry)

File diff suppressed because it is too large.

View File

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2021-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2021-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
@@ -9,14 +8,12 @@
import struct
import hashlib
from enum import IntEnum, auto
from html import escape as html_escape
from .util.address import (
encodeAddress,
decodeAddress,
encodeAddress,
)
from .chainparams import (
chainparams,
Fiat,
)
@@ -36,11 +33,6 @@ class KeyTypes(IntEnum):
KAF = 6
class MessageNetworks(IntEnum):
SMSG = auto()
SIMPLEX = auto()
class MessageTypes(IntEnum):
OFFER = auto()
BID = auto()
@@ -55,11 +47,6 @@ class MessageTypes(IntEnum):
XMR_BID_LOCK_RELEASE_LF = auto()
OFFER_REVOKE = auto()
ADS_BID_LF = auto()
ADS_BID_ACCEPT_FL = auto()
CONNECT_REQ = auto()
class AddressTypes(IntEnum):
OFFER = auto()
@@ -74,25 +61,23 @@ class SwapTypes(IntEnum):
SELLER_FIRST_2MSG = auto()
BUYER_FIRST_2MSG = auto()
XMR_SWAP = auto()
XMR_BCH_SWAP = auto()
class OfferStates(IntEnum):
OFFER_SENT = 1
OFFER_RECEIVED = 2
OFFER_ABANDONED = 3
OFFER_EXPIRED = 4
class BidStates(IntEnum):
BID_SENT = 1
BID_RECEIVING = 2 # Partially received
BID_RECEIVING = 2 # Partially received
BID_RECEIVED = 3
BID_RECEIVING_ACC = 4 # Partially received accept message
BID_ACCEPTED = 5 # BidAcceptMessage received/sent
SWAP_INITIATED = 6 # Initiate txn validated
SWAP_PARTICIPATING = 7 # Participate txn validated
SWAP_COMPLETED = 8 # All swap txns spent
BID_RECEIVING_ACC = 4 # Partially received accept message
BID_ACCEPTED = 5 # BidAcceptMessage received/sent
SWAP_INITIATED = 6 # Initiate txn validated
SWAP_PARTICIPATING = 7 # Participate txn validated
SWAP_COMPLETED = 8 # All swap txns spent
XMR_SWAP_SCRIPT_COIN_LOCKED = 9
XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX = 10
XMR_SWAP_NOSCRIPT_COIN_LOCKED = 11
@@ -106,19 +91,13 @@ class BidStates(IntEnum):
XMR_SWAP_FAILED = 19
SWAP_DELAYING = 20
SWAP_TIMEDOUT = 21
BID_ABANDONED = 22 # Bid will no longer be processed
BID_ERROR = 23 # An error occurred
BID_ABANDONED = 22 # Bid will no longer be processed
BID_ERROR = 23 # An error occurred
BID_STALLED_FOR_TEST = 24
BID_REJECTED = 25
BID_STATE_UNKNOWN = 26
XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS = 27 # XmrBidLockTxSigsMessage
XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX = 28 # XmrBidLockSpendTxMessage
BID_REQUEST_SENT = 29
BID_REQUEST_ACCEPTED = 30
BID_EXPIRED = 31
BID_AACCEPT_DELAY = 32
BID_AACCEPT_FAIL = 33
CONNECT_REQ_SENT = 34
XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS = 27 # XmrBidLockTxSigsMessage
XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX = 28 # XmrBidLockSpendTxMessage
class TxStates(IntEnum):
@@ -127,8 +106,6 @@ class TxStates(IntEnum):
TX_CONFIRMED = auto()
TX_REDEEMED = auto()
TX_REFUNDED = auto()
TX_IN_MEMPOOL = auto()
TX_IN_CHAIN = auto()
class TxTypes(IntEnum):
@@ -145,12 +122,6 @@ class TxTypes(IntEnum):
XMR_SWAP_A_LOCK_REFUND_SPEND = auto()
XMR_SWAP_A_LOCK_REFUND_SWIPE = auto()
XMR_SWAP_B_LOCK = auto()
XMR_SWAP_B_LOCK_SPEND = auto()
XMR_SWAP_B_LOCK_REFUND = auto()
ITX_PRE_FUNDED = auto()
BCH_MERCY = auto()
class ActionTypes(IntEnum):
@@ -165,7 +136,6 @@ class ActionTypes(IntEnum):
RECOVER_XMR_SWAP_LOCK_TX_B = auto()
SEND_XMR_SWAP_LOCK_SPEND_MSG = auto()
REDEEM_ITX = auto()
ACCEPT_AS_REV_BID = auto()
class EventLogTypes(IntEnum):
@@ -192,15 +162,6 @@ class EventLogTypes(IntEnum):
ERROR = auto()
AUTOMATION_CONSTRAINT = auto()
AUTOMATION_ACCEPTING_BID = auto()
ITX_PUBLISHED = auto()
ITX_REDEEM_PUBLISHED = auto()
ITX_REFUND_PUBLISHED = auto()
PTX_PUBLISHED = auto()
PTX_REDEEM_PUBLISHED = auto()
PTX_REFUND_PUBLISHED = auto()
LOCK_TX_B_IN_MEMPOOL = auto()
BCH_MERCY_TX_PUBLISHED = auto()
BCH_MERCY_TX_FOUND = auto()
class XmrSplitMsgTypes(IntEnum):
@@ -212,21 +173,21 @@ class DebugTypes(IntEnum):
NONE = 0
BID_STOP_AFTER_COIN_A_LOCK = auto()
BID_DONT_SPEND_COIN_A_LOCK_REFUND = auto()
BID_DONT_SPEND_COIN_A_LOCK_REFUND2 = auto() # continues
CREATE_INVALID_COIN_B_LOCK = auto()
BUYER_STOP_AFTER_ITX = auto()
MAKE_INVALID_PTX = auto()
DONT_SPEND_ITX = auto()
SKIP_LOCK_TX_REFUND = auto()
SEND_LOCKED_XMR = auto()
B_LOCK_TX_MISSED_SEND = auto()
DUPLICATE_ACTIONS = auto()
DONT_CONFIRM_PTX = auto()
OFFER_LOCK_2_VALUE_INC = auto()
BID_STOP_AFTER_COIN_B_LOCK = auto()
BID_DONT_SPEND_COIN_B_LOCK = auto()
WAIT_FOR_COIN_B_LOCK_BEFORE_REFUND = auto()
BID_DONT_SPEND_COIN_A_LOCK = auto()
def strOfferState(state):
if state == OfferStates.OFFER_SENT:
return 'Sent'
if state == OfferStates.OFFER_RECEIVED:
return 'Received'
if state == OfferStates.OFFER_ABANDONED:
return 'Abandoned'
return 'Unknown'
class NotificationTypes(IntEnum):
@@ -236,288 +197,197 @@ class NotificationTypes(IntEnum):
BID_ACCEPTED = auto()
class ConnectionRequestTypes(IntEnum):
BID = 1
class AutomationOverrideOptions(IntEnum):
DEFAULT = 0
ALWAYS_ACCEPT = 1
NEVER_ACCEPT = auto()
def strAutomationOverrideOption(option):
if option == AutomationOverrideOptions.DEFAULT:
return "Default"
if option == AutomationOverrideOptions.ALWAYS_ACCEPT:
return "Always Accept"
if option == AutomationOverrideOptions.NEVER_ACCEPT:
return "Never Accept"
return "Unknown"
class VisibilityOverrideOptions(IntEnum):
DEFAULT = 0
HIDE = 1
BLOCK = auto()
def strVisibilityOverrideOption(option):
if option == VisibilityOverrideOptions.DEFAULT:
return "Default"
if option == VisibilityOverrideOptions.HIDE:
return "Hide"
if option == VisibilityOverrideOptions.BLOCK:
return "Block"
return "Unknown"
def strOfferState(state):
if state == OfferStates.OFFER_SENT:
return "Sent"
if state == OfferStates.OFFER_RECEIVED:
return "Received"
if state == OfferStates.OFFER_ABANDONED:
return "Abandoned"
if state == OfferStates.OFFER_EXPIRED:
return "Expired"
return "Unknown"
def strBidState(state):
if state == BidStates.BID_SENT:
return "Sent"
return 'Sent'
if state == BidStates.BID_RECEIVING:
return "Receiving"
return 'Receiving'
if state == BidStates.BID_RECEIVING_ACC:
return "Receiving accept"
return 'Receiving accept'
if state == BidStates.BID_RECEIVED:
return "Received"
return 'Received'
if state == BidStates.BID_ACCEPTED:
return "Accepted"
return 'Accepted'
if state == BidStates.SWAP_INITIATED:
return "Initiated"
return 'Initiated'
if state == BidStates.SWAP_PARTICIPATING:
return "Participating"
return 'Participating'
if state == BidStates.SWAP_COMPLETED:
return "Completed"
return 'Completed'
if state == BidStates.SWAP_TIMEDOUT:
return "Timed-out"
return 'Timed-out'
if state == BidStates.BID_ABANDONED:
return "Abandoned"
return 'Abandoned'
if state == BidStates.BID_STALLED_FOR_TEST:
return "Stalled (debug)"
return 'Stalled (debug)'
if state == BidStates.BID_ERROR:
return "Error"
return 'Error'
if state == BidStates.BID_REJECTED:
return "Rejected"
return 'Rejected'
if state == BidStates.XMR_SWAP_SCRIPT_COIN_LOCKED:
return "Script coin locked"
return 'Script coin locked'
if state == BidStates.XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX:
return "Script coin spend tx valid"
return 'Script coin spend tx valid'
if state == BidStates.XMR_SWAP_NOSCRIPT_COIN_LOCKED:
return "Scriptless coin locked"
return 'Scriptless coin locked'
if state == BidStates.XMR_SWAP_LOCK_RELEASED:
return "Script coin lock released"
return 'Script coin lock released'
if state == BidStates.XMR_SWAP_SCRIPT_TX_REDEEMED:
return "Script tx redeemed"
return 'Script tx redeemed'
if state == BidStates.XMR_SWAP_SCRIPT_TX_PREREFUND:
return "Script pre-refund tx in chain"
return 'Script pre-refund tx in chain'
if state == BidStates.XMR_SWAP_NOSCRIPT_TX_REDEEMED:
return "Scriptless tx redeemed"
return 'Scriptless tx redeemed'
if state == BidStates.XMR_SWAP_NOSCRIPT_TX_RECOVERED:
return "Scriptless tx recovered"
return 'Scriptless tx recovered'
if state == BidStates.XMR_SWAP_FAILED_REFUNDED:
return "Failed, refunded"
return 'Failed, refunded'
if state == BidStates.XMR_SWAP_FAILED_SWIPED:
return "Failed, swiped"
return 'Failed, swiped'
if state == BidStates.XMR_SWAP_FAILED:
return "Failed"
return 'Failed'
if state == BidStates.SWAP_DELAYING:
return "Delaying"
return 'Delaying'
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS:
return "Exchanged script lock tx sigs msg"
return 'Exchanged script lock tx sigs msg'
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX:
return "Exchanged script lock spend tx msg"
if state == BidStates.BID_REQUEST_SENT:
return "Request sent"
if state == BidStates.BID_REQUEST_ACCEPTED:
return "Request accepted"
if state == BidStates.BID_STATE_UNKNOWN:
return "Unknown bid state"
if state == BidStates.BID_EXPIRED:
return "Expired"
if state == BidStates.BID_AACCEPT_DELAY:
return "Auto accept delay"
if state == BidStates.BID_AACCEPT_FAIL:
return "Auto accept failed"
if state == BidStates.CONNECT_REQ_SENT:
return "Connect request sent"
return "Unknown" + " " + str(state)
return 'Exchanged script lock spend tx msg'
return 'Unknown' + ' ' + str(state)
def strTxState(state):
if state == TxStates.TX_NONE:
return "None"
return 'None'
if state == TxStates.TX_SENT:
return "Sent"
return 'Sent'
if state == TxStates.TX_CONFIRMED:
return "Confirmed"
return 'Confirmed'
if state == TxStates.TX_REDEEMED:
return "Redeemed"
return 'Redeemed'
if state == TxStates.TX_REFUNDED:
return "Refunded"
if state == TxStates.TX_IN_MEMPOOL:
return "In Mempool"
if state == TxStates.TX_IN_CHAIN:
return "In Chain"
return "Unknown"
return 'Refunded'
return 'Unknown'
def strTxType(tx_type):
if tx_type == TxTypes.XMR_SWAP_A_LOCK:
return "Chain A Lock Tx"
return 'Chain A Lock Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_SPEND:
return "Chain A Lock Spend Tx"
return 'Chain A Lock Spend Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND:
return "Chain A Lock Refund Tx"
return 'Chain A Lock Refund Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND_SPEND:
return "Chain A Lock Refund Spend Tx"
return 'Chain A Lock Refund Spend Tx'
if tx_type == TxTypes.XMR_SWAP_A_LOCK_REFUND_SWIPE:
return "Chain A Lock Refund Swipe Tx"
return 'Chain A Lock Refund Swipe Tx'
if tx_type == TxTypes.XMR_SWAP_B_LOCK:
return "Chain B Lock Tx"
if tx_type == TxTypes.ITX_PRE_FUNDED:
return "Funded mock initiate Tx"
if tx_type == TxTypes.BCH_MERCY:
return "BCH Mercy Tx"
return "Unknown"
return 'Chain B Lock Tx'
return 'Unknown'
def strAddressType(addr_type):
if addr_type == AddressTypes.OFFER:
return "Offer"
return 'Offer'
if addr_type == AddressTypes.BID:
return "Bid"
return 'Bid'
if addr_type == AddressTypes.RECV_OFFER:
return "Offer recv"
return 'Offer recv'
if addr_type == AddressTypes.SEND_OFFER:
return "Offer send"
return "Unknown"
return 'Offer send'
return 'Unknown'
def getLockName(lock_type):
if lock_type == TxLockTypes.SEQUENCE_LOCK_BLOCKS:
return "Sequence lock, blocks"
return 'Sequence lock, blocks'
if lock_type == TxLockTypes.SEQUENCE_LOCK_TIME:
return "Sequence lock, time"
return 'Sequence lock, time'
if lock_type == TxLockTypes.ABS_LOCK_BLOCKS:
return "blocks"
return 'blocks'
if lock_type == TxLockTypes.ABS_LOCK_TIME:
return "time"
return 'time'
def describeEventEntry(event_type, event_msg):
if event_type == EventLogTypes.FAILED_TX_B_LOCK_PUBLISH:
return "Failed to publish lock tx B"
return 'Failed to publish lock tx B'
if event_type == EventLogTypes.FAILED_TX_B_LOCK_PUBLISH:
return 'Failed to publish lock tx B'
if event_type == EventLogTypes.LOCK_TX_A_PUBLISHED:
return "Lock tx A published"
return 'Lock tx A published'
if event_type == EventLogTypes.LOCK_TX_B_PUBLISHED:
return "Lock tx B published"
return 'Lock tx B published'
if event_type == EventLogTypes.FAILED_TX_B_SPEND:
return "Failed to publish lock tx B spend: " + event_msg
return 'Failed to publish lock tx B spend: ' + event_msg
if event_type == EventLogTypes.LOCK_TX_A_SEEN:
return "Lock tx A seen in chain"
return 'Lock tx A seen in chain'
if event_type == EventLogTypes.LOCK_TX_A_CONFIRMED:
return "Lock tx A confirmed in chain"
return 'Lock tx A confirmed in chain'
if event_type == EventLogTypes.LOCK_TX_B_SEEN:
return "Lock tx B seen in chain"
return 'Lock tx B seen in chain'
if event_type == EventLogTypes.LOCK_TX_B_CONFIRMED:
return "Lock tx B confirmed in chain"
if event_type == EventLogTypes.LOCK_TX_B_IN_MEMPOOL:
return "Lock tx B seen in mempool"
return 'Lock tx B confirmed in chain'
if event_type == EventLogTypes.DEBUG_TWEAK_APPLIED:
return "Debug tweak applied " + event_msg
return 'Debug tweak applied ' + event_msg
if event_type == EventLogTypes.FAILED_TX_B_REFUND:
return "Failed to publish lock tx B refund"
return 'Failed to publish lock tx B refund'
if event_type == EventLogTypes.LOCK_TX_B_INVALID:
return "Detected invalid lock Tx B"
return 'Detected invalid lock Tx B'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_TX_PUBLISHED:
return "Lock tx A refund tx published"
return 'Lock tx A refund tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SPEND_TX_PUBLISHED:
return "Lock tx A refund spend tx published"
return 'Lock tx A refund spend tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SWIPE_TX_PUBLISHED:
return "Lock tx A refund swipe tx published"
return 'Lock tx A refund swipe tx published'
if event_type == EventLogTypes.LOCK_TX_B_REFUND_TX_PUBLISHED:
return "Lock tx B refund tx published"
return 'Lock tx B refund tx published'
if event_type == EventLogTypes.LOCK_TX_A_SPEND_TX_PUBLISHED:
return "Lock tx A spend tx published"
return 'Lock tx A spend tx published'
if event_type == EventLogTypes.LOCK_TX_B_SPEND_TX_PUBLISHED:
return "Lock tx B spend tx published"
return 'Lock tx B spend tx published'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_TX_SEEN:
return "Lock tx A refund tx seen in chain"
return 'Lock tx A refund tx seen in chain'
if event_type == EventLogTypes.LOCK_TX_A_REFUND_SPEND_TX_SEEN:
return "Lock tx A refund spend tx seen in chain"
return 'Lock tx A refund spend tx seen in chain'
if event_type == EventLogTypes.SYSTEM_WARNING:
return "Warning: " + event_msg
return 'Warning: ' + event_msg
if event_type == EventLogTypes.ERROR:
return "Error: " + event_msg
return 'Error: ' + event_msg
if event_type == EventLogTypes.AUTOMATION_CONSTRAINT:
return "Failed auto accepting"
return 'Failed auto accepting'
if event_type == EventLogTypes.AUTOMATION_ACCEPTING_BID:
return "Auto accepting"
if event_type == EventLogTypes.ITX_PUBLISHED:
return "Initiate tx published"
if event_type == EventLogTypes.ITX_REDEEM_PUBLISHED:
return "Initiate tx redeem tx published"
if event_type == EventLogTypes.ITX_REFUND_PUBLISHED:
return "Initiate tx refund tx published"
if event_type == EventLogTypes.PTX_PUBLISHED:
return "Participate tx published"
if event_type == EventLogTypes.PTX_REDEEM_PUBLISHED:
return "Participate tx redeem tx published"
if event_type == EventLogTypes.PTX_REFUND_PUBLISHED:
return "Participate tx refund tx published"
if event_type == EventLogTypes.BCH_MERCY_TX_FOUND:
return "BCH mercy tx found"
if event_type == EventLogTypes.BCH_MERCY_TX_PUBLISHED:
return "Lock tx B mercy tx published"
return 'Auto accepting'
def getVoutByAddress(txjs, p2sh):
for o in txjs["vout"]:
for o in txjs['vout']:
try:
if "address" in o["scriptPubKey"] and o["scriptPubKey"]["address"] == p2sh:
return o["n"]
if p2sh in o["scriptPubKey"]["addresses"]:
return o["n"]
if p2sh in o['scriptPubKey']['addresses']:
return o['n']
except Exception:
pass
raise ValueError("Address output not found in txn")
raise ValueError('Address output not found in txn')
def getVoutByScriptPubKey(txjs, scriptPubKey_hex: str) -> int:
for o in txjs["vout"]:
def getVoutByP2WSH(txjs, p2wsh_hex):
for o in txjs['vout']:
try:
if scriptPubKey_hex == o["scriptPubKey"]["hex"]:
return o["n"]
if p2wsh_hex == o['scriptPubKey']['hex']:
return o['n']
except Exception:
pass
raise ValueError("scriptPubKey output not found in txn")
raise ValueError('P2WSH output not found in txn')
def replaceAddrPrefix(addr, coin_type, chain_name, addr_type="pubkey_address"):
return encodeAddress(
bytes((chainparams[coin_type][chain_name][addr_type],))
+ decodeAddress(addr)[1:]
)
def replaceAddrPrefix(addr, coin_type, chain_name, addr_type='pubkey_address'):
return encodeAddress(bytes((chainparams[coin_type][chain_name][addr_type],)) + decodeAddress(addr)[1:])
def getOfferProofOfFundsHash(offer_msg, offer_addr):
# TODO: Hash must not include proof_of_funds sig if it exists in offer_msg
h = hashlib.sha256()
h.update(offer_addr.encode("utf-8"))
offer_bytes = offer_msg.to_bytes()
h.update(offer_addr.encode('utf-8'))
offer_bytes = offer_msg.SerializeToString()
h.update(offer_bytes)
return h.digest()
@@ -526,112 +396,37 @@ def getLastBidState(packed_states):
num_states = len(packed_states) // 12
if num_states < 2:
return BidStates.BID_STATE_UNKNOWN
return struct.unpack_from("<i", packed_states[(num_states - 2) * 12 :])[0]
return struct.unpack_from('<i', packed_states[(num_states - 2) * 12:])[0]
try:
num_states = len(packed_states) // 12
if num_states < 2:
return BidStates.BID_STATE_UNKNOWN
return struct.unpack_from("<i", packed_states[(num_states - 2) * 12 :])[0]
return struct.unpack_from('<i', packed_states[(num_states - 2) * 12:])[0]
except Exception:
return BidStates.BID_STATE_UNKNOWN
def strSwapType(swap_type) -> str:
if swap_type == SwapTypes.SELLER_FIRST:
return "seller_first"
if swap_type == SwapTypes.XMR_SWAP:
return "xmr_swap"
return None
def strSwapDesc(swap_type) -> str:
if swap_type == SwapTypes.SELLER_FIRST:
return "Secret Hash"
if swap_type == SwapTypes.XMR_SWAP:
return "Adaptor Sig"
return None
def fiatTicker(fiat_ind: int) -> str:
try:
return Fiat(fiat_ind).name
except Exception as e: # noqa: F841
raise ValueError(f"Unknown fiat ind {fiat_ind}")
def fiatFromTicker(ticker: str) -> int:
ticker_uc = ticker.upper()
for entry in Fiat:
if entry.name == ticker_uc:
return entry
raise ValueError(f"Unknown fiat {ticker}")
def get_api_key_setting(
settings, setting_name: str, default_value: str = "", escape: bool = False
):
setting_name_enc: str = setting_name + "_enc"
if setting_name_enc in settings:
rv = bytes.fromhex(settings[setting_name_enc]).decode("utf-8")
return html_escape(rv) if escape else rv
return settings.get(setting_name, default_value)
inactive_states = [
BidStates.SWAP_COMPLETED,
BidStates.BID_ERROR,
BidStates.BID_REJECTED,
BidStates.SWAP_TIMEDOUT,
BidStates.BID_ABANDONED,
BidStates.BID_EXPIRED,
]
def canAcceptBidState(state):
return state in (
BidStates.BID_RECEIVED,
BidStates.BID_AACCEPT_DELAY,
BidStates.BID_AACCEPT_FAIL,
)
def isActiveBidState(state):
if state >= BidStates.BID_ACCEPTED and state < BidStates.SWAP_COMPLETED:
return True
return state in (
BidStates.SWAP_DELAYING,
BidStates.XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX,
BidStates.XMR_SWAP_SCRIPT_COIN_LOCKED,
BidStates.XMR_SWAP_NOSCRIPT_COIN_LOCKED,
BidStates.XMR_SWAP_LOCK_RELEASED,
BidStates.XMR_SWAP_NOSCRIPT_TX_REDEEMED,
BidStates.XMR_SWAP_SCRIPT_TX_REDEEMED,
BidStates.XMR_SWAP_SCRIPT_TX_PREREFUND,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX,
BidStates.XMR_SWAP_FAILED,
BidStates.BID_REQUEST_ACCEPTED,
)
def isErrorBidState(state):
return state in (
BidStates.BID_STALLED_FOR_TEST,
BidStates.BID_ERROR,
)
def isFailingBidState(state):
return state in (
BidStates.BID_STALLED_FOR_TEST,
BidStates.BID_ERROR,
BidStates.XMR_SWAP_SCRIPT_TX_PREREFUND,
BidStates.XMR_SWAP_NOSCRIPT_TX_RECOVERED,
BidStates.XMR_SWAP_FAILED_REFUNDED,
BidStates.XMR_SWAP_FAILED_SWIPED,
BidStates.XMR_SWAP_FAILED,
)
def isFinalBidState(state):
return state in inactive_states
if state == BidStates.SWAP_DELAYING:
return True
if state == BidStates.XMR_SWAP_HAVE_SCRIPT_COIN_SPEND_TX:
return True
if state == BidStates.XMR_SWAP_SCRIPT_COIN_LOCKED:
return True
if state == BidStates.XMR_SWAP_NOSCRIPT_COIN_LOCKED:
return True
if state == BidStates.XMR_SWAP_LOCK_RELEASED:
return True
if state == BidStates.XMR_SWAP_NOSCRIPT_TX_REDEEMED:
return True
if state == BidStates.XMR_SWAP_SCRIPT_TX_REDEEMED:
return True
if state == BidStates.XMR_SWAP_SCRIPT_TX_PREREFUND:
return True
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS:
return True
if state == BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX:
return True
return False

View File

@@ -1 +0,0 @@
name = "bin"

File diff suppressed because it is too large.

View File

@@ -1,801 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import logging
import os
import shutil
import signal
import subprocess
import sys
import traceback
import basicswap.config as cfg
from basicswap import __version__
from basicswap.basicswap import BasicSwap
from basicswap.chainparams import chainparams, Coins, isKnownCoinName
from basicswap.contrib.websocket_server import WebsocketServer
from basicswap.http_server import HttpThread
from basicswap.network.simplex_chat import startSimplexClient
from basicswap.ui.util import getCoinName
from basicswap.util.daemon import Daemon
initial_logger = logging.getLogger()
initial_logger.level = logging.DEBUG
if not len(initial_logger.handlers):
initial_logger.addHandler(logging.StreamHandler(sys.stdout))
logger = initial_logger
swap_client = None
def signal_handler(sig, frame):
os.write(
sys.stdout.fileno(), f"Signal {sig} detected, ending program.\n".encode("utf-8")
)
if swap_client is not None and not swap_client.chainstate_delay_event.is_set():
try:
from basicswap.ui.page_amm import stop_amm_process, get_amm_status
amm_status = get_amm_status()
if amm_status == "running":
logger.info("Signal handler stopping AMM process...")
success, msg = stop_amm_process(swap_client)
if success:
logger.info(f"AMM signal shutdown: {msg}")
else:
logger.warning(f"AMM signal shutdown warning: {msg}")
except Exception as e:
logger.error(f"Error stopping AMM in signal handler: {e}")
swap_client.stopRunning()
def startDaemon(node_dir, bin_dir, daemon_bin, opts=[], extra_config={}):
daemon_bin = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
datadir_path = os.path.expanduser(node_dir)
coin_name = extra_config.get("coin_name", "")
# Rewrite litecoin.conf
# TODO: Remove
ltc_conf_path = os.path.join(datadir_path, "litecoin.conf")
if os.path.exists(ltc_conf_path):
needs_rewrite: bool = False
add_changetype: bool = True
with open(ltc_conf_path) as fp:
for line in fp:
line = line.strip()
if line.startswith("changetype="):
add_changetype = False
break
if line.endswith("=onion"):
needs_rewrite = True
break
if needs_rewrite:
logger.info("Rewriting litecoin.conf")
shutil.copyfile(ltc_conf_path, ltc_conf_path + ".last")
with (
open(ltc_conf_path + ".last") as fp_from,
open(ltc_conf_path, "w") as fp_to,
):
for line in fp_from:
if line.strip().endswith("=onion"):
fp_to.write(line.strip()[:-6] + "\n")
else:
fp_to.write(line)
if add_changetype:
fp_to.write("changetype=bech32\n")
add_changetype = False
if add_changetype:
logger.info("Adding changetype to litecoin.conf")
with open(ltc_conf_path, "a") as fp:
fp.write("changetype=bech32\n")
# Rewrite bitcoin.conf
# TODO: Remove
btc_conf_path = os.path.join(datadir_path, "bitcoin.conf")
if coin_name == "bitcoin" and os.path.exists(btc_conf_path):
add_changetype: bool = True
with open(btc_conf_path) as fp:
for line in fp:
line = line.strip()
if line.startswith("changetype="):
add_changetype = False
break
if add_changetype:
logger.info("Adding changetype to bitcoin.conf")
with open(btc_conf_path, "a") as fp:
fp.write("changetype=bech32\n")
args = [
daemon_bin,
]
add_datadir: bool = extra_config.get("add_datadir", True)
if add_datadir:
args.append("-datadir=" + datadir_path)
args += opts
logger.info(f"Starting node {daemon_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
opened_files = []
if extra_config.get("stdout_to_file", False):
stdout_dest = open(
os.path.join(
datadir_path, extra_config.get("stdout_filename", "core_stdout.log")
),
"w",
)
opened_files.append(stdout_dest)
stderr_dest = stdout_dest
else:
stdout_dest = subprocess.PIPE
stderr_dest = subprocess.PIPE
shell: bool = False
if extra_config.get("use_shell", False):
args = " ".join(args)
shell = True
return Daemon(
subprocess.Popen(
args,
shell=shell,
stdin=subprocess.PIPE,
stdout=stdout_dest,
stderr=stderr_dest,
cwd=datadir_path,
),
opened_files,
os.path.basename(daemon_bin),
)
def startXmrDaemon(node_dir, bin_dir, daemon_bin, opts=[]):
daemon_path = os.path.expanduser(os.path.join(bin_dir, daemon_bin))
datadir_path = os.path.expanduser(node_dir)
config_filename = (
"wownerod.conf" if daemon_bin.startswith("wow") else "monerod.conf"
)
args = [
daemon_path,
"--non-interactive",
"--config-file=" + os.path.join(datadir_path, config_filename),
] + opts
logger.info(f"Starting node {daemon_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
# return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
file_stdout = open(os.path.join(datadir_path, "core_stdout.log"), "w")
file_stderr = open(os.path.join(datadir_path, "core_stderr.log"), "w")
return Daemon(
subprocess.Popen(
args,
stdin=subprocess.PIPE,
stdout=file_stdout,
stderr=file_stderr,
cwd=datadir_path,
),
[file_stdout, file_stderr],
os.path.basename(daemon_bin),
)
def startXmrWalletDaemon(node_dir, bin_dir, wallet_bin, opts=[]):
daemon_path = os.path.expanduser(os.path.join(bin_dir, wallet_bin))
args = [daemon_path, "--non-interactive"]
needs_rewrite: bool = False
config_to_remove = [
"daemon-address=",
"untrusted-daemon=",
"trusted-daemon=",
"proxy=",
]
data_dir = os.path.expanduser(node_dir)
wallet_config_filename = (
"wownero-wallet-rpc.conf"
if wallet_bin.startswith("wow")
else "monero_wallet.conf"
)
config_path = os.path.join(data_dir, wallet_config_filename)
if os.path.exists(config_path):
args += ["--config-file=" + config_path]
with open(config_path) as fp:
for line in fp:
if any(
line.startswith(config_line) for config_line in config_to_remove
):
logger.warning(
"Found old config in monero_wallet.conf: {}".format(
line.strip()
)
)
needs_rewrite = True
args += opts
if needs_rewrite:
logger.info("Rewriting wallet config")
shutil.copyfile(config_path, config_path + ".last")
with open(config_path + ".last") as fp_from, open(config_path, "w") as fp_to:
for line in fp_from:
if not any(
line.startswith(config_line) for config_line in config_to_remove
):
fp_to.write(line)
logger.info(f"Starting wallet daemon {wallet_bin}")
logger.debug("Arguments {}".format(" ".join(args)))
# TODO: return subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=data_dir)
wallet_stdout = open(os.path.join(data_dir, "wallet_stdout.log"), "w")
wallet_stderr = open(os.path.join(data_dir, "wallet_stderr.log"), "w")
return Daemon(
subprocess.Popen(
args,
stdin=subprocess.PIPE,
stdout=wallet_stdout,
stderr=wallet_stderr,
cwd=data_dir,
),
[wallet_stdout, wallet_stderr],
os.path.basename(wallet_bin),
)
def ws_new_client(client, server):
if swap_client:
swap_client.log.debug(f'ws_new_client {client["id"]}')
def ws_client_left(client, server):
if client is None:
return
if swap_client:
swap_client.log.debug(f'ws_client_left {client["id"]}')
def ws_message_received(client, server, message):
if len(message) > 200:
message = message[:200] + ".."
if swap_client:
swap_client.log.debug(f'ws_message_received {client["id"]} {message}')
def getCoreBinName(coin_id: int, coin_settings, default_name: str) -> str:
return coin_settings.get(
"core_binname", chainparams[coin_id].get("core_binname", default_name)
) + (".exe" if os.name == "nt" else "")
def getWalletBinName(coin_id: int, coin_settings, default_name: str) -> str:
return coin_settings.get(
"wallet_binname", chainparams[coin_id].get("wallet_binname", default_name)
) + (".exe" if os.name == "nt" else "")
def getCoreBinArgs(coin_id: int, coin_settings, prepare=False, use_tor_proxy=False):
extra_args = []
if "config_filename" in coin_settings:
extra_args.append("--conf=" + coin_settings["config_filename"])
if "port" in coin_settings and coin_id != Coins.BTC:
if prepare is False and use_tor_proxy:
if coin_id == Coins.BCH:
# Without this BCH (27.1) will bind to the default BTC port, even with proxy set
extra_args.append("--bind=127.0.0.1:" + str(int(coin_settings["port"])))
else:
extra_args.append("--port=" + str(int(coin_settings["port"])))
# BTC versions from v28 fail to start if the onionport is in use.
# As BCH may use port 8334, disable it here.
# When tor is enabled a bind option for the onionport will be added to bitcoin.conf.
# https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-28.0.md?plain=1#L84
if (
prepare is False
and use_tor_proxy is False
and coin_id in (Coins.BTC, Coins.NMC)
):
port: int = coin_settings.get("port", 8333)
extra_args.append(f"--bind=0.0.0.0:{port}")
return extra_args
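# Illustrative sketch (settings values assumed): how the port/bind handling in
# getCoreBinArgs plays out for two hypothetical chainclient entries.
#   getCoreBinArgs(Coins.BCH, {"config_filename": "bitcoin.conf", "port": 19798}, use_tor_proxy=True)
#     -> ["--conf=bitcoin.conf", "--bind=127.0.0.1:19798"]
#   getCoreBinArgs(Coins.BTC, {"port": 8333}, use_tor_proxy=False)
#     -> ["--bind=0.0.0.0:8333"]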
def mainLoop(daemons, update: bool = True):
while not swap_client.delay_event.wait(0.5):
if update:
swap_client.update()
else:
pass
for daemon in daemons:
if daemon.running is False:
continue
poll = daemon.handle.poll()
if poll is None:
pass # Process is running
else:
daemon.running = False
swap_client.log.error(
f"Process {daemon.handle.pid} for {daemon.name} terminated unexpectedly returning {poll}."
)
def runClient(
data_dir: str,
chain: str,
start_only_coins: bool,
log_prefix: str = "BasicSwap",
extra_opts=dict(),
) -> int:
global swap_client, logger
daemons = []
pids = []
threads = []
settings_path = os.path.join(data_dir, cfg.CONFIG_FILENAME)
pids_path = os.path.join(data_dir, ".pids")
if os.getenv("WALLET_ENCRYPTION_PWD", "") != "":
if "decred" in start_only_coins:
# Workaround for dcrwallet requiring password for initial startup
logger.warning(
"Allowing set WALLET_ENCRYPTION_PWD var with --startonlycoin=decred."
)
else:
raise ValueError(
"Please unset the WALLET_ENCRYPTION_PWD environment variable."
)
if not os.path.exists(settings_path):
raise ValueError("Settings file not found: " + str(settings_path))
with open(settings_path) as fs:
settings = json.load(fs)
swap_client = BasicSwap(
data_dir, settings, chain, log_name=log_prefix, extra_opts=extra_opts
)
logger = swap_client.log
if os.path.exists(pids_path):
with open(pids_path) as fd:
for ln in fd:
# TODO: try close
logger.warning("Found pid for daemon {}".format(ln.strip()))
# Ensure daemons are stopped
swap_client.stopDaemons()
# Settings may have been modified
settings = swap_client.settings
try:
# Try start daemons
for network in settings.get("networks", []):
if network.get("enabled", True) is False:
continue
network_type = network.get("type", "unknown")
if network_type == "simplex":
simplex_dir = os.path.join(data_dir, "simplex")
log_level = "debug" if swap_client.debug else "info"
socks_proxy = None
if "socks_proxy_override" in network:
socks_proxy = network["socks_proxy_override"]
elif swap_client.use_tor_proxy:
socks_proxy = (
f"{swap_client.tor_proxy_host}:{swap_client.tor_proxy_port}"
)
daemons.append(
startSimplexClient(
network["client_path"],
simplex_dir,
network["server_address"],
network["ws_port"],
logger,
swap_client.delay_event,
socks_proxy=socks_proxy,
log_level=log_level,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started Simplex client {pid}")
for c, v in settings["chainclients"].items():
if len(start_only_coins) > 0 and c not in start_only_coins:
continue
if (
len(swap_client.with_coins_override) > 0
and c not in swap_client.with_coins_override
) or c in swap_client.without_coins_override:
if v.get("manage_daemon", False) or v.get(
"manage_wallet_daemon", False
):
logger.warning(
f"Not starting coin {c.capitalize()}, disabled by arguments."
)
continue
try:
coin_id = swap_client.getCoinIdFromName(c)
display_name = getCoinName(coin_id)
except Exception as e: # noqa: F841
logger.warning(f"Not starting unknown coin: {c}")
continue
if c in ("monero", "wownero"):
if v["manage_daemon"] is True:
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, c + "d")
daemons.append(startXmrDaemon(v["datadir"], v["bindir"], filename))
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
if v["manage_wallet_daemon"] is True:
swap_client.log.info(f"Starting {display_name} wallet daemon")
daemon_addr = "{}:{}".format(v["rpchost"], v["rpcport"])
trusted_daemon: bool = swap_client.getXMRTrustedDaemon(
coin_id, v["rpchost"]
)
opts = [
"--daemon-address",
daemon_addr,
]
proxy_log_str = ""
proxy_host, proxy_port = swap_client.getXMRWalletProxy(
coin_id, v["rpchost"]
)
if proxy_host:
proxy_log_str = " through proxy"
opts += [
"--proxy",
f"{proxy_host}:{proxy_port}",
"--daemon-ssl-allow-any-cert",
]
swap_client.log.info(
"daemon-address: {} ({}){}".format(
daemon_addr,
"trusted" if trusted_daemon else "untrusted",
proxy_log_str,
)
)
daemon_rpcuser = v.get("rpcuser", "")
daemon_rpcpass = v.get("rpcpassword", "")
if daemon_rpcuser != "":
opts.append("--daemon-login")
opts.append(daemon_rpcuser + ":" + daemon_rpcpass)
opts.append(
"--trusted-daemon" if trusted_daemon else "--untrusted-daemon"
)
filename: str = getWalletBinName(coin_id, v, c + "-wallet-rpc")
daemons.append(
startXmrWalletDaemon(v["datadir"], v["bindir"], filename, opts)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
continue # /monero
if c == "decred":
appdata = v["datadir"]
extra_opts = [
f'--appdata="{appdata}"',
]
use_shell: bool = True if os.name == "nt" else False
if v["manage_daemon"] is True:
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, "dcrd")
extra_config = {
"add_datadir": False,
"stdout_to_file": True,
"stdout_filename": "dcrd_stdout.log",
"use_shell": use_shell,
"coin_name": "decred",
}
daemons.append(
startDaemon(
appdata,
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
if v["manage_wallet_daemon"] is True:
swap_client.log.info(f"Starting {display_name} wallet daemon")
filename: str = getWalletBinName(coin_id, v, "dcrwallet")
wallet_pwd = v["wallet_pwd"]
if wallet_pwd == "":
# Only set when in startonlycoin mode
wallet_pwd = os.getenv("WALLET_ENCRYPTION_PWD", "")
if wallet_pwd != "":
extra_opts.append(f'--pass="{wallet_pwd}"')
extra_config = {
"add_datadir": False,
"stdout_to_file": True,
"stdout_filename": "dcrwallet_stdout.log",
"use_shell": use_shell,
"coin_name": "decred",
}
daemons.append(
startDaemon(
appdata,
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started {filename} {pid}")
continue # /decred
if v["manage_daemon"] is True:
swap_client.log.info(f"Starting {display_name} daemon")
filename: str = getCoreBinName(coin_id, v, c + "d")
extra_opts = getCoreBinArgs(
coin_id, v, use_tor_proxy=swap_client.use_tor_proxy
)
extra_config = {"coin_name": c}
daemons.append(
startDaemon(
v["datadir"],
v["bindir"],
filename,
opts=extra_opts,
extra_config=extra_config,
)
)
pid = daemons[-1].handle.pid
pids.append((c, pid))
swap_client.setDaemonPID(c, pid)
swap_client.log.info(f"Started {filename} {pid}")
if len(pids) > 0:
with open(pids_path, "w") as fd:
for p in pids:
fd.write("{}:{}\n".format(*p))
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
signal.signal(signal.SIGHUP, signal_handler)
if len(start_only_coins) > 0:
logger.info(
f"Only running {start_only_coins}. Manually exit with Ctrl + c when ready."
)
mainLoop(daemons, update=False)
else:
swap_client.start()
if "htmlhost" in settings:
swap_client.log.info(
"Starting http server at http://%s:%d."
% (settings["htmlhost"], settings["htmlport"])
)
allow_cors = (
settings["allowcors"]
if "allowcors" in settings
else cfg.DEFAULT_ALLOW_CORS
)
thread_http = HttpThread(
settings["htmlhost"],
settings["htmlport"],
allow_cors,
swap_client,
)
threads.append(thread_http)
thread_http.start()
if "wshost" in settings:
ws_url = "ws://{}:{}".format(settings["wshost"], settings["wsport"])
swap_client.log.info(f"Starting ws server at {ws_url}.")
swap_client.ws_server = WebsocketServer(
host=settings["wshost"], port=settings["wsport"]
)
swap_client.ws_server.client_port = settings.get(
"wsclientport", settings["wsport"]
)
swap_client.ws_server.set_fn_new_client(ws_new_client)
swap_client.ws_server.set_fn_client_left(ws_client_left)
swap_client.ws_server.set_fn_message_received(ws_message_received)
swap_client.ws_server.run_forever(threaded=True)
logger.info("Exit with Ctrl + c.")
mainLoop(daemons)
except Exception as e: # noqa: F841
traceback.print_exc()
if swap_client.ws_server:
try:
swap_client.log.info("Stopping websocket server.")
swap_client.ws_server.shutdown_gracefully()
except Exception as e: # noqa: F841
traceback.print_exc()
swap_client.finalise()
swap_client.log.info("Stopping HTTP threads.")
for t in threads:
try:
t.stop()
t.join()
except Exception as e: # noqa: F841
traceback.print_exc()
closed_pids = []
for d in daemons:
swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}")
try:
d.handle.send_signal(
signal.CTRL_C_EVENT if os.name == "nt" else signal.SIGINT
)
except Exception as e:
swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}, error {e}")
for d in daemons:
try:
d.handle.wait(timeout=120)
for fp in [d.handle.stdout, d.handle.stderr, d.handle.stdin] + d.files:
if fp:
fp.close()
closed_pids.append(d.handle.pid)
except Exception as e:
swap_client.log.error(f"Error: {e}")
fail_code: int = swap_client.fail_code
del swap_client
if os.path.exists(pids_path):
with open(pids_path) as fd:
lines = fd.read().split("\n")
still_running = ""
for ln in lines:
try:
if int(ln.split(":")[1]) not in closed_pids:
still_running += ln + "\n"
except Exception:
pass
with open(pids_path, "w") as fd:
fd.write(still_running)
return fail_code
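# Illustrative note (pids assumed): the .pids file written above holds one
# "coin:pid" line per managed daemon, e.g.
#   particl:12345
#   bitcoin:12346
# On shutdown, lines whose pids were closed cleanly are dropped; anything left
# in the file points at a daemon that did not exit and may need manual cleanup.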
def printVersion():
logger.info(
f"Basicswap version: {__version__}",
)
def ensure_coin_valid(coin: str) -> bool:
if isKnownCoinName(coin) is False:
raise ValueError(f"Unknown coin: {coin}")
def printHelp():
print("Usage: basicswap-run ")
print("\n--help, -h Print help.")
print("--version, -v Print version.")
print(
f"--datadir=PATH Path to basicswap data directory, default:{cfg.BASICSWAP_DATADIR}."
)
print("--mainnet Run in mainnet mode.")
print("--testnet Run in testnet mode.")
print("--regtest Run in regtest mode.")
print("--withcoin= Run only with coin/s.")
print("--withoutcoin= Run without coin/s.")
print(
"--startonlycoin Only start the provides coin daemon/s, use this if a chain requires extra processing."
)
print("--logprefix Specify log prefix.")
print(
"--forcedbupgrade Recheck database against schema regardless of version."
)
def main():
data_dir = None
chain = "mainnet"
start_only_coins = set()
log_prefix: str = "BasicSwap"
options = dict()
with_coins = set()
without_coins = set()
for v in sys.argv[1:]:
if len(v) < 2 or v[0] != "-":
logger.warning(f"Unknown argument {v}")
continue
s = v.split("=")
name = s[0].strip()
for i in range(2):
if name[0] == "-":
name = name[1:]
if name == "v" or name == "version":
printVersion()
return 0
if name == "h" or name == "help":
printHelp()
return 0
if name in ("mainnet", "testnet", "regtest"):
chain = name
continue
if name in ("withcoin", "withcoins"):
for coin in [s.strip().lower() for s in s[1].split(",")]:
ensure_coin_valid(coin)
with_coins.add(coin)
continue
if name in ("withoutcoin", "withoutcoins"):
for coin in [s.strip().lower() for s in s[1].split(",")]:
if coin == "particl":
raise ValueError("Particl is required.")
ensure_coin_valid(coin)
without_coins.add(coin)
continue
if name == "forcedbupgrade":
options["force_db_upgrade"] = True
continue
if len(s) == 2:
if name == "datadir":
data_dir = os.path.expanduser(s[1])
continue
if name == "logprefix":
log_prefix = s[1]
continue
if name == "startonlycoin":
for coin in [s.lower() for s in s[1].split(",")]:
ensure_coin_valid(coin)
start_only_coins.add(coin)
continue
logger.warning(f"Unknown argument {v}")
if os.name == "nt":
logger.warning(
"Running on windows is discouraged and windows support may be discontinued in the future. Please consider using the WSL docker setup instead."
)
if data_dir is None:
data_dir = os.path.join(os.path.expanduser(cfg.BASICSWAP_DATADIR))
logger.info(f"Using datadir: {data_dir}")
logger.info(f"Chain: {chain}")
if not os.path.exists(data_dir):
os.makedirs(data_dir)
if len(with_coins) > 0:
with_coins.add("particl")
options["with_coins"] = with_coins
if len(without_coins) > 0:
options["without_coins"] = without_coins
logger.info(os.path.basename(sys.argv[0]) + ", version: " + __version__ + "\n\n")
fail_code = runClient(data_dir, chain, start_only_coins, log_prefix, options)
print("Done.")
return fail_code
if __name__ == "__main__":
main()
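# Example invocation (paths and coin list are assumptions, adjust to your setup):
#   basicswap-run --datadir=~/.basicswap --withcoins=bitcoin,monero
# Particl is always added to the run set when --withcoins is used, and coins named
# with --withcoin/--withoutcoin must match entries in chainparams.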

View File

@@ -1,577 +1,330 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2019-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from enum import IntEnum
from .util import (
COIN,
make_int,
format_amount,
TemporaryError,
)
XMR_COIN = 10**12
WOW_COIN = 10**11
XMR_COIN = 10 ** 12
class Coins(IntEnum):
PART = 1
BTC = 2
LTC = 3
DCR = 4
# DCR = 4
NMC = 5
XMR = 6
PART_BLIND = 7
PART_ANON = 8
WOW = 9
# ZANO = 9
# NDAU = 10
PIVX = 11
DASH = 12
FIRO = 13
NAV = 14
LTC_MWEB = 15
# ZANO = 16
BCH = 17
DOGE = 18
class Fiat(IntEnum):
USD = -1
GBP = -2
EUR = -3
chainparams = {
Coins.PART: {
"name": "particl",
"ticker": "PART",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 2,
"decimal_places": 8,
"mainnet": {
"rpcport": 51735,
"pubkey_address": 0x38,
"script_address": 0x3C,
"key_prefix": 0x6C,
"stealth_key_prefix": 0x14,
"hrp": "pw",
"bip44": 44,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x696E82D1,
"ext_secret_key_prefix": 0x8F1DAEB8,
'name': 'particl',
'ticker': 'PART',
'message_magic': 'Bitcoin Signed Message:\n',
'blocks_target': 60 * 2,
'decimal_places': 8,
'mainnet': {
'rpcport': 51735,
'pubkey_address': 0x38,
'script_address': 0x3c,
'key_prefix': 0x6c,
'stealth_key_prefix': 0x14,
'hrp': 'pw',
'bip44': 44,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 51935,
"pubkey_address": 0x76,
"script_address": 0x7A,
"key_prefix": 0x2E,
"stealth_key_prefix": 0x15,
"hrp": "tpw",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0xE1427800,
"ext_secret_key_prefix": 0x04889478,
},
"regtest": {
"rpcport": 51936,
"pubkey_address": 0x76,
"script_address": 0x7A,
"key_prefix": 0x2E,
"stealth_key_prefix": 0x15,
"hrp": "rtpw",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0xE1427800,
"ext_secret_key_prefix": 0x04889478,
'testnet': {
'rpcport': 51935,
'pubkey_address': 0x76,
'script_address': 0x7a,
'key_prefix': 0x2e,
'stealth_key_prefix': 0x15,
'hrp': 'tpw',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
'regtest': {
'rpcport': 51936,
'pubkey_address': 0x76,
'script_address': 0x7a,
'key_prefix': 0x2e,
'stealth_key_prefix': 0x15,
'hrp': 'rtpw',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.BTC: {
"name": "bitcoin",
"ticker": "BTC",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"mainnet": {
"rpcport": 8332,
"pubkey_address": 0,
"script_address": 5,
"key_prefix": 128,
"hrp": "bc",
"bip44": 0,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x0488B21E,
"ext_secret_key_prefix": 0x0488ADE4,
'name': 'bitcoin',
'ticker': 'BTC',
'message_magic': 'Bitcoin Signed Message:\n',
'blocks_target': 60 * 10,
'decimal_places': 8,
'mainnet': {
'rpcport': 8332,
'pubkey_address': 0,
'script_address': 5,
'key_prefix': 128,
'hrp': 'bc',
'bip44': 0,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "tb",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bcrt",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
'testnet': {
'rpcport': 18332,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'tb',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet3',
},
'regtest': {
'rpcport': 18443,
'pubkey_address': 111,
'script_address': 196,
'key_prefix': 239,
'hrp': 'bcrt',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.LTC: {
"name": "litecoin",
"ticker": "LTC",
"message_magic": "Litecoin Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"mainnet": {
"rpcport": 9332,
"pubkey_address": 48,
"script_address": 5,
"script_address2": 50,
"key_prefix": 176,
"hrp": "ltc",
"bip44": 2,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'litecoin',
'ticker': 'LTC',
'message_magic': 'Litecoin Signed Message:\n',
'blocks_target': 60 * 1,
'decimal_places': 8,
'mainnet': {
'rpcport': 9332,
'pubkey_address': 48,
'script_address': 5,
'script_address2': 50,
'key_prefix': 176,
'hrp': 'ltc',
'bip44': 2,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 19332,
"pubkey_address": 111,
"script_address": 196,
"script_address2": 58,
"key_prefix": 239,
"hrp": "tltc",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 19443,
"pubkey_address": 111,
"script_address": 196,
"script_address2": 58,
"key_prefix": 239,
"hrp": "rltc",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.DOGE: {
"name": "dogecoin",
"ticker": "DOGE",
"message_magic": "Dogecoin Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"mainnet": {
"rpcport": 22555,
"pubkey_address": 30,
"script_address": 22,
"key_prefix": 158,
"hrp": "doge",
"bip44": 3,
"min_amount": 100000, # TODO increase above fee
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 44555,
"pubkey_address": 113,
"script_address": 196,
"key_prefix": 241,
"hrp": "tdge",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "rdge",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.DCR: {
"name": "decred",
"ticker": "DCR",
"message_magic": "Decred Signed Message:\n",
"blocks_target": 60 * 5,
"decimal_places": 8,
"has_multiwallet": False,
"mainnet": {
"rpcport": 9109,
"pubkey_address": 0x073F,
"script_address": 0x071A,
"key_prefix": 0x22DE,
"bip44": 42,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 19109,
"pubkey_address": 0x0F21,
"script_address": 0x0EFC,
"key_prefix": 0x230E,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
},
"regtest": { # simnet
"rpcport": 18656,
"pubkey_address": 0x0E91,
"script_address": 0x0E6C,
"key_prefix": 0x2307,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 19332,
'pubkey_address': 111,
'script_address': 196,
'script_address2': 58,
'key_prefix': 239,
'hrp': 'tltc',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet4',
},
'regtest': {
'rpcport': 19443,
'pubkey_address': 111,
'script_address': 196,
'script_address2': 58,
'key_prefix': 239,
'hrp': 'rltc',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.NMC: {
"name": "namecoin",
"ticker": "NMC",
"message_magic": "Namecoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"mainnet": {
"rpcport": 8336,
"pubkey_address": 52,
"script_address": 13,
"key_prefix": 180,
"hrp": "nc",
"bip44": 7,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x0488B21E, # base58Prefixes[EXT_PUBLIC_KEY]
"ext_secret_key_prefix": 0x0488ADE4,
'name': 'namecoin',
'ticker': 'NMC',
'message_magic': 'Namecoin Signed Message:\n',
'blocks_target': 60 * 10,
'decimal_places': 8,
'mainnet': {
'rpcport': 8336,
'pubkey_address': 52,
'script_address': 13,
'hrp': 'nc',
'bip44': 7,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 18336,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "tn",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "ncrt",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"ext_public_key_prefix": 0x043587CF,
"ext_secret_key_prefix": 0x04358394,
'testnet': {
'rpcport': 18336,
'pubkey_address': 111,
'script_address': 196,
'hrp': 'tn',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet3',
},
'regtest': {
'rpcport': 18443,
'pubkey_address': 111,
'script_address': 196,
'hrp': 'ncrt',
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
Coins.XMR: {
"name": "monero",
"ticker": "XMR",
"client": "xmr",
"decimal_places": 12,
"mainnet": {
"rpcport": 18081,
"walletrpcport": 18082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
'name': 'monero',
'ticker': 'XMR',
'client': 'xmr',
'decimal_places': 12,
'mainnet': {
'rpcport': 18081,
'walletrpcport': 18082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
},
"testnet": {
"rpcport": 28081,
"walletrpcport": 28082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
},
"regtest": {
"rpcport": 18081,
"walletrpcport": 18082,
"min_amount": 1000000000,
"max_amount": 10000000 * XMR_COIN,
"address_prefix": 18,
},
},
Coins.WOW: {
"name": "wownero",
"ticker": "WOW",
"client": "wow",
"decimal_places": 11,
"mainnet": {
"rpcport": 34568,
"walletrpcport": 34572, # todo
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
},
"testnet": {
"rpcport": 44568,
"walletrpcport": 44572,
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
},
"regtest": {
"rpcport": 54568,
"walletrpcport": 54572,
"min_amount": 100000000,
"max_amount": 10000000 * WOW_COIN,
"address_prefix": 4146,
'testnet': {
'rpcport': 28081,
'walletrpcport': 28082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
},
'regtest': {
'rpcport': 18081,
'walletrpcport': 18082,
'min_amount': 100000,
'max_amount': 10000 * XMR_COIN,
}
},
Coins.PIVX: {
"name": "pivx",
"ticker": "PIVX",
"display_name": "PIVX",
"message_magic": "DarkNet Signed Message:\n",
"blocks_target": 60 * 1,
"decimal_places": 8,
"has_cltv": True,
"has_csv": False,
"has_segwit": False,
"mainnet": {
"rpcport": 51473,
"pubkey_address": 30,
"script_address": 13,
"key_prefix": 212,
"bip44": 119,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'name': 'pivx',
'ticker': 'PIVX',
'message_magic': 'DarkNet Signed Message:\n',
'blocks_target': 60 * 1,
'decimal_places': 8,
'has_csv': False,
'has_segwit': False,
'mainnet': {
'rpcport': 51473,
'pubkey_address': 30,
'script_address': 13,
'key_prefix': 212,
'bip44': 119,
'min_amount': 1000,
'max_amount': 100000 * COIN,
},
"testnet": {
"rpcport": 51475,
"pubkey_address": 139,
"script_address": 19,
"key_prefix": 239,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet4",
},
"regtest": {
"rpcport": 51477,
"pubkey_address": 139,
"script_address": 19,
"key_prefix": 239,
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.DASH: {
"name": "dash",
"ticker": "DASH",
"message_magic": "DarkCoin Signed Message:\n",
"blocks_target": 60 * 2.5,
"decimal_places": 8,
"has_csv": True,
"has_segwit": False,
"mainnet": {
"rpcport": 9998,
"pubkey_address": 76,
"script_address": 16,
"key_prefix": 204,
"hrp": "",
"bip44": 5,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 19998,
"pubkey_address": 140,
"script_address": 19,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 18332,
"pubkey_address": 140,
"script_address": 19,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.FIRO: {
"name": "firo",
"ticker": "FIRO",
"message_magic": "Zcoin Signed Message:\n",
"blocks_target": 60 * 10,
"decimal_places": 8,
"has_cltv": False,
"has_csv": False,
"has_segwit": False,
"has_multiwallet": False,
"mainnet": {
"rpcport": 8888,
"pubkey_address": 82,
"script_address": 7,
"key_prefix": 210,
"hrp": "",
"bip44": 136,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 18888,
"pubkey_address": 65,
"script_address": 178,
"key_prefix": 185,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 28888,
"pubkey_address": 65,
"script_address": 178,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.NAV: {
"name": "navcoin",
"ticker": "NAV",
"message_magic": "Navcoin Signed Message:\n",
"blocks_target": 30,
"decimal_places": 8,
"has_csv": True,
"has_segwit": True,
"has_multiwallet": False,
"mainnet": {
"rpcport": 44444,
"pubkey_address": 53,
"script_address": 85,
"key_prefix": 150,
"hrp": "",
"bip44": 130,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 44445,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"regtest": {
"rpcport": 44446,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
},
Coins.BCH: {
"name": "bitcoincash",
"ticker": "BCH",
"display_name": "Bitcoin Cash",
"message_magic": "Bitcoin Signed Message:\n",
"blocks_target": 60 * 2,
"decimal_places": 8,
"has_cltv": True,
"has_csv": True,
"has_segwit": False,
"cli_binname": "bitcoin-cli",
"core_binname": "bitcoind",
"mainnet": {
"rpcport": 8332,
"pubkey_address": 0,
"script_address": 5,
"key_prefix": 128,
"hrp": "bitcoincash",
"bip44": 0,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
},
"testnet": {
"rpcport": 18332,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bchtest",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
"name": "testnet3",
},
"regtest": {
"rpcport": 18443,
"pubkey_address": 111,
"script_address": 196,
"key_prefix": 239,
"hrp": "bchreg",
"bip44": 1,
"min_amount": 100000,
"max_amount": 10000000 * COIN,
'testnet': {
'rpcport': 51475,
'pubkey_address': 139,
'script_address': 19,
'key_prefix': 239,
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
'name': 'testnet4',
},
'regtest': {
'rpcport': 51477,
'pubkey_address': 139,
'script_address': 19,
'key_prefix': 239,
'bip44': 1,
'min_amount': 1000,
'max_amount': 100000 * COIN,
}
},
}
name_map = {}
ticker_map = {}
for c, params in chainparams.items():
name_map[params["name"].lower()] = c
ticker_map[params["ticker"].lower()] = c
ticker_map[params['ticker'].lower()] = c
def getCoinIdFromTicker(ticker: str) -> str:
def getCoinIdFromTicker(ticker):
try:
return ticker_map[ticker.lower()]
except Exception:
raise ValueError(f"Unknown coin {ticker}")
raise ValueError('Unknown coin')
def getCoinIdFromName(name: str) -> str:
try:
return name_map[name.lower()]
except Exception:
raise ValueError(f"Unknown coin {name}")
class CoinInterface:
def __init__(self, network):
self.setDefaults()
self._network = network
self._mx_wallet = threading.Lock()
def setDefaults(self):
self._unknown_wallet_seed = True
self._restore_height = None
def isKnownCoinName(name: str) -> bool:
return params["name"].lower() in name_map
def make_int(self, amount_in, r=0):
return make_int(amount_in, self.exp(), r=r)
def format_amount(self, amount_in, conv_int=False, r=0):
amount_int = make_int(amount_in, self.exp(), r=r) if conv_int else amount_in
return format_amount(amount_int, self.exp())
def coin_name(self):
return chainparams[self.coin_type()]['name'].capitalize()
def ticker(self):
ticker = chainparams[self.coin_type()]['ticker']
if self._network == 'testnet':
ticker = 't' + ticker
elif self._network == 'regtest':
ticker = 'rt' + ticker
return ticker
def ticker_mainnet(self):
ticker = chainparams[self.coin_type()]['ticker']
return ticker
def min_amount(self):
return chainparams[self.coin_type()][self._network]['min_amount']
def max_amount(self):
return chainparams[self.coin_type()][self._network]['max_amount']
def setWalletSeedWarning(self, value):
self._unknown_wallet_seed = value
def setWalletRestoreHeight(self, value):
self._restore_height = value
def knownWalletSeed(self):
return not self._unknown_wallet_seed
def chainparams(self):
return chainparams[self.coin_type()]
def chainparams_network(self):
return chainparams[self.coin_type()][self._network]
def is_transient_error(self, ex):
if isinstance(ex, TemporaryError):
return True
str_error = str(ex).lower()
if 'not enough unlocked money' in str_error:
return True
if 'transaction was rejected by daemon' in str_error:
return True
if 'invalid unlocked_balance' in str_error:
return True
if 'daemon is busy' in str_error:
return True
return False
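# Illustrative sketch (not part of the file): the amount helpers above convert
# between display values and integer base units using the coin's exponent.
# For an 8-decimal-place coin (exp() == 8):
#   make_int("1.25", 8)         -> 125000000
#   format_amount(125000000, 8) converts back to a decimal string.
# min_amount()/max_amount() bound offers in the same integer base units.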

View File

@@ -1,49 +1,43 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2025 The Basicswap developers
# Copyright (c) 2019-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import os
CONFIG_FILENAME = "basicswap.json"
BASICSWAP_DATADIR = os.getenv("BASICSWAP_DATADIR", os.path.join("~", ".basicswap"))
CONFIG_FILENAME = 'basicswap.json'
BASICSWAP_DATADIR = os.getenv('BASICSWAP_DATADIR', '~/.basicswap')
DEFAULT_ALLOW_CORS = False
TEST_DATADIRS = os.path.expanduser(os.getenv("DATADIRS", "/tmp/basicswap"))
DEFAULT_TEST_BINDIR = os.path.expanduser(
os.getenv("DEFAULT_TEST_BINDIR", os.path.join("~", ".basicswap", "bin"))
)
TEST_DATADIRS = os.path.expanduser(os.getenv('DATADIRS', '/tmp/basicswap'))
DEFAULT_TEST_BINDIR = os.path.expanduser(os.getenv('DEFAULT_TEST_BINDIR', '~/tmp/bin'))
bin_suffix = ".exe" if os.name == "nt" else ""
PARTICL_BINDIR = os.path.expanduser(
os.getenv("PARTICL_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "particl"))
)
PARTICLD = os.getenv("PARTICLD", "particld" + bin_suffix)
PARTICL_CLI = os.getenv("PARTICL_CLI", "particl-cli" + bin_suffix)
PARTICL_TX = os.getenv("PARTICL_TX", "particl-tx" + bin_suffix)
bin_suffix = ('.exe' if os.name == 'nt' else '')
PARTICL_BINDIR = os.path.expanduser(os.getenv('PARTICL_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'particl')))
PARTICLD = os.getenv('PARTICLD', 'particld' + bin_suffix)
PARTICL_CLI = os.getenv('PARTICL_CLI', 'particl-cli' + bin_suffix)
PARTICL_TX = os.getenv('PARTICL_TX', 'particl-tx' + bin_suffix)
BITCOIN_BINDIR = os.path.expanduser(
os.getenv("BITCOIN_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "bitcoin"))
)
BITCOIND = os.getenv("BITCOIND", "bitcoind" + bin_suffix)
BITCOIN_CLI = os.getenv("BITCOIN_CLI", "bitcoin-cli" + bin_suffix)
BITCOIN_TX = os.getenv("BITCOIN_TX", "bitcoin-tx" + bin_suffix)
BITCOIN_BINDIR = os.path.expanduser(os.getenv('BITCOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'bitcoin')))
BITCOIND = os.getenv('BITCOIND', 'bitcoind' + bin_suffix)
BITCOIN_CLI = os.getenv('BITCOIN_CLI', 'bitcoin-cli' + bin_suffix)
BITCOIN_TX = os.getenv('BITCOIN_TX', 'bitcoin-tx' + bin_suffix)
LITECOIN_BINDIR = os.path.expanduser(
os.getenv("LITECOIN_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "litecoin"))
)
LITECOIND = os.getenv("LITECOIND", "litecoind" + bin_suffix)
LITECOIN_CLI = os.getenv("LITECOIN_CLI", "litecoin-cli" + bin_suffix)
LITECOIN_TX = os.getenv("LITECOIN_TX", "litecoin-tx" + bin_suffix)
LITECOIN_BINDIR = os.path.expanduser(os.getenv('LITECOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'litecoin')))
LITECOIND = os.getenv('LITECOIND', 'litecoind' + bin_suffix)
LITECOIN_CLI = os.getenv('LITECOIN_CLI', 'litecoin-cli' + bin_suffix)
LITECOIN_TX = os.getenv('LITECOIN_TX', 'litecoin-tx' + bin_suffix)
DOGECOIND = os.getenv("DOGECOIND", "dogecoind" + bin_suffix)
DOGECOIN_CLI = os.getenv("DOGECOIN_CLI", "dogecoin-cli" + bin_suffix)
DOGECOIN_TX = os.getenv("DOGECOIN_TX", "dogecoin-tx" + bin_suffix)
NAMECOIN_BINDIR = os.path.expanduser(os.getenv('NAMECOIN_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'namecoin')))
NAMECOIND = os.getenv('NAMECOIND', 'namecoind' + bin_suffix)
NAMECOIN_CLI = os.getenv('NAMECOIN_CLI', 'namecoin-cli' + bin_suffix)
NAMECOIN_TX = os.getenv('NAMECOIN_TX', 'namecoin-tx' + bin_suffix)
XMR_BINDIR = os.path.expanduser(
os.getenv("XMR_BINDIR", os.path.join(DEFAULT_TEST_BINDIR, "monero"))
)
XMRD = os.getenv("XMRD", "monerod" + bin_suffix)
XMR_WALLET_RPC = os.getenv("XMR_WALLET_RPC", "monero-wallet-rpc" + bin_suffix)
XMR_BINDIR = os.path.expanduser(os.getenv('XMR_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'monero')))
XMRD = os.getenv('XMRD', 'monerod' + bin_suffix)
XMR_WALLET_RPC = os.getenv('XMR_WALLET_RPC', 'monero-wallet-rpc' + bin_suffix)
# NOTE: Adding coin definitions here is deprecated. Please add in coin test file.
PIVX_BINDIR = os.path.expanduser(os.getenv('PIVX_BINDIR', os.path.join(DEFAULT_TEST_BINDIR, 'pivx')))
PIVXD = os.getenv('PIVXD', 'pivxd' + bin_suffix)
PIVX_CLI = os.getenv('PIVX_CLI', 'pivx-cli' + bin_suffix)
PIVX_TX = os.getenv('PIVX_TX', 'pivx-tx' + bin_suffix)

View File

@@ -1 +0,0 @@

View File

@@ -1,533 +0,0 @@
intro = """
blake.py
version 5, 2-Apr-2014
BLAKE is a SHA3 round-3 finalist designed and submitted by
Jean-Philippe Aumasson et al.
At the core of BLAKE is a ChaCha-like mixer, very similar
to that found in the stream cipher, ChaCha8. Besides being
a very good mixer, ChaCha is fast.
References:
http://www.131002.net/blake/
http://csrc.nist.gov/groups/ST/hash/sha-3/index.html
http://en.wikipedia.org/wiki/BLAKE_(hash_function)
This implementation assumes all data is in increments of
whole bytes. (The formal definition of BLAKE allows for
hashing individual bits.) Note too that this implementation
does include the round-3 tweaks where the number of rounds
was increased to 14/16 from 10/14.
This version can be imported into both Python2 (2.6 and 2.7)
and Python3 programs. Python 2.5 requires an older version
of blake.py (version 4).
Here are some comparative times for different versions of
Python:
64-bit:
2.6 6.284s
2.7 6.343s
3.2 7.620s
pypy (2.7) 2.080s
32-bit:
2.5 (32) 15.389s (with psyco)
2.7-32 13.645s
3.2-32 12.574s
One test on a 2.0GHz Core 2 Duo of 10,000 iterations of
BLAKE-256 on a short message produced a time of 5.7 seconds.
Not bad, but if raw speed is what you want, look to
the C version. It is 40x faster and did the same thing
in 0.13 seconds.
Copyright (c) 2009-2012 by Larry Bugbee, Kent, WA
ALL RIGHTS RESERVED.
blake.py IS EXPERIMENTAL SOFTWARE FOR EDUCATIONAL
PURPOSES ONLY. IT IS MADE AVAILABLE "AS-IS" WITHOUT
WARRANTY OR GUARANTEE OF ANY KIND. USE SIGNIFIES
ACCEPTANCE OF ALL RISK.
To make your learning and experimentation less cumbersome,
blake.py is free for any use.
Enjoy,
Larry Bugbee
March 2011
rev May 2011 - fixed Python version check (tx JP)
rev Apr 2012 - fixed an out-of-order bit set in final()
- moved self-test to a separate test pgm
- this now works with Python2 and Python3
rev Apr 2014 - added test and conversion of string input
to byte string in update() (tx Soham)
- added hexdigest() method.
- now support state 3 so only one call to
final() per instantiation is allowed. all
subsequent calls to final(), digest() or
hexdigest() simply return the stored value.
"""
import struct
from binascii import hexlify, unhexlify
#---------------------------------------------------------------
class BLAKE(object):
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
# initial values, constants and padding
# IVx for BLAKE-x
IV64 = [
0x6A09E667F3BCC908, 0xBB67AE8584CAA73B,
0x3C6EF372FE94F82B, 0xA54FF53A5F1D36F1,
0x510E527FADE682D1, 0x9B05688C2B3E6C1F,
0x1F83D9ABFB41BD6B, 0x5BE0CD19137E2179,
]
IV48 = [
0xCBBB9D5DC1059ED8, 0x629A292A367CD507,
0x9159015A3070DD17, 0x152FECD8F70E5939,
0x67332667FFC00B31, 0x8EB44A8768581511,
0xDB0C2E0D64F98FA7, 0x47B5481DBEFA4FA4,
]
# note: the values here are the same as the high-order
# half-words of IV64
IV32 = [
0x6A09E667, 0xBB67AE85,
0x3C6EF372, 0xA54FF53A,
0x510E527F, 0x9B05688C,
0x1F83D9AB, 0x5BE0CD19,
]
# note: the values here are the same as the low-order
# half-words of IV48
IV28 = [
0xC1059ED8, 0x367CD507,
0x3070DD17, 0xF70E5939,
0xFFC00B31, 0x68581511,
0x64F98FA7, 0xBEFA4FA4,
]
# constants for BLAKE-64 and BLAKE-48
C64 = [
0x243F6A8885A308D3, 0x13198A2E03707344,
0xA4093822299F31D0, 0x082EFA98EC4E6C89,
0x452821E638D01377, 0xBE5466CF34E90C6C,
0xC0AC29B7C97C50DD, 0x3F84D5B5B5470917,
0x9216D5D98979FB1B, 0xD1310BA698DFB5AC,
0x2FFD72DBD01ADFB7, 0xB8E1AFED6A267E96,
0xBA7C9045F12C7F99, 0x24A19947B3916CF7,
0x0801F2E2858EFC16, 0x636920D871574E69,
]
# constants for BLAKE-32 and BLAKE-28
# note: concatenate and the values are the same as the values
# for the 1st half of C64
C32 = [
0x243F6A88, 0x85A308D3,
0x13198A2E, 0x03707344,
0xA4093822, 0x299F31D0,
0x082EFA98, 0xEC4E6C89,
0x452821E6, 0x38D01377,
0xBE5466CF, 0x34E90C6C,
0xC0AC29B7, 0xC97C50DD,
0x3F84D5B5, 0xB5470917,
]
# the 10 permutations of {0,...,15}
SIGMA = [
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
[14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
[11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
[ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
[ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
[ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
[12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
[13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
[ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
[10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15],
[14,10, 4, 8, 9,15,13, 6, 1,12, 0, 2,11, 7, 5, 3],
[11, 8,12, 0, 5, 2,15,13,10,14, 3, 6, 7, 1, 9, 4],
[ 7, 9, 3, 1,13,12,11,14, 2, 6, 5,10, 4, 0,15, 8],
[ 9, 0, 5, 7, 2, 4,10,15,14, 1,11,12, 6, 8, 3,13],
[ 2,12, 6,10, 0,11, 8, 3, 4,13, 7, 5,15,14, 1, 9],
[12, 5, 1,15,14,13, 4,10, 0, 7, 6, 3, 9, 2, 8,11],
[13,11, 7,14,12, 1, 3, 9, 5, 0,15, 4, 8, 6, 2,10],
[ 6,15,14, 9,11, 3, 0, 8,12, 2,13, 7, 1, 4,10, 5],
[10, 2, 8, 4, 7, 6, 1, 5,15,11, 9,14, 3,12,13, 0],
]
MASK32BITS = 0xFFFFFFFF
MASK64BITS = 0xFFFFFFFFFFFFFFFF
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def __init__(self, hashbitlen):
"""
load the hashSate structure (copy hashbitlen...)
hashbitlen: length of the hash output
"""
if hashbitlen not in [224, 256, 384, 512]:
raise Exception('hash length not 224, 256, 384 or 512')
self.hashbitlen = hashbitlen
self.h = [0]*8 # current chain value (initialized to the IV)
self.t = 0 # number of *BITS* hashed so far
self.cache = b'' # cached leftover data not yet compressed
self.salt = [0]*4 # salt (null by default)
self.state = 1 # set to 2 by update and 3 by final
self.nullt = 0 # Boolean value for special case \ell_i=0
# The algorithm is the same for both the 32- and 64- versions
# of BLAKE. The difference is in word size (4 vs 8 bytes),
# blocksize (64 vs 128 bytes), number of rounds (14 vs 16)
# and a few very specific constants.
if (hashbitlen == 224) or (hashbitlen == 256):
# setup for 32-bit words and 64-bit block
self.byte2int = self._fourByte2int
self.int2byte = self._int2fourByte
self.MASK = self.MASK32BITS
self.WORDBYTES = 4
self.WORDBITS = 32
self.BLKBYTES = 64
self.BLKBITS = 512
self.ROUNDS = 14 # was 10 before round 3
self.cxx = self.C32
self.rot1 = 16 # num bits to shift in G
self.rot2 = 12 # num bits to shift in G
self.rot3 = 8 # num bits to shift in G
self.rot4 = 7 # num bits to shift in G
self.mul = 0 # for 32-bit words, 32<<self.mul where self.mul = 0
# 224- and 256-bit versions (32-bit words)
if hashbitlen == 224:
self.h = self.IV28[:]
else:
self.h = self.IV32[:]
elif (hashbitlen == 384) or (hashbitlen == 512):
# setup for 64-bit words and 128-bit block
self.byte2int = self._eightByte2int
self.int2byte = self._int2eightByte
self.MASK = self.MASK64BITS
self.WORDBYTES = 8
self.WORDBITS = 64
self.BLKBYTES = 128
self.BLKBITS = 1024
self.ROUNDS = 16 # was 14 before round 3
self.cxx = self.C64
self.rot1 = 32 # num bits to shift in G
self.rot2 = 25 # num bits to shift in G
self.rot3 = 16 # num bits to shift in G
self.rot4 = 11 # num bits to shift in G
self.mul = 1 # for 64-bit words, 32<<self.mul where self.mul = 1
# 384- and 512-bit versions (64-bit words)
if hashbitlen == 384:
self.h = self.IV48[:]
else:
self.h = self.IV64[:]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _compress(self, block):
byte2int = self.byte2int
mul = self.mul # de-reference these for ...speed? ;-)
cxx = self.cxx
rot1 = self.rot1
rot2 = self.rot2
rot3 = self.rot3
rot4 = self.rot4
MASK = self.MASK
WORDBITS = self.WORDBITS
SIGMA = self.SIGMA
# get message (<<2 is the same as *4 but faster)
m = [byte2int(block[i<<2<<mul:(i<<2<<mul)+(4<<mul)]) for i in range(16)]
# initialization
v = [0]*16
v[ 0: 8] = [self.h[i] for i in range(8)]
v[ 8:16] = [self.cxx[i] for i in range(8)]
v[ 8:12] = [v[8+i] ^ self.salt[i] for i in range(4)]
if self.nullt == 0: # (i>>1 is the same as i/2 but faster)
v[12] = v[12] ^ (self.t & MASK)
v[13] = v[13] ^ (self.t & MASK)
v[14] = v[14] ^ (self.t >> self.WORDBITS)
v[15] = v[15] ^ (self.t >> self.WORDBITS)
# - - - - - - - - - - - - - - - - -
# ready? let's ChaCha!!!
def G(a, b, c, d, i):
va = v[a] # it's faster to deref and reref later
vb = v[b]
vc = v[c]
vd = v[d]
sri = SIGMA[round][i]
sri1 = SIGMA[round][i+1]
va = ((va + vb) + (m[sri] ^ cxx[sri1]) ) & MASK
x = vd ^ va
vd = (x >> rot1) | ((x << (WORDBITS-rot1)) & MASK)
vc = (vc + vd) & MASK
x = vb ^ vc
vb = (x >> rot2) | ((x << (WORDBITS-rot2)) & MASK)
va = ((va + vb) + (m[sri1] ^ cxx[sri]) ) & MASK
x = vd ^ va
vd = (x >> rot3) | ((x << (WORDBITS-rot3)) & MASK)
vc = (vc + vd) & MASK
x = vb ^ vc
vb = (x >> rot4) | ((x << (WORDBITS-rot4)) & MASK)
v[a] = va
v[b] = vb
v[c] = vc
v[d] = vd
for round in range(self.ROUNDS):
# column step
G( 0, 4, 8,12, 0)
G( 1, 5, 9,13, 2)
G( 2, 6,10,14, 4)
G( 3, 7,11,15, 6)
# diagonal step
G( 0, 5,10,15, 8)
G( 1, 6,11,12,10)
G( 2, 7, 8,13,12)
G( 3, 4, 9,14,14)
# - - - - - - - - - - - - - - - - -
# save current hash value (use i&0x3 to get 0,1,2,3,0,1,2,3)
self.h = [self.h[i]^v[i]^v[i+8]^self.salt[i&0x3]
for i in range(8)]
# print 'self.h', [num2hex(h) for h in self.h]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def addsalt(self, salt):
""" adds a salt to the hash function (OPTIONAL)
should be called AFTER Init, and BEFORE update
salt: a bytestring, length determined by hashbitlen.
if not of sufficient length, the bytestring
will be assumed to be a big endian number and
prefixed with an appropriate number of null
bytes, and if too large, only the low order
bytes will be used.
if hashbitlen=224 or 256, then salt will be 16 bytes
if hashbitlen=384 or 512, then salt will be 32 bytes
"""
# fail if addsalt() was not called at the right time
if self.state != 1:
raise Exception('addsalt() not called after init() and before update()')
# salt size is to be 4x word size
saltsize = self.WORDBYTES * 4
# if too short, prefix with null bytes. if too long,
# truncate high order bytes
if len(salt) < saltsize:
salt = (chr(0)*(saltsize-len(salt)) + salt)
else:
salt = salt[-saltsize:]
# prep the salt array
self.salt[0] = self.byte2int(salt[ : 4<<self.mul])
self.salt[1] = self.byte2int(salt[ 4<<self.mul: 8<<self.mul])
self.salt[2] = self.byte2int(salt[ 8<<self.mul:12<<self.mul])
self.salt[3] = self.byte2int(salt[12<<self.mul: ])
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def update(self, data):
""" update the state with new data, storing excess data
as necessary. may be called multiple times and if a
call sends less than a full block in size, the leftover
is cached and will be consumed in the next call
data: data to be hashed (bytestring)
"""
self.state = 2
BLKBYTES = self.BLKBYTES # de-referenced for improved readability
BLKBITS = self.BLKBITS
datalen = len(data)
if not datalen: return
if type(data) == type(u''):
# use either of the next two lines for a proper
# response under both Python2 and Python3
data = data.encode('UTF-8') # converts to byte string
#data = bytearray(data, 'utf-8') # use if want mutable
# This next line works for Py3 but fails under
# Py2 because the Py2 version of bytes() will
# accept only *one* argument. Arrrrgh!!!
#data = bytes(data, 'utf-8') # converts to immutable byte
# string but... under p7
# bytes() wants only 1 arg
# ...a dummy, 2nd argument like encoding=None
# that does nothing would at least allow
# compatibility between Python2 and Python3.
left = len(self.cache)
fill = BLKBYTES - left
# if any cached data and any added new data will fill a
# full block, fill and compress
if left and datalen >= fill:
self.cache = self.cache + data[:fill]
self.t += BLKBITS # update counter
self._compress(self.cache)
self.cache = b''
data = data[fill:]
datalen -= fill
# compress new data until not enough for a full block
while datalen >= BLKBYTES:
self.t += BLKBITS # update counter
self._compress(data[:BLKBYTES])
data = data[BLKBYTES:]
datalen -= BLKBYTES
# cache all leftover bytes until next call to update()
if datalen > 0:
self.cache = self.cache + data[:datalen]
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def final(self, data=''):
""" finalize the hash -- pad and hash remaining data
returns hashval, the digest
"""
if self.state == 3:
# we have already finalized so simply return the
# previously calculated/stored hash value
return self.hash
if data:
self.update(data)
ZZ = b'\x00'
ZO = b'\x01'
OZ = b'\x80'
OO = b'\x81'
PADDING = OZ + ZZ*128 # pre-formatted padding data
# copy nb. bits hash in total as a 64-bit BE word
# copy nb. bits hash in total as a 128-bit BE word
tt = self.t + (len(self.cache) << 3)
if self.BLKBYTES == 64:
msglen = self._int2eightByte(tt)
else:
low = tt & self.MASK
high = tt >> self.WORDBITS
msglen = self._int2eightByte(high) + self._int2eightByte(low)
# size of block without the words at the end that count
# the number of bits, 55 or 111.
# Note: (((self.WORDBITS/8)*2)+1) equals ((self.WORDBITS>>2)+1)
sizewithout = self.BLKBYTES - ((self.WORDBITS>>2)+1)
if len(self.cache) == sizewithout:
# special case of one padding byte
self.t -= 8
if self.hashbitlen in [224, 384]:
self.update(OZ)
else:
self.update(OO)
else:
if len(self.cache) < sizewithout:
# enough space to fill the block
# use t=0 if no remaining data
if len(self.cache) == 0:
self.nullt=1
self.t -= (sizewithout - len(self.cache)) << 3
self.update(PADDING[:sizewithout - len(self.cache)])
else:
# NOT enough space, need 2 compressions
# ...add marker, pad with nulls and compress
self.t -= (self.BLKBYTES - len(self.cache)) << 3
self.update(PADDING[:self.BLKBYTES - len(self.cache)])
# ...now pad w/nulls leaving space for marker & bit count
self.t -= (sizewithout+1) << 3
self.update(PADDING[1:sizewithout+1]) # pad with zeroes
self.nullt = 1 # raise flag to set t=0 at the next _compress
# append a marker byte
if self.hashbitlen in [224, 384]:
self.update(ZZ)
else:
self.update(ZO)
self.t -= 8
# append the number of bits (long long)
self.t -= self.BLKBYTES
self.update(msglen)
hashval = []
if self.BLKBYTES == 64:
for h in self.h:
hashval.append(self._int2fourByte(h))
else:
for h in self.h:
hashval.append(self._int2eightByte(h))
self.hash = b''.join(hashval)[:self.hashbitlen >> 3]
self.state = 3
return self.hash
digest = final # may use digest() as a synonym for final()
# - - - - - - - - - - - - - - - - - - - - - - - - - - -
def hexdigest(self, data=''):
return hexlify(self.final(data)).decode('UTF-8')
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# utility functions
def _fourByte2int(self, bytestr): # see also long2byt() below
""" convert a 4-byte string to an int (long) """
return struct.unpack('!L', bytestr)[0]
def _eightByte2int(self, bytestr):
""" convert a 8-byte string to an int (long long) """
return struct.unpack('!Q', bytestr)[0]
def _int2fourByte(self, x): # see also long2byt() below
""" convert a number to a 4-byte string, high order
truncation possible (in Python x could be a BIGNUM)
"""
return struct.pack('!L', x)
def _int2eightByte(self, x):
""" convert a number to a 8-byte string, high order
truncation possible (in Python x could be a BIGNUM)
"""
return struct.pack('!Q', x)
#---------------------------------------------------------------
#---------------------------------------------------------------
#---------------------------------------------------------------
def blake_hash(data):
return BLAKE(256).digest(data)
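# Illustrative usage sketch (not part of the original module):
#   h = BLAKE(256)
#   h.update(b"abc")
#   h.hexdigest()  # matches the "abc" vector in the accompanying test file
# blake_hash(data) above is the one-shot equivalent of BLAKE(256).digest(data).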

View File

@@ -1,37 +0,0 @@
from blake256 import blake_hash
testVectors = [
["716f6e863f744b9ac22c97ec7b76ea5f5908bc5b2f67c61510bfc4751384ea7a", ""],
["43234ff894a9c0590d0246cfc574eb781a80958b01d7a2fa1ac73c673ba5e311", "a"],
["658c6d9019a1deddbcb3640a066dfd23471553a307ab941fd3e677ba887be329", "ab"],
["1833a9fa7cf4086bd5fda73da32e5a1d75b4c3f89d5c436369f9d78bb2da5c28", "abc"],
["35282468f3b93c5aaca6408582fced36e578f67671ed0741c332d68ac72d7aa2", "abcd"],
["9278d633efce801c6aa62987d7483d50e3c918caed7d46679551eed91fba8904", "abcde"],
["7a17ee5e289845adcafaf6ca1b05c4a281b232a71c7083f66c19ba1d1169a8d4", "abcdef"],
["ee8c7f94ff805cb2e644643010ea43b0222056420917ec70c3da764175193f8f", "abcdefg"],
["7b37c0876d29c5add7800a1823795a82b809fc12f799ff6a4b5e58d52c42b17e", "abcdefgh"],
["bdc514bea74ffbb9c3aa6470b08ceb80a88e313ad65e4a01457bbffd0acc86de", "abcdefghi"],
["12e3afb9739df8d727e93d853faeafc374cc55aedc937e5a1e66f5843b1d4c2e", "abcdefghij"],
["22297d373b751f581944bb26315133f6fda2f0bf60f65db773900f61f81b7e79", "Discard medicine more than two years old."],
["4d48d137bc9cf6d21415b805bf33f59320337d85c673998260e03a02a0d760cd", "He who has a shady past knows that nice guys finish last."],
["beba299e10f93e17d45663a6dc4b8c9349e4f5b9bac0d7832389c40a1b401e5c", "I wouldn't marry him with a ten foot pole."],
["42e082ae7f967781c6cd4e0ceeaeeb19fb2955adbdbaf8c7ec4613ac130071b3", "Free! Free!/A trip/to Mars/for 900/empty jars/Burma Shave"],
["207d06b205bfb359df91b48b6fd8aa6e4798b712d1cc5e91a254da9cef8684a3", "The days of the digital watch are numbered. -Tom Stoppard"],
["d56eab6927e371e2148b0788779aaf565d30567af2af822b6be3b90db9767a70", "Nepal premier won't resign."],
["01020709ca7fd10dc7756ce767d508d7206167d300b7a7ed76838a8547a7898c", "For every action there is an equal and opposite government program."],
["5569a6cc6535a66da221d8f6ad25008f28752d0343f3f1d757f1ecc9b1c61536", "His money is twice tainted: 'taint yours and 'taint mine."],
["8ff699b5ac7687c82600e89d0ff6cfa87e7179759184386971feb76fbae9975f", "There is no reason for any individual to have a computer in their home. -Ken Olsen, 1977"],
["f4b3a7c85a418b15ce330fd41ae0254b036ad48dd98aa37f0506a995ba9c6029", "It's a tiny change to the code and not completely disgusting. - Bob Manchek"],
["1ed94bab64fe560ef0983165fcb067e9a8a971c1db8e6fb151ff9a7c7fe877e3", "size: a.out: bad magic"],
["ff15b54992eedf9889f7b4bbb16692881aa01ed10dfc860fdb04785d8185cd3c", "The major problem is with sendmail. -Mark Horton"],
["8a0a7c417a47deec0b6474d8c247da142d2e315113a2817af3de8f45690d8652", "Give me a rock, paper and scissors and I will move the world. CCFestoon"],
["310d263fdab056a930324cdea5f46f9ea70219c1a74b01009994484113222a62", "If the enemy is within range, then so are you."],
["1aaa0903aa4cf872fe494c322a6e535698ea2140e15f26fb6088287aedceb6ba", "It's well we cannot hear the screams/That we create in others' dreams."],
["2eb81bcaa9e9185a7587a1b26299dcfb30f2a58a7f29adb584b969725457ad4f", "You remind me of a TV show, but that's all right: I watch it anyway."],
["c27b1683ef76e274680ab5492e592997b0d9d5ac5a5f4651b6036f64215256af", "C is as portable as Stonehedge!!"],
["3995cce8f32b174c22ffac916124bd095c80205d9d5f1bb08a155ac24b40d6cb", "Even if I could be Shakespeare, I think I should still choose to be Faraday. - A. Huxley"],
["496f7063f8bd479bf54e9d87e9ba53e277839ac7fdaecc5105f2879b58ee562f", "The fugacity of a constituent in a mixture of gases at a given temperature is proportional to its mole fraction. Lewis-Randall Rule"],
["2e0eff918940b01eea9539a02212f33ee84f77fab201f4287aa6167e4a1ed043", "How can you write a big system without C++? -Paul Glick"]]
for vectorSet in testVectors:
assert vectorSet[0] == blake_hash(vectorSet[1]).encode('hex')

View File

@@ -1,3 +0,0 @@
from .mnemonic import Mnemonic
__all__ = ["Mnemonic"]

View File

@@ -1,298 +0,0 @@
#
# Copyright (c) 2013 Pavol Rusnak
# Copyright (c) 2017 mruddy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import annotations
import hashlib
import hmac
import itertools
import os
import secrets
import typing as t
import unicodedata
PBKDF2_ROUNDS = 2048
class ConfigurationError(Exception):
pass
# Refactored code segments from <https://github.com/keis/base58>
def b58encode(v: bytes) -> str:
alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
p, acc = 1, 0
for c in reversed(v):
acc += p * c
p = p << 8
string = ""
while acc:
acc, idx = divmod(acc, 58)
string = alphabet[idx : idx + 1] + string
return string
class Mnemonic(object):
def __init__(self, language: str = "english", wordlist: list[str] | None = None):
self.radix = 2048
self.language = language
if wordlist is None:
d = os.path.join(os.path.dirname(__file__), f"wordlist/{language}.txt")
if os.path.exists(d) and os.path.isfile(d):
with open(d, "r", encoding="utf-8") as f:
wordlist = [w.strip() for w in f.readlines()]
else:
raise ConfigurationError("Language not detected")
if len(wordlist) != self.radix:
raise ConfigurationError(f"Wordlist must contain {self.radix} words.")
self.wordlist = wordlist
# Japanese must be joined by ideographic space
self.delimiter = "\u3000" if language == "japanese" else " "
@classmethod
def list_languages(cls) -> list[str]:
return [
f.split(".")[0]
for f in os.listdir(os.path.join(os.path.dirname(__file__), "wordlist"))
if f.endswith(".txt")
]
@staticmethod
def normalize_string(txt: t.AnyStr) -> str:
if isinstance(txt, bytes):
utxt = txt.decode("utf8")
elif isinstance(txt, str):
utxt = txt
else:
raise TypeError("String value expected")
return unicodedata.normalize("NFKD", utxt)
@classmethod
def detect_language(cls, code: str) -> str:
"""Scan the Mnemonic until the language becomes unambiguous, including as abbreviation prefixes.
Unfortunately, there are valid words that are ambiguous between languages, which are complete words
in one language and are prefixes in another:
english: abandon ... about
french: abandon ... aboutir
If prefixes remain ambiguous, require exactly one language where word(s) match exactly.
"""
code = cls.normalize_string(code)
possible = set(cls(lang) for lang in cls.list_languages())
words = set(code.split())
for word in words:
# possible languages have candidate(s) starting with the word/prefix
possible = set(
p for p in possible if any(c.startswith(word) for c in p.wordlist)
)
if not possible:
raise ConfigurationError(f"Language unrecognized for {word!r}")
if len(possible) == 1:
return possible.pop().language
# Multiple languages match: A prefix in many, but an exact match in one determines language.
complete = set()
for word in words:
exact = set(p for p in possible if word in p.wordlist)
if len(exact) == 1:
complete.update(exact)
if len(complete) == 1:
return complete.pop().language
raise ConfigurationError(
f"Language ambiguous between {', '.join(p.language for p in possible)}"
)
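# Illustrative sketch (example words, per the docstring above): "abandon" alone is
# ambiguous between english and french, but adding a word that matches exactly in
# only one wordlist resolves it:
#   Mnemonic.detect_language("abandon about")  -> "english"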
def generate(self, strength: int = 128) -> str:
"""
Create a new mnemonic using a random generated number as entropy.
As defined in BIP39, the entropy must be a multiple of 32 bits, and its size must be between 128 and 256 bits.
Therefore the possible values for `strength` are 128, 160, 192, 224 and 256.
If not provided, the default entropy length will be set to 128 bits.
The return is a list of words that encodes the generated entropy.
:param strength: Number of bytes used as entropy
:type strength: int
:return: A randomly generated mnemonic
:rtype: str
"""
if strength not in [128, 160, 192, 224, 256]:
raise ValueError(
"Invalid strength value. Allowed values are [128, 160, 192, 224, 256]."
)
return self.to_mnemonic(secrets.token_bytes(strength // 8))
# Adapted from <http://tinyurl.com/oxmn476>
def to_entropy(self, words: list[str] | str) -> bytearray:
if not isinstance(words, list):
words = words.split(" ")
if len(words) not in [12, 15, 18, 21, 24]:
raise ValueError(
"Number of words must be one of the following: [12, 15, 18, 21, 24], but it is not (%d)."
% len(words)
)
# Look up all the words in the list and construct the
# concatenation of the original entropy and the checksum.
concatLenBits = len(words) * 11
concatBits = [False] * concatLenBits
wordindex = 0
for word in words:
# Find the word's index in the wordlist
ndx = self.wordlist.index(self.normalize_string(word))
if ndx < 0:
raise LookupError('Unable to find "%s" in word list.' % word)
# Set the next 11 bits to the value of the index.
for ii in range(11):
concatBits[(wordindex * 11) + ii] = (ndx & (1 << (10 - ii))) != 0
wordindex += 1
checksumLengthBits = concatLenBits // 33
entropyLengthBits = concatLenBits - checksumLengthBits
# Extract original entropy as bytes.
entropy = bytearray(entropyLengthBits // 8)
for ii in range(len(entropy)):
for jj in range(8):
if concatBits[(ii * 8) + jj]:
entropy[ii] |= 1 << (7 - jj)
# Take the digest of the entropy.
hashBytes = hashlib.sha256(entropy).digest()
hashBits = list(
itertools.chain.from_iterable(
[c & (1 << (7 - i)) != 0 for i in range(8)] for c in hashBytes
)
)
# Check all the checksum bits.
for i in range(checksumLengthBits):
if concatBits[entropyLengthBits + i] != hashBits[i]:
raise ValueError("Failed checksum.")
return entropy
def to_mnemonic(self, data: bytes) -> str:
if len(data) not in [16, 20, 24, 28, 32]:
raise ValueError(
f"Data length should be one of the following: [16, 20, 24, 28, 32], but it is not {len(data)}."
)
h = hashlib.sha256(data).hexdigest()
b = (
bin(int.from_bytes(data, byteorder="big"))[2:].zfill(len(data) * 8)
+ bin(int(h, 16))[2:].zfill(256)[: len(data) * 8 // 32]
)
result = []
for i in range(len(b) // 11):
idx = int(b[i * 11 : (i + 1) * 11], 2)
result.append(self.wordlist[idx])
return self.delimiter.join(result)
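# Round-trip property of the two methods above (illustrative note, not part of
# the original module): for any 16/20/24/28/32-byte value `data` and instance
# `m`, m.to_entropy(m.to_mnemonic(data)) == bytearray(data), since to_mnemonic
# appends the SHA256-derived checksum bits that to_entropy strips and verifies.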
def check(self, mnemonic: str) -> bool:
mnemonic_list = self.normalize_string(mnemonic).split(" ")
# list of valid mnemonic lengths
if len(mnemonic_list) not in [12, 15, 18, 21, 24]:
return False
try:
idx = map(
lambda x: bin(self.wordlist.index(x))[2:].zfill(11), mnemonic_list
)
b = "".join(idx)
except ValueError:
return False
l = len(b) # noqa: E741
d = b[: l // 33 * 32]
h = b[-l // 33 :]
nd = int(d, 2).to_bytes(l // 33 * 4, byteorder="big")
nh = bin(int(hashlib.sha256(nd).hexdigest(), 16))[2:].zfill(256)[: l // 33]
return h == nh
def expand_word(self, prefix: str) -> str:
if prefix in self.wordlist:
return prefix
else:
matches = [word for word in self.wordlist if word.startswith(prefix)]
if len(matches) == 1: # matched exactly one word in the wordlist
return matches[0]
else:
# exact match not found.
# this is not a validation routine, just return the input
return prefix
def expand(self, mnemonic: str) -> str:
return " ".join(map(self.expand_word, mnemonic.split(" ")))
@classmethod
def to_seed(cls, mnemonic: str, passphrase: str = "") -> bytes:
mnemonic = cls.normalize_string(mnemonic)
passphrase = cls.normalize_string(passphrase)
passphrase = "mnemonic" + passphrase
mnemonic_bytes = mnemonic.encode("utf-8")
passphrase_bytes = passphrase.encode("utf-8")
stretched = hashlib.pbkdf2_hmac(
"sha512", mnemonic_bytes, passphrase_bytes, PBKDF2_ROUNDS
)
return stretched[:64]
@staticmethod
def to_hd_master_key(seed: bytes, testnet: bool = False) -> str:
if len(seed) != 64:
raise ValueError("Provided seed should have length of 64")
# Compute HMAC-SHA512 of seed
seed = hmac.new(b"Bitcoin seed", seed, digestmod=hashlib.sha512).digest()
# Serialization format can be found at: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format
xprv = b"\x04\x88\xad\xe4" # Version for private mainnet
if testnet:
xprv = b"\x04\x35\x83\x94" # Version for private testnet
xprv += b"\x00" * 9 # Depth, parent fingerprint, and child number
xprv += seed[32:] # Chain code
xprv += b"\x00" + seed[:32] # Master key
# Double hash using SHA256
hashed_xprv = hashlib.sha256(xprv).digest()
hashed_xprv = hashlib.sha256(hashed_xprv).digest()
# Append 4 bytes of checksum
xprv += hashed_xprv[:4]
# Return base58
return b58encode(xprv)
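# Illustrative usage sketch (not part of the original module); assumes the
# bundled "english" wordlist file is present next to this module.
def _example_usage() -> None:
    m = Mnemonic("english")
    words = m.generate(strength=128)  # 12-word phrase from 128 bits of entropy
    assert m.check(words)
    seed = Mnemonic.to_seed(words, passphrase="")  # 64-byte BIP39 seed
    xprv = Mnemonic.to_hd_master_key(seed)  # base58-encoded BIP32 master key
    print(words)
    print(xprv)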
def main() -> None:
import sys
if len(sys.argv) > 1:
hex_data = sys.argv[1]
else:
hex_data = sys.stdin.readline().strip()
data = bytes.fromhex(hex_data)
m = Mnemonic("english")
print(m.to_mnemonic(data))
if __name__ == "__main__":
main()

View File

@@ -1 +0,0 @@
# Marker file for PEP 561.

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2019 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utility functions related to output descriptors"""
import re
INPUT_CHARSET = "0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ "
CHECKSUM_CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
GENERATOR = [0xf5dee51989, 0xa9fdca3312, 0x1bab10e32d, 0x3706b1677a, 0x644d626ffd]
def descsum_polymod(symbols):
"""Internal function that computes the descriptor checksum."""
chk = 1
for value in symbols:
top = chk >> 35
chk = (chk & 0x7ffffffff) << 5 ^ value
for i in range(5):
chk ^= GENERATOR[i] if ((top >> i) & 1) else 0
return chk
def descsum_expand(s):
"""Internal function that does the character to symbol expansion"""
groups = []
symbols = []
for c in s:
if not c in INPUT_CHARSET:
return None
v = INPUT_CHARSET.find(c)
symbols.append(v & 31)
groups.append(v >> 5)
if len(groups) == 3:
symbols.append(groups[0] * 9 + groups[1] * 3 + groups[2])
groups = []
if len(groups) == 1:
symbols.append(groups[0])
elif len(groups) == 2:
symbols.append(groups[0] * 3 + groups[1])
return symbols
def descsum_create(s):
"""Add a checksum to a descriptor without"""
symbols = descsum_expand(s) + [0, 0, 0, 0, 0, 0, 0, 0]
checksum = descsum_polymod(symbols) ^ 1
return s + '#' + ''.join(CHECKSUM_CHARSET[(checksum >> (5 * (7 - i))) & 31] for i in range(8))
def descsum_check(s, require=True):
"""Verify that the checksum is correct in a descriptor"""
if not '#' in s:
return not require
if s[-9] != '#':
return False
if not all(x in CHECKSUM_CHARSET for x in s[-8:]):
return False
symbols = descsum_expand(s[:-9]) + [CHECKSUM_CHARSET.find(x) for x in s[-8:]]
return descsum_polymod(symbols) == 1
def drop_origins(s):
'''Drop the key origins from a descriptor'''
desc = re.sub(r'\[.+?\]', '', s)
if '#' in s:
desc = desc[:desc.index('#')]
return descsum_create(desc)
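# Illustrative round-trip (not part of the original file); the descriptor below
# is a hypothetical example built around the well-known secp256k1 generator
# point's compressed public key.
def _descsum_example() -> None:
    desc = "pkh(0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798)"
    desc_with_checksum = descsum_create(desc)
    assert descsum_check(desc_with_checksum)
    assert not descsum_check(desc)  # no '#' checksum present, so it fails when required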

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -5,22 +5,20 @@
"""Helpful routines for regression testing."""
from base64 import b64encode
from binascii import unhexlify
from decimal import Decimal, ROUND_DOWN
from subprocess import CalledProcessError
import hashlib
import inspect
import json
import logging
import os
import pathlib
import platform
import random
import re
import time
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
from collections.abc import Callable
from typing import Optional
from io import BytesIO
logger = logging.getLogger("TestFramework.utils")
@@ -30,46 +28,23 @@ logger = logging.getLogger("TestFramework.utils")
def assert_approx(v, vexp, vspan=0.00001):
"""Assert that `v` is within `vspan` of `vexp`"""
if isinstance(v, Decimal) or isinstance(vexp, Decimal):
v=Decimal(v)
vexp=Decimal(vexp)
vspan=Decimal(vspan)
if v < vexp - vspan:
raise AssertionError("%s < [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
if v > vexp + vspan:
raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
def assert_fee_amount(fee, tx_size, feerate_BTC_kvB):
"""Assert the fee is in range."""
assert isinstance(tx_size, int)
target_fee = get_fee(tx_size, feerate_BTC_kvB)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
high_fee = get_fee(tx_size + 2, feerate_BTC_kvB)
if fee > high_fee:
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
def summarise_dict_differences(thing1, thing2):
if not isinstance(thing1, dict) or not isinstance(thing2, dict):
return thing1, thing2
d1, d2 = {}, {}
for k in sorted(thing1.keys()):
if k not in thing2:
d1[k] = thing1[k]
elif thing1[k] != thing2[k]:
d1[k], d2[k] = summarise_dict_differences(thing1[k], thing2[k])
for k in sorted(thing2.keys()):
if k not in thing1:
d2[k] = thing2[k]
return d1, d2
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 and not args and isinstance(thing1, dict) and isinstance(thing2, dict):
d1,d2 = summarise_dict_differences(thing1, thing2)
raise AssertionError("not(%s == %s)\n in particular not(%s == %s)" % (thing1, thing2, d1, d2))
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
@@ -104,7 +79,7 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
raise AssertionError("No exception raised")
def assert_raises_process_error(returncode: int, output: str, fun: Callable, *args, **kwds):
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
@@ -112,9 +87,9 @@ def assert_raises_process_error(returncode: int, output: str, fun: Callable, *ar
no CalledProcessError was raised or if the return code and output are not as expected.
Args:
returncode: the process return code.
output: [a substring of] the process output.
fun: the function to call. This should execute a process.
returncode (int): the process return code.
output (string): [a substring of] the process output.
fun (function): the function to call. This should execute a process.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
@@ -129,7 +104,7 @@ def assert_raises_process_error(returncode: int, output: str, fun: Callable, *ar
raise AssertionError("No exception raised")
def assert_raises_rpc_error(code: Optional[int], message: Optional[str], fun: Callable, *args, **kwds):
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
@@ -137,11 +112,11 @@ def assert_raises_rpc_error(code: Optional[int], message: Optional[str], fun: Ca
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code: the error code returned by the RPC call (defined in src/rpc/protocol.h).
Set to None if checking the error code is not required.
message: [a substring of] the error string returned by the RPC call.
Set to None if checking the error string is not required.
fun: the function to call. This should be the name of an RPC.
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
@@ -228,45 +203,29 @@ def check_json_precision():
raise RuntimeError("JSON encode/decode loses precision")
def EncodeDecimal(o):
if isinstance(o, Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def ceildiv(a, b):
"""
Divide 2 ints and round up to next int rather than round down
Implementation requires python integers, which have a // operator that does floor division.
Other types like decimal.Decimal whose // operator truncates towards 0 will not work.
"""
assert isinstance(a, int)
assert isinstance(b, int)
return -(-a // b)
def get_fee(tx_size, feerate_btc_kvb):
"""Calculate the fee in BTC given a feerate is BTC/kvB. Reflects CFeeRate::GetFee"""
feerate_sat_kvb = int(feerate_btc_kvb * Decimal(1e8)) # Fee in sat/kvb as an int to avoid float precision errors
target_fee_sat = ceildiv(feerate_sat_kvb * tx_size, 1000) # Round calculated fee up to nearest sat
return target_fee_sat / Decimal(1e8) # Return result in BTC
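# Worked example (illustrative, not part of the original file):
# get_fee(100, Decimal("0.00001")) -> a feerate of 1000 sat/kvB over 100 vbytes
# is exactly 100 sat, i.e. Decimal("0.000001") BTC; any fractional satoshi
# would be rounded up by ceildiv, matching CFeeRate::GetFee.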
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
def wait_until_helper_internal(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
"""Sleep until the predicate resolves to be True.
Warning: Note that this method is not recommended to be used in tests as it is
not aware of the context of the test framework. Using the `wait_until()` members
from `BitcoinTestFramework` or `P2PInterface` class ensures the timeout is
properly scaled. Furthermore, `wait_until()` from `P2PInterface` class in
`p2p.py` has a preset lock.
"""
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
timeout = timeout * timeout_factor
@@ -294,16 +253,6 @@ def wait_until_helper_internal(predicate, *, attempts=float('inf'), timeout=floa
raise RuntimeError('Unreachable')
def sha256sum_file(filename):
h = hashlib.sha256()
with open(filename, 'rb') as f:
d = f.read(4096)
while len(d) > 0:
h.update(d)
d = f.read(4096)
return h.digest()
# RPC/P2P connection constants and functions
############################################
@@ -320,15 +269,15 @@ class PortSeed:
n = None
def get_rpc_proxy(url: str, node_number: int, *, timeout: Optional[int]=None, coveragedir: Optional[str]=None) -> coverage.AuthServiceProxyWrapper:
def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
Args:
url: URL of the RPC server to call
node_number: the node number (or id) that this calls to
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout: HTTP timeout in seconds
coveragedir: Directory
timeout (int): HTTP timeout in seconds
coveragedir (str): Directory
Returns:
AuthServiceProxy. convenience object for making RPC calls.
@@ -339,10 +288,11 @@ def get_rpc_proxy(url: str, node_number: int, *, timeout: Optional[int]=None, co
proxy_kwargs['timeout'] = int(timeout)
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, url, coverage_logfile)
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
@@ -371,76 +321,38 @@ def rpc_url(datadir, i, chain, rpchost):
################
def initialize_datadir(dirname, n, chain, disable_autoconnect=True):
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
os.makedirs(datadir)
write_config(os.path.join(datadir, "particl.conf"), n=n, chain=chain, disable_autoconnect=disable_autoconnect)
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def write_config(config_path, *, n, chain, extra_config="", disable_autoconnect=True):
# Translate chain subdirectory name to config name
if chain == 'testnet':
# Translate chain name to config name
if chain == 'testnet3':
chain_name_conf_arg = 'testnet'
chain_name_conf_section = 'test'
else:
chain_name_conf_arg = chain
chain_name_conf_section = chain
with open(config_path, 'w', encoding='utf8') as f:
if chain_name_conf_arg:
f.write("{}=1\n".format(chain_name_conf_arg))
if chain_name_conf_section:
f.write("[{}]\n".format(chain_name_conf_section))
with open(os.path.join(datadir, "particl.conf"), 'w', encoding='utf8') as f:
f.write("{}=1\n".format(chain_name_conf_arg))
f.write("[{}]\n".format(chain_name_conf_section))
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
# Disable server-side timeouts to avoid intermittent issues
f.write("rpcservertimeout=99000\n")
f.write("rpcdoccheck=1\n")
f.write("fallbackfee=0.0002\n")
f.write("server=1\n")
f.write("keypool=1\n")
f.write("discover=0\n")
f.write("dnsseed=0\n")
f.write("fixedseeds=0\n")
f.write("listenonion=0\n")
# Increase peertimeout to avoid disconnects while using mocktime.
# peertimeout is measured in mock time, so setting it large enough to
# cover any duration in mock time is sufficient. It can be overridden
# in tests.
f.write("peertimeout=999999999\n")
f.write("printtoconsole=0\n")
f.write("upnp=0\n")
f.write("natpmp=0\n")
f.write("shrinkdebugfile=0\n")
f.write("deprecatedrpc=create_bdb\n") # Required to run the tests
# To improve SQLite wallet performance so that the tests don't timeout, use -unsafesqlitesync
f.write("unsafesqlitesync=1\n")
if disable_autoconnect:
f.write("connect=0\n")
f.write(extra_config)
os.makedirs(os.path.join(datadir, 'stderr'), exist_ok=True)
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
def get_datadir_path(dirname, n):
return pathlib.Path(dirname) / f"node{n}"
def get_temp_default_datadir(temp_dir: pathlib.Path) -> tuple[dict, pathlib.Path]:
"""Return os-specific environment variables that can be set to make the
GetDefaultDataDir() function return a datadir path under the provided
temp_dir, as well as the complete path it would return."""
if platform.system() == "Windows":
env = dict(APPDATA=str(temp_dir))
datadir = temp_dir / "Particl"
else:
env = dict(HOME=str(temp_dir))
if platform.system() == "Darwin":
datadir = temp_dir / "Library/Application Support/Particl"
else:
datadir = temp_dir / ".particl"
return env, datadir
return os.path.join(dirname, "node" + str(n))
def append_config(datadir, options):
@@ -483,7 +395,7 @@ def delete_cookie_file(datadir, chain):
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getdeploymentinfo()['deployments'][key]['active']
return node.getblockchaininfo()['softforks'][key]['active']
def set_node_times(nodes, t):
@@ -491,51 +403,208 @@ def set_node_times(nodes, t):
node.setmocktime(t)
def check_node_connections(*, node, num_in, num_out):
info = node.getnetworkinfo()
assert_equal(info["connections_in"], num_in)
assert_equal(info["connections_out"], num_out)
def disconnect_nodes(from_connection, node_num):
def get_peer_ids():
result = []
for peer in from_connection.getpeerinfo():
if "testnode{}".format(node_num) in peer['subver']:
result.append(peer['id'])
return result
peer_ids = get_peer_ids()
if not peer_ids:
logger.warning("disconnect_nodes: {} and {} were not connected".format(
from_connection.index,
node_num,
))
return
for peer_id in peer_ids:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
wait_until(lambda: not get_peer_ids(), timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
# See comments in net_processing:
# * Must have a version message before anything else
# * Must have a verack message before anything else
wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
# Transaction/Block functions
#############################
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1, blockhash)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert confirmations_required >= 0
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out + fee
change = amount_in - amount
if change > amount * 2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment * random.randint(0, fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount + fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransactionwithwallet(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], 0)
return (txid, signresult["hex"], fee)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
to_generate = int(0.5 * count) + 101
while to_generate > 0:
node.generate(min(25, to_generate))
to_generate -= 25
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({"txid": t["txid"], "vout": t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert len(utxos) >= count
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions). The
# total serialized size of the txouts is about 66k vbytes.
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" # OP_RETURN OP_PUSH2 512 bytes
for i in range(512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = []
from .messages import CTxOut
from .script import CScript, OP_RETURN
txouts = [CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, b'\x01'*67437]))]
assert_equal(sum([len(txout.serialize()) for txout in txouts]), 67456)
txout = CTxOut()
txout.nValue = 0
txout.scriptPubKey = hex_str_to_bytes(script_pubkey)
for k in range(128):
txouts.append(txout)
return txouts
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(mini_wallet, node, fee, tx_batch_size, txouts, utxos=None):
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
use_internal_utxos = utxos is None
for _ in range(tx_batch_size):
tx = mini_wallet.create_self_transfer(
utxo_to_spend=None if use_internal_utxos else utxos.pop(),
fee=fee,
)["tx"]
tx.vout.extend(txouts)
res = node.testmempoolaccept([tx.serialize().hex()])[0]
assert_equal(res['fees']['base'], fee)
txids.append(node.sendrawtransaction(tx.serialize().hex()))
from .messages import CTransaction
for _ in range(num):
t = utxos.pop()
inputs = [{"txid": t["txid"], "vout": t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(rawtx)))
for txout in txouts:
tx.vout.append(txout)
newtx = tx.serialize().hex()
signresult = node.signrawtransactionwithwallet(newtx, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], 0)
txids.append(txid)
return txids
def mine_large_block(test_framework, mini_wallet, node):
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
# and 14 of them is close to the 1MB block limit
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(mini_wallet, node, fee, 14, txouts)
test_framework.generate(node, 1)
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
def find_vout_for_address(node, txid, addr):
@@ -545,6 +614,11 @@ def find_vout_for_address(node, txid, addr):
"""
tx = node.getrawtransaction(txid, True)
for i in range(len(tx["vout"])):
if addr == tx["vout"][i]["scriptPubKey"]["address"]:
return i
scriptPubKey = tx["vout"][i]["scriptPubKey"]
if "addresses" in scriptPubKey:
if any([addr == a for a in scriptPubKey["addresses"]]):
return i
elif "address" in scriptPubKey:
if addr == scriptPubKey["address"]:
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))

File diff suppressed because it is too large Load Diff

View File

@@ -1,275 +1,230 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2024 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import time
from sqlalchemy.orm import scoped_session
from .db import (
AutomationStrategy,
BidState,
Concepts,
create_table,
CURRENT_DB_DATA_VERSION,
AutomationStrategy,
CURRENT_DB_VERSION,
extract_schema,
)
CURRENT_DB_DATA_VERSION)
from .basicswap_util import (
BidStates,
canAcceptBidState,
isActiveBidState,
isErrorBidState,
isFailingBidState,
isFinalBidState,
strBidState,
)
def addBidState(self, state, now, cursor):
self.add(
BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
in_error=isErrorBidState(state),
swap_failed=isFailingBidState(state),
swap_ended=isFinalBidState(state),
can_accept=canAcceptBidState(state),
label=strBidState(state),
created_at=now,
),
cursor,
)
isActiveBidState)
def upgradeDatabaseData(self, data_version):
if data_version >= CURRENT_DB_DATA_VERSION:
return
self.log.info(
f"Upgrading database records from version {data_version} to {CURRENT_DB_DATA_VERSION}."
)
self.log.info('Upgrading database records from version %d to %d.', data_version, CURRENT_DB_DATA_VERSION)
with self.mxDB:
try:
session = scoped_session(self.session_factory)
cursor = self.openDB()
try:
now = int(time.time())
now = int(time.time())
if data_version < 1:
self.add(
AutomationStrategy(
if data_version < 1:
session.add(AutomationStrategy(
active_ind=1,
label="Accept All",
label='Accept All',
type_ind=Concepts.OFFER,
data=json.dumps(
{"exact_rate_only": True, "max_concurrent_bids": 5}
).encode("utf-8"),
data=json.dumps({'exact_rate_only': True,
'max_concurrent_bids': 5}).encode('utf-8'),
only_known_identities=False,
created_at=now,
),
cursor,
)
self.add(
AutomationStrategy(
created_at=now))
session.add(AutomationStrategy(
active_ind=1,
label="Accept Known",
label='Accept Known',
type_ind=Concepts.OFFER,
data=json.dumps(
{"exact_rate_only": True, "max_concurrent_bids": 5}
).encode("utf-8"),
data=json.dumps({'exact_rate_only': True,
'max_concurrent_bids': 5}).encode('utf-8'),
only_known_identities=True,
note="Accept bids from identities with previously successful swaps only",
created_at=now,
),
cursor,
)
note='Accept bids from identities with previously successful swaps only',
created_at=now))
for state in BidStates:
addBidState(self, state, now, cursor)
if data_version > 0 and data_version < 2:
for state in (
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS,
BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX,
):
self.add(
BidState(
for state in BidStates:
session.add(BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
label=strBidState(state),
created_at=now,
),
cursor,
)
if data_version > 0 and data_version < 6:
for state in BidStates:
in_error = isErrorBidState(state)
swap_failed = isFailingBidState(state)
swap_ended = isFinalBidState(state)
can_accept = canAcceptBidState(state)
cursor.execute(
"UPDATE bidstates SET can_accept = :can_accept, in_error = :in_error, swap_failed = :swap_failed, swap_ended = :swap_ended WHERE state_id = :state_id",
{
"in_error": in_error,
"swap_failed": swap_failed,
"swap_ended": swap_ended,
"can_accept": can_accept,
"state_id": int(state),
},
)
if data_version > 0 and data_version < 4:
for state in (
BidStates.BID_REQUEST_SENT,
BidStates.BID_REQUEST_ACCEPTED,
):
addBidState(self, state, now, cursor)
created_at=now))
if data_version > 0 and data_version < 5:
for state in (
BidStates.BID_EXPIRED,
BidStates.BID_AACCEPT_DELAY,
BidStates.BID_AACCEPT_FAIL,
):
addBidState(self, state, now, cursor)
if data_version < 2:
for state in (BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_TX_SIGS, BidStates.XMR_SWAP_MSG_SCRIPT_LOCK_SPEND_TX):
session.add(BidState(
active_ind=1,
state_id=int(state),
in_progress=isActiveBidState(state),
label=strBidState(state),
created_at=now))
self.db_data_version = CURRENT_DB_DATA_VERSION
self.setIntKV("db_data_version", self.db_data_version, cursor)
self.commitDB()
self.log.info(f"Upgraded database records to version {self.db_data_version}")
finally:
self.closeDB(cursor, commit=False)
self.db_data_version = CURRENT_DB_DATA_VERSION
self.setIntKVInSession('db_data_version', self.db_data_version, session)
session.commit()
self.log.info('Upgraded database records to version {}'.format(self.db_data_version))
finally:
session.close()
session.remove()
def upgradeDatabase(self, db_version):
if self._force_db_upgrade is False and db_version >= CURRENT_DB_VERSION:
if db_version >= CURRENT_DB_VERSION:
return
self.log.info(
f"Upgrading database from version {db_version} to {CURRENT_DB_VERSION}."
)
self.log.info('Upgrading database from version %d to %d.', db_version, CURRENT_DB_VERSION)
# db_version, tablename, oldcolumnname, newcolumnname
rename_columns = [
(13, "actions", "event_id", "action_id"),
(13, "actions", "event_type", "action_type"),
(13, "actions", "event_data", "action_data"),
(
14,
"xmr_swaps",
"coin_a_lock_refund_spend_tx_msg_id",
"coin_a_lock_spend_tx_msg_id",
),
]
while True:
session = scoped_session(self.session_factory)
expect_schema = extract_schema()
have_tables = {}
try:
cursor = self.openDB()
current_version = db_version
if current_version == 6:
session.execute('ALTER TABLE bids ADD COLUMN security_token BLOB')
session.execute('ALTER TABLE offers ADD COLUMN security_token BLOB')
db_version += 1
elif current_version == 7:
session.execute('ALTER TABLE transactions ADD COLUMN block_hash BLOB')
session.execute('ALTER TABLE transactions ADD COLUMN block_height INTEGER')
session.execute('ALTER TABLE transactions ADD COLUMN block_time INTEGER')
db_version += 1
elif current_version == 8:
session.execute('''
CREATE TABLE wallets (
record_id INTEGER NOT NULL,
coin_id INTEGER,
wallet_name VARCHAR,
wallet_data VARCHAR,
balance_type INTEGER,
created_at BIGINT,
PRIMARY KEY (record_id))''')
db_version += 1
elif current_version == 9:
session.execute('ALTER TABLE wallets ADD COLUMN wallet_data VARCHAR')
db_version += 1
elif current_version == 10:
session.execute('ALTER TABLE smsgaddresses ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN created_at INTEGER')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN note VARCHAR')
session.execute('ALTER TABLE smsgaddresses ADD COLUMN pubkey VARCHAR')
session.execute('UPDATE smsgaddresses SET active_ind = 1, created_at = 1')
for rename_column in rename_columns:
dbv, table_name, colname_from, colname_to = rename_column
if db_version < dbv:
cursor.execute(
f"ALTER TABLE {table_name} RENAME COLUMN {colname_from} TO {colname_to}"
)
session.execute('ALTER TABLE offers ADD COLUMN addr_to VARCHAR')
session.execute(f'UPDATE offers SET addr_to = "{self.network_addr}"')
db_version += 1
elif current_version == 11:
session.execute('ALTER TABLE bids ADD COLUMN chain_a_height_start INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN chain_b_height_start INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN protocol_version INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN protocol_version INTEGER')
session.execute('ALTER TABLE transactions ADD COLUMN tx_data BLOB')
db_version += 1
elif current_version == 12:
session.execute('''
CREATE TABLE knownidentities (
record_id INTEGER NOT NULL,
address VARCHAR,
label VARCHAR,
publickey BLOB,
num_sent_bids_successful INTEGER,
num_recv_bids_successful INTEGER,
num_sent_bids_rejected INTEGER,
num_recv_bids_rejected INTEGER,
num_sent_bids_failed INTEGER,
num_recv_bids_failed INTEGER,
note VARCHAR,
updated_at BIGINT,
created_at BIGINT,
PRIMARY KEY (record_id))''')
session.execute('ALTER TABLE bids ADD COLUMN reject_code INTEGER')
session.execute('ALTER TABLE bids ADD COLUMN rate INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN amount_negotiable INTEGER')
session.execute('ALTER TABLE offers ADD COLUMN rate_negotiable INTEGER')
db_version += 1
elif current_version == 13:
db_version += 1
session.execute('''
CREATE TABLE automationstrategies (
record_id INTEGER NOT NULL,
active_ind INTEGER,
label VARCHAR,
type_ind INTEGER,
only_known_identities INTEGER,
num_concurrent INTEGER,
data BLOB,
query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;"
tables = cursor.execute(query).fetchall()
for table in tables:
table_name = table[0]
if table_name in ("sqlite_sequence",):
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_table = {}
have_columns = {}
query = "SELECT * FROM PRAGMA_TABLE_INFO(:table_name) ORDER BY cid DESC;"
columns = cursor.execute(query, {"table_name": table_name}).fetchall()
for column in columns:
cid, name, data_type, notnull, default_value, primary_key = column
have_columns[name] = {"type": data_type, "primary_key": primary_key}
session.execute('''
CREATE TABLE automationlinks (
record_id INTEGER NOT NULL,
active_ind INTEGER,
have_table["columns"] = have_columns
linked_type INTEGER,
linked_id BLOB,
strategy_id INTEGER,
cursor.execute(f"PRAGMA INDEX_LIST('{table_name}');")
indices = cursor.fetchall()
for index in indices:
seq, index_name, unique, origin, partial = index
data BLOB,
repeat_limit INTEGER,
repeat_count INTEGER,
if origin == "pk": # Created by a PRIMARY KEY constraint
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
cursor.execute(f"PRAGMA INDEX_INFO('{index_name}');")
index_info = cursor.fetchall()
session.execute('''
CREATE TABLE history (
record_id INTEGER NOT NULL,
concept_type INTEGER,
concept_id INTEGER,
changed_data BLOB,
add_index = {"index_name": index_name}
for index_columns in index_info:
seqno, cid, name = index_columns
if origin == "u": # Created by a UNIQUE constraint
have_columns[name]["unique"] = 1
else:
if "column_1" not in add_index:
add_index["column_1"] = name
elif "column_2" not in add_index:
add_index["column_2"] = name
elif "column_3" not in add_index:
add_index["column_3"] = name
else:
raise RuntimeError("Add more index columns.")
if origin == "c":
if "indices" not in table:
have_table["indices"] = []
have_table["indices"].append(add_index)
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_tables[table_name] = have_table
session.execute('''
CREATE TABLE bidstates (
record_id INTEGER NOT NULL,
active_ind INTEGER,
state_id INTEGER,
label VARCHAR,
in_progress INTEGER,
for table_name, table in expect_schema.items():
if table_name not in have_tables:
self.log.info(f"Creating table {table_name}.")
create_table(cursor, table_name, table)
continue
note VARCHAR,
created_at BIGINT,
PRIMARY KEY (record_id))''')
have_table = have_tables[table_name]
have_columns = have_table["columns"]
for colname, column in table["columns"].items():
if colname not in have_columns:
col_type = column["type"]
self.log.info(f"Adding column {colname} to table {table_name}.")
cursor.execute(
f"ALTER TABLE {table_name} ADD COLUMN {colname} {col_type}"
)
indices = table.get("indices", [])
have_indices = have_table.get("indices", [])
for index in indices:
index_name = index["index_name"]
if not any(
have_idx.get("index_name") == index_name
for have_idx in have_indices
):
self.log.info(f"Adding index {index_name} to table {table_name}.")
column_1 = index["column_1"]
column_2 = index.get("column_2", None)
column_3 = index.get("column_3", None)
query: str = (
f"CREATE INDEX {index_name} ON {table_name} ({column_1}"
)
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ")"
cursor.execute(query)
session.execute('ALTER TABLE wallets ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE knownidentities ADD COLUMN active_ind INTEGER')
session.execute('ALTER TABLE eventqueue RENAME TO actions')
session.execute('ALTER TABLE actions RENAME COLUMN event_id TO action_id')
session.execute('ALTER TABLE actions RENAME COLUMN event_type TO action_type')
session.execute('ALTER TABLE actions RENAME COLUMN event_data TO action_data')
elif current_version == 14:
db_version += 1
session.execute('ALTER TABLE xmr_swaps ADD COLUMN coin_a_lock_release_msg_id BLOB')
session.execute('ALTER TABLE xmr_swaps RENAME COLUMN coin_a_lock_refund_spend_tx_msg_id TO coin_a_lock_spend_tx_msg_id')
if CURRENT_DB_VERSION != db_version:
self.db_version = CURRENT_DB_VERSION
self.setIntKV("db_version", CURRENT_DB_VERSION, cursor)
self.log.info(f"Upgraded database to version {self.db_version}")
self.commitDB()
except Exception as e:
self.log.error(f"Upgrade failed {e}")
self.rollbackDB()
finally:
self.closeDB(cursor, commit=False)
if current_version != db_version:
self.db_version = db_version
self.setIntKVInSession('db_version', db_version, session)
session.commit()
session.close()
session.remove()
self.log.info('Upgraded database to version {}'.format(self.db_version))
continue
break
if db_version != CURRENT_DB_VERSION:
raise ValueError('Unable to upgrade database.')

View File

@@ -1,137 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2023-2024 The Basicswap Developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .db import (
Concepts,
)
def remove_expired_data(self, time_offset: int = 0):
now: int = self.getTime()
try:
cursor = self.openDB()
active_bids_insert: str = self.activeBidsQueryStr("", "b2")
query_str = f"""
SELECT o.offer_id FROM offers o
WHERE o.expire_at <= :expired_at AND 0 = (SELECT COUNT(*) FROM bids b2 WHERE b2.offer_id = o.offer_id AND {active_bids_insert})
"""
num_offers = 0
num_bids = 0
offer_rows = cursor.execute(
query_str, {"now": now, "expired_at": now - time_offset}
)
for offer_row in offer_rows:
num_offers += 1
bid_rows = cursor.execute(
"SELECT bids.bid_id FROM bids WHERE bids.offer_id = :offer_id",
{"offer_id": offer_row[0]},
)
for bid_row in bid_rows:
num_bids += 1
cursor.execute(
"DELETE FROM transactions WHERE transactions.bid_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :bid_id",
{"type_ind": int(Concepts.BID), "bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :bid_id",
{"type_ind": int(Concepts.BID), "bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :bid_id",
{"type_ind": int(Concepts.BID), "bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :bid_id",
{"type_ind": int(Concepts.BID), "bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM xmr_swaps WHERE xmr_swaps.bid_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM actions WHERE actions.linked_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM addresspool WHERE addresspool.bid_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM xmr_split_data WHERE xmr_split_data.bid_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM bids WHERE bids.bid_id = :bid_id",
{"bid_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :linked_id",
{"type_ind": int(Concepts.BID), "linked_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM direct_message_route_links WHERE linked_type = :type_ind AND linked_id = :linked_id",
{"type_ind": int(Concepts.BID), "linked_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :offer_id",
{"type_ind": int(Concepts.OFFER), "offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM automationlinks WHERE automationlinks.linked_type = :type_ind AND automationlinks.linked_id = :offer_id",
{"type_ind": int(Concepts.OFFER), "offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM prefunded_transactions WHERE prefunded_transactions.linked_type = :type_ind AND prefunded_transactions.linked_id = :offer_id",
{"type_ind": int(Concepts.OFFER), "offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM history WHERE history.concept_type = :type_ind AND history.concept_id = :offer_id",
{"type_ind": int(Concepts.OFFER), "offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM xmr_offers WHERE xmr_offers.offer_id = :offer_id",
{"offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM sentoffers WHERE sentoffers.offer_id = :offer_id",
{"offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM actions WHERE actions.linked_id = :offer_id",
{"offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM offers WHERE offers.offer_id = :offer_id",
{"offer_id": offer_row[0]},
)
cursor.execute(
"DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :offer_id",
{"type_ind": int(Concepts.OFFER), "offer_id": offer_row[0]},
)
if num_offers > 0 or num_bids > 0:
self.log.info(
"Removed data for {} expired offer{} and {} bid{}.".format(
num_offers,
"s" if num_offers != 1 else "",
num_bids,
"s" if num_bids != 1 else "",
)
)
cursor.execute(
"DELETE FROM checkedblocks WHERE created_at <= :expired_at",
{"expired_at": now - time_offset},
)
finally:
self.closeDB(cursor)

View File

@@ -13,8 +13,8 @@ def encodepoint(P):
zi = edf.inv(P[2])
x = (P[0] * zi) % edf.q
y = (P[1] * zi) % edf.q
y += (x & 1) << 255
return y.to_bytes(32, byteorder="little")
y += ((x & 1) << 255)
return y.to_bytes(32, byteorder='little')
def hashToEd25519(bytes_in):
@@ -22,8 +22,8 @@ def hashToEd25519(bytes_in):
for i in range(1000):
h255 = bytearray(hashed)
x_sign = 0 if h255[31] & 0x80 == 0 else 1
h255[31] &= 0x7F # Clear top bit
y = int.from_bytes(h255, byteorder="little")
h255[31] &= 0x7f # Clear top bit
y = int.from_bytes(h255, byteorder='little')
x = edf.xrecover(y, x_sign)
if x == 0 and y == 1: # Skip infinity point
continue
@@ -33,4 +33,4 @@ def hashToEd25519(bytes_in):
if edf.isoncurve(P) and edf.is_identity(edf.scalarmult(P, edf.l)):
return P
hashed = hashlib.sha256(hashed).digest()
raise ValueError("hashToEd25519 failed")
raise ValueError('hashToEd25519 failed')

View File

@@ -1,20 +1,14 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019-2023 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Copyright (c) 2019-2022 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import urllib.request
default_chart_api_key = (
"95dd900af910656e0e17c41f2ddc5dba77d01bf8b0e7d2787634a16bd976c553"
)
default_coingecko_api_key = "CG-8hm3r9iLfpEXv4ied8oLbeUj"
class Explorer:
class Explorer():
def __init__(self, swapclient, coin_type, base_url):
self.swapclient = swapclient
self.coin_type = coin_type
@@ -22,94 +16,87 @@ class Explorer:
self.log = self.swapclient.log
def readURL(self, url):
self.log.debug("Explorer url: {}".format(url))
return self.swapclient.readURL(url)
self.log.debug('Explorer url: {}'.format(url))
try:
self.swapclient.setConnectionParameters()
req = urllib.request.Request(url)
return urllib.request.urlopen(req).read()
finally:
self.swapclient.popConnectionParameters()
class ExplorerInsight(Explorer):
def getChainHeight(self):
return json.loads(self.readURL(self.base_url + "/sync"))["blockChainHeight"]
return json.loads(self.readURL(self.base_url + '/sync'))['blockChainHeight']
def getBlock(self, block_hash):
data = json.loads(self.readURL(self.base_url + "/block/{}".format(block_hash)))
data = json.loads(self.readURL(self.base_url + '/block/{}'.format(block_hash)))
return data
def getTransaction(self, txid):
data = json.loads(self.readURL(self.base_url + "/tx/{}".format(txid)))
data = json.loads(self.readURL(self.base_url + '/tx/{}'.format(txid)))
return data
def getBalance(self, address):
data = json.loads(
self.readURL(self.base_url + "/addr/{}/balance".format(address))
)
data = json.loads(self.readURL(self.base_url + '/addr/{}/balance'.format(address)))
return data
def lookupUnspentByAddress(self, address):
data = json.loads(self.readURL(self.base_url + "/addr/{}/utxo".format(address)))
data = json.loads(self.readURL(self.base_url + '/addr/{}/utxo'.format(address)))
rv = []
for utxo in data:
rv.append(
{
"txid": utxo["txid"],
"index": utxo["vout"],
"height": utxo["height"],
"n_conf": utxo["confirmations"],
"value": utxo["satoshis"],
}
)
rv.append({
'txid': utxo['txid'],
'index': utxo['vout'],
'height': utxo['height'],
'n_conf': utxo['confirmations'],
'value': utxo['satoshis'],
})
return rv
class ExplorerBitAps(Explorer):
def getChainHeight(self):
return json.loads(self.readURL(self.base_url + "/block/last"))["data"]["block"][
"height"
]
return json.loads(self.readURL(self.base_url + '/block/last'))['data']['block']['height']
def getBlock(self, block_hash):
data = json.loads(self.readURL(self.base_url + "/block/{}".format(block_hash)))
data = json.loads(self.readURL(self.base_url + '/block/{}'.format(block_hash)))
return data
def getTransaction(self, txid):
data = json.loads(self.readURL(self.base_url + "/transaction/{}".format(txid)))
data = json.loads(self.readURL(self.base_url + '/transaction/{}'.format(txid)))
return data
def getBalance(self, address):
data = json.loads(self.readURL(self.base_url + "/address/state/" + address))
return data["data"]["balance"]
data = json.loads(self.readURL(self.base_url + '/address/state/' + address))
return data['data']['balance']
def lookupUnspentByAddress(self, address):
# Can't get unspents; return data only if exactly one transaction exists
data = json.loads(
self.readURL(self.base_url + "/address/transactions/" + address)
)
data = json.loads(self.readURL(self.base_url + '/address/transactions/' + address))
try:
assert data["data"]["list"] == 1
assert data['data']['list'] == 1
except Exception as ex:
self.log.debug("Explorer error: {}".format(str(ex)))
self.log.debug('Explorer error: {}'.format(str(ex)))
return None
tx = data["data"]["list"][0]
tx_data = json.loads(
self.readURL(self.base_url + "/transaction/{}".format(tx["txId"]))
)["data"]
tx = data['data']['list'][0]
tx_data = json.loads(self.readURL(self.base_url + '/transaction/{}'.format(tx['txId'])))['data']
for i, vout in tx_data["vOut"].items():
if vout["address"] == address:
return [
{
"txid": tx_data["txId"],
"index": int(i),
"height": tx_data["blockHeight"],
"n_conf": tx_data["confirmations"],
"value": vout["value"],
}
]
for i, vout in tx_data['vOut'].items():
if vout['address'] == address:
return [{
'txid': tx_data['txId'],
'index': int(i),
'height': tx_data['blockHeight'],
'n_conf': tx_data['confirmations'],
'value': vout['value'],
}]
class ExplorerChainz(Explorer):
def getChainHeight(self):
return int(self.readURL(self.base_url + "?q=getblockcount"))
return int(self.readURL(self.base_url + '?q=getblockcount'))
def lookupUnspentByAddress(self, address):
chain_height = self.getChainHeight()
self.log.debug("[rm] chain_height %d", chain_height)
self.log.debug('[rm] chain_height %d', chain_height)

File diff suppressed because it is too large Load Diff

View File

@@ -1,233 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from enum import IntEnum
from basicswap.chainparams import (
chainparams,
)
from basicswap.util import (
ensure,
i2b,
b2i,
make_int,
format_amount,
TemporaryError,
)
from basicswap.util.crypto import (
hash160,
)
from basicswap.util.ecc import (
ep,
getSecretInt,
)
from coincurve.dleag import verify_secp256k1_point
from coincurve.keys import (
PublicKey,
)
class Curves(IntEnum):
secp256k1 = 1
ed25519 = 2
class CoinInterface:
@staticmethod
def watch_blocks_for_scripts() -> bool:
return False
@staticmethod
def compareFeeRates(a, b) -> bool:
return abs(a - b) < 20
def __init__(self, network):
self.setDefaults()
self._network = network
self._mx_wallet = threading.Lock()
self._altruistic = True
def interface_type(self) -> int:
# coin_type() returns the base coin type, interface_type() returns the coin+balance type.
return self.coin_type()
def setDefaults(self):
self._unknown_wallet_seed = True
self._restore_height = None
def make_int(self, amount_in: int, r: int = 0) -> int:
return make_int(amount_in, self.exp(), r=r)
def format_amount(self, amount_in, conv_int=False, r=0):
amount_int = make_int(amount_in, self.exp(), r=r) if conv_int else amount_in
return format_amount(amount_int, self.exp())
def coin_name(self) -> str:
coin_chainparams = chainparams[self.coin_type()]
if "display_name" in coin_chainparams:
return coin_chainparams["display_name"]
return coin_chainparams["name"].capitalize()
def ticker(self) -> str:
ticker = chainparams[self.coin_type()]["ticker"]
if self._network == "testnet":
ticker = "t" + ticker
elif self._network == "regtest":
ticker = "rt" + ticker
return ticker
def getExchangeTicker(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]["ticker"]
def getExchangeName(self, exchange_name: str) -> str:
return chainparams[self.coin_type()]["name"]
def ticker_mainnet(self) -> str:
ticker = chainparams[self.coin_type()]["ticker"]
return ticker
def min_amount(self) -> int:
return chainparams[self.coin_type()][self._network]["min_amount"]
def max_amount(self) -> int:
return chainparams[self.coin_type()][self._network]["max_amount"]
def setWalletSeedWarning(self, value: bool) -> None:
self._unknown_wallet_seed = value
def setWalletRestoreHeight(self, value: int) -> None:
self._restore_height = value
def knownWalletSeed(self) -> bool:
return not self._unknown_wallet_seed
def chainparams(self):
return chainparams[self.coin_type()]
def chainparams_network(self):
return chainparams[self.coin_type()][self._network]
def has_segwit(self) -> bool:
return chainparams[self.coin_type()].get("has_segwit", True)
def use_p2shp2wsh(self) -> bool:
# p2sh-p2wsh
return False
def is_transient_error(self, ex) -> bool:
if isinstance(ex, TemporaryError):
return True
str_error: str = str(ex).lower()
if "not enough unlocked money" in str_error:
return True
if "no unlocked balance" in str_error:
return True
if "transaction was rejected by daemon" in str_error:
return True
if "invalid unlocked_balance" in str_error:
return True
if "daemon is busy" in str_error:
return True
if "timed out" in str_error:
return True
if "request-sent" in str_error:
return True
return False
def setConfTarget(self, new_conf_target: int) -> None:
ensure(
new_conf_target >= 1 and new_conf_target < 33, "Invalid conf_target value"
)
self._conf_target = new_conf_target
def walletRestoreHeight(self) -> int:
return self._restore_height
def get_connection_type(self):
return self._connection_type
def using_segwit(self) -> bool:
# Using btc native segwit
return self._use_segwit
def use_tx_vsize(self) -> bool:
return self._use_segwit
def getLockTxSwapOutputValue(self, bid, xmr_swap) -> int:
return bid.amount
def getLockRefundTxSwapOutputValue(self, bid, xmr_swap) -> int:
return xmr_swap.a_swap_refund_value
def getLockRefundTxSwapOutput(self, xmr_swap) -> int:
# Only one prevout exists
return 0
def checkWallets(self) -> int:
return 1
def altruistic(self) -> bool:
return self._altruistic
class AdaptorSigInterface:
def getScriptLockTxDummyWitness(self, script: bytes):
return [b"", bytes(72), bytes(72), bytes(len(script))]
def getScriptLockRefundSpendTxDummyWitness(self, script: bytes):
return [b"", bytes(72), bytes(72), bytes((1,)), bytes(len(script))]
def getScriptLockRefundSwipeTxDummyWitness(self, script: bytes):
return [bytes(72), b"", bytes(len(script))]
class Secp256k1Interface(CoinInterface, AdaptorSigInterface):
@staticmethod
def curve_type():
return Curves.secp256k1
def getNewRandomKey(self) -> bytes:
return i2b(getSecretInt())
def getPubkey(self, privkey: bytes) -> bytes:
return PublicKey.from_secret(privkey).format()
def pkh(self, pubkey: bytes) -> bytes:
return hash160(pubkey)
def verifyKey(self, k: bytes) -> bool:
i = b2i(k)
return i < ep.o and i > 0
def verifyPubkey(self, pubkey_bytes: bytes) -> bool:
return verify_secp256k1_point(pubkey_bytes)
def isValidAddressHash(self, address_hash: bytes) -> bool:
hash_len = len(address_hash)
if hash_len == 20:
return True
def isValidPubkey(self, pubkey: bytes) -> bool:
try:
self.verifyPubkey(pubkey)
return True
except Exception:
return False
def verifySig(self, pubkey: bytes, signed_hash: bytes, sig: bytes) -> bool:
pubkey = PublicKey(pubkey)
return pubkey.verify(sig, signed_hash, hasher=None)
def sumKeys(self, ka: bytes, kb: bytes) -> bytes:
# TODO: Add to coincurve
return i2b((b2i(ka) + b2i(kb)) % ep.o)
def sumPubkeys(self, Ka: bytes, Kb: bytes) -> bytes:
return PublicKey.combine_keys([PublicKey(Ka), PublicKey(Kb)]).format()
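# Illustrative property (not part of the original file): private keys add
# modulo the secp256k1 group order and public keys add as curve points, so for
# any two valid secret keys a and b and any Secp256k1Interface instance iface
# (iface is a hypothetical name used only in this note):
#   iface.getPubkey(iface.sumKeys(a, b)) ==
#       iface.sumPubkeys(iface.getPubkey(a), iface.getPubkey(b))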

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,247 +0,0 @@
import unittest
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
def polymod(values):
chk = 1
generator = [
(0x01, 0x98F2BC8E61),
(0x02, 0x79B76D99E2),
(0x04, 0xF33E5FB3C4),
(0x08, 0xAE2EABE2A8),
(0x10, 0x1E4F43E470),
]
for value in values:
top = chk >> 35
chk = ((chk & 0x07FFFFFFFF) << 5) ^ value
for i in generator:
if top & i[0] != 0:
chk ^= i[1]
return chk ^ 1
def calculate_checksum(prefix, payload):
poly = polymod(prefix_expand(prefix) + payload + [0, 0, 0, 0, 0, 0, 0, 0])
out = list()
for i in range(8):
out.append((poly >> 5 * (7 - i)) & 0x1F)
return out
def verify_checksum(prefix, payload):
return polymod(prefix_expand(prefix) + payload) == 0
def b32decode(inputs):
out = list()
for letter in inputs:
out.append(CHARSET.find(letter))
return out
def b32encode(inputs):
out = ""
for char_code in inputs:
out += CHARSET[char_code]
return out
def convertbits(data, frombits, tobits, pad=True):
acc = 0
bits = 0
ret = []
maxv = (1 << tobits) - 1
max_acc = (1 << (frombits + tobits - 1)) - 1
for value in data:
if value < 0 or (value >> frombits):
return None
acc = ((acc << frombits) | value) & max_acc
bits += frombits
while bits >= tobits:
bits -= tobits
ret.append((acc >> bits) & maxv)
if pad:
if bits:
ret.append((acc << (tobits - bits)) & maxv)
elif bits >= frombits or ((acc << (tobits - bits)) & maxv):
return None
return ret
def prefix_expand(prefix):
return [ord(x) & 0x1F for x in prefix] + [0]
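The helpers above implement the standard cashaddr BCH checksum: a checksum computed over a 5-bit payload verifies when it is re-appended. A small self-check using only the functions defined in this file; the 20 zero bytes stand in for a P2PKH pubkey hash:
payload5 = convertbits([0] + list(bytes(20)), 8, 5)  # version byte 0 + 20-byte hash, regrouped into 5-bit digits
checksum = calculate_checksum("bitcoincash", payload5)
assert verify_checksum("bitcoincash", payload5 + checksum)  # the prefix is covered by the checksum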
class Address:
"""
Class to handle CashAddr.
:param version: Version of CashAddr
:type version: ``str``
:param payload: Payload of CashAddr as int list of the bytearray
:type payload: ``list`` of ``int``
"""
VERSIONS = {
"P2SH20": {"prefix": "bitcoincash", "version_bit": 8, "network": "mainnet"},
"P2SH32": {"prefix": "bitcoincash", "version_bit": 11, "network": "mainnet"},
"P2PKH": {"prefix": "bitcoincash", "version_bit": 0, "network": "mainnet"},
"P2SH20-TESTNET": {"prefix": "bchtest", "version_bit": 8, "network": "testnet"},
"P2SH32-TESTNET": {
"prefix": "bchtest",
"version_bit": 11,
"network": "testnet",
},
"P2PKH-TESTNET": {"prefix": "bchtest", "version_bit": 0, "network": "testnet"},
"P2SH20-REGTEST": {"prefix": "bchreg", "version_bit": 8, "network": "regtest"},
"P2SH32-REGTEST": {"prefix": "bchreg", "version_bit": 11, "network": "regtest"},
"P2PKH-REGTEST": {"prefix": "bchreg", "version_bit": 0, "network": "regtest"},
"P2SH20-CATKN": {
"prefix": "bitcoincash",
"version_bit": 24,
"network": "mainnet",
},
"P2SH32-CATKN": {
"prefix": "bitcoincash",
"version_bit": 27,
"network": "mainnet",
},
"P2PKH-CATKN": {
"prefix": "bitcoincash",
"version_bit": 16,
"network": "mainnet",
},
"P2SH20-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 24,
"network": "testnet",
},
"P2SH32-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 27,
"network": "testnet",
},
"P2PKH-CATKN-TESTNET": {
"prefix": "bchtest",
"version_bit": 16,
"network": "testnet",
},
"P2SH20-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 24,
"network": "regtest",
},
"P2SH32-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 27,
"network": "regtest",
},
"P2PKH-CATKN-REGTEST": {
"prefix": "bchreg",
"version_bit": 16,
"network": "regtest",
},
}
VERSION_SUFFIXES = {"bitcoincash": "", "bchtest": "-TESTNET", "bchreg": "-REGTEST"}
ADDRESS_TYPES = {
0: "P2PKH",
8: "P2SH20",
11: "P2SH32",
16: "P2PKH-CATKN",
24: "P2SH20-CATKN",
27: "P2SH32-CATKN",
}
def __init__(self, version, payload):
if version not in Address.VERSIONS:
raise ValueError("Invalid address version provided")
self.version = version
self.payload = payload
self.prefix = Address.VERSIONS[self.version]["prefix"]
def __str__(self):
return (
f"version: {self.version}\npayload: {self.payload}\nprefix: {self.prefix}"
)
def __repr__(self):
return f"Address('{self.cash_address()}')"
def __eq__(self, other):
if isinstance(other, str):
return self.cash_address() == other
elif isinstance(other, Address):
return self.cash_address() == other.cash_address()
else:
raise ValueError(
"Address can be compared to a string address"
" or an instance of Address"
)
def cash_address(self):
"""
Generate CashAddr of the Address
:rtype: ``str``
"""
version_bit = Address.VERSIONS[self.version]["version_bit"]
payload = [version_bit] + list(self.payload)
payload = convertbits(payload, 8, 5)
checksum = calculate_checksum(self.prefix, payload)
return self.prefix + ":" + b32encode(payload + checksum)
@staticmethod
def from_string(address):
"""
Generate Address from a cashaddress string
:param address: The cashaddress string
:type address: ``str``
:returns: Instance of :class:~bitcash.cashaddress.Address
"""
try:
address = str(address)
except Exception:
raise ValueError("Expected string as input")
if address.upper() != address and address.lower() != address:
raise ValueError(
"Cash address contains uppercase and lowercase characters: " + address
)
address = address.lower()
colon_count = address.count(":")
if colon_count == 0:
raise ValueError("Cash address is missing prefix")
if colon_count > 1:
raise ValueError("Cash address contains more than one colon character")
prefix, base32string = address.split(":")
decoded = b32decode(base32string)
if not verify_checksum(prefix, decoded):
raise ValueError(
"Bad cash address checksum for address {}".format(address)
)
converted = convertbits(decoded, 5, 8)
try:
version = Address.ADDRESS_TYPES[converted[0]]
except Exception:
raise ValueError("Could not determine address version")
version += Address.VERSION_SUFFIXES[prefix]
payload = converted[1:-6]
return Address(version, payload)
class TestFrameworkScript(unittest.TestCase):
def test_base58encodedecode(self):
def check_cashaddress(address: str):
self.assertEqual(Address.from_string(address).cash_address(), address)
check_cashaddress("bitcoincash:qzfyvx77v2pmgc0vulwlfkl3uzjgh5gnmqk5hhyaa6")

View File

@@ -1,43 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from basicswap.contrib.test_framework.script import CScriptOp
OP_TXINPUTCOUNT = CScriptOp(0xc3)
OP_1 = CScriptOp(0x51)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_TXOUTPUTCOUNT = CScriptOp(0xc4)
OP_0 = CScriptOp(0x00)
OP_UTXOVALUE = CScriptOp(0xc6)
OP_OUTPUTVALUE = CScriptOp(0xcc)
OP_SUB = CScriptOp(0x94)
OP_UTXOTOKENCATEGORY = CScriptOp(0xce)
OP_OUTPUTTOKENCATEGORY = CScriptOp(0xd1)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_UTXOTOKENCOMMITMENT = CScriptOp(0xcf)
OP_OUTPUTTOKENCOMMITMENT = CScriptOp(0xd2)
OP_UTXOTOKENAMOUNT = CScriptOp(0xd0)
OP_OUTPUTTOKENAMOUNT = CScriptOp(0xd3)
OP_INPUTSEQUENCENUMBER = CScriptOp(0xcb)
OP_NOTIF = CScriptOp(0x64)
OP_OUTPUTBYTECODE = CScriptOp(0xcd)
OP_OVER = CScriptOp(0x78)
OP_CHECKDATASIG = CScriptOp(0xba)
OP_CHECKDATASIGVERIFY = CScriptOp(0xbb)
OP_ELSE = CScriptOp(0x67)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_DROP = CScriptOp(0x75)
OP_EQUAL = CScriptOp(0x87)
OP_ENDIF = CScriptOp(0x68)
OP_HASH256 = CScriptOp(0xaa)
OP_PUSHBYTES_32 = CScriptOp(0x20)
OP_DUP = CScriptOp(0x76)
OP_HASH160 = CScriptOp(0xa9)
OP_CHECKSIG = CScriptOp(0xac)
OP_SHA256 = CScriptOp(0xa8)
OP_VERIFY = CScriptOp(0x69)

View File

@@ -1,191 +0,0 @@
"""
Copyright (c) 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
import socket
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("BitcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
try:
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
Exception.__init__(self, errmsg)
self.error = rpc_error
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
(user, passwd) = (self.__url.username, self.__url.password)
try:
user = user.encode('utf8')
except AttributeError:
pass
try:
passwd = passwd.encode('utf8')
except AttributeError:
pass
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except httplib.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except (BrokenPipeError,ConnectionResetError):
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
# ConnectionResetError happens on FreeBSD with Python 3.4
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def __call__(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
postdata = json.dumps({'version': '1.1',
'method': self._service_name,
'params': args or argsn,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> "+postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
try:
http_response = self.__conn.getresponse()
except socket.timeout as e:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
'using larger timeout for calls that take '
'longer to return.' % (self._service_name,
self.__conn.timeout)})
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException({
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
if "error" in response and response["error"] is None:
log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- "+responsedata)
return response
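Typical use mirrors bitcoin-cli: attribute access builds the RPC method name and calling it sends the request, raising JSONRPCException on an error response. A minimal sketch, assuming a node is reachable at the placeholder URL and credentials below:
rpc = AuthServiceProxy("http://rpcuser:rpcpass@127.0.0.1:8332")  # placeholder credentials/port
print(rpc.getblockcount())   # a plain call; JSON floats in results are parsed as Decimal
print(rpc.getblockhash(0))   # positional arguments are passed through as JSON-RPC params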

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python3
#
# bignum.py
#
# This file is copied from python-bitcoinlib.
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# bitcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
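The MPI and vch helpers are mutual inverses, including the sign-bit handling, so encoding an integer and decoding it again is lossless. A quick self-check using only the functions above:
for v in (0, 1, 1000, -1000, 2 ** 71):
    assert mpi2bn(bn2mpi(v)) == v   # big endian MPI round trip
    assert vch2bn(bn2vch(v)) == v   # bitcoin-style little endian round trip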

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
This module contains utilities for doing coverage analysis on the RPC
interface.
It provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `bitcoin-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w', encoding='utf8') as f:
f.writelines(list(commands))
return True
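A minimal usage sketch of the wrapper, assuming an AuthServiceProxy instance from the authproxy module above and a reachable node; the URL, credentials and log path are placeholders:
proxy = AuthServiceProxy("http://rpcuser:rpcpass@127.0.0.1:8332")
wrapped = AuthServiceProxyWrapper(proxy, coverage_logfile="/tmp/rpc_coverage.txt")
wrapped.getblockcount()   # performs the RPC and appends "getblockcount" to the log file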

File diff suppressed because it is too large

View File

@@ -1,943 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# script.py
#
# This file is modified from python-bitcoinlib.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import struct
from .bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(absvalue & 0xff)
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means that indexing does *not* work as you might expect - you'll
index by byte rather than by opcode. This format was chosen for efficiency so that the
general case would not require creating a lot of little CScriptOP objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, int):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as
determining the exact opcode byte indexes. (sop_idx)
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOP instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return b"x('%s')" % hexlify(o).decode('ascii')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
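As the class docstring notes, a CScript is plain bytes but iterates by opcode. A short sketch building a P2PKH-style script from the opcodes defined in this file and walking it back; the 20 zero bytes stand in for a pubkey hash:
script = CScript([OP_DUP, OP_HASH160, bytes(20), OP_EQUALVERIFY, OP_CHECKSIG])
assert isinstance(script, bytes)   # usable anywhere bytes are accepted
assert list(script) == [OP_DUP, OP_HASH160, bytes(20), OP_EQUALVERIFY, OP_CHECKSIG]
assert script.GetSigOpCount(True) == 1   # a single OP_CHECKSIG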
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut(-1))
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
hashPrevouts = 0
hashSequence = 0
hashOutputs = 0
if not (hashtype & SIGHASH_ANYONECANPAY):
serialize_prevouts = bytes()
for i in txTo.vin:
serialize_prevouts += i.prevout.serialize()
hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_sequence = bytes()
for i in txTo.vin:
serialize_sequence += struct.pack("<I", i.nSequence)
hashSequence = uint256_from_str(hash256(serialize_sequence))
if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_outputs = bytes()
for o in txTo.vout:
serialize_outputs += o.serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
serialize_outputs = txTo.vout[inIdx].serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
ss = bytes()
ss += struct.pack("<i", txTo.nVersion)
ss += ser_uint256(hashPrevouts)
ss += ser_uint256(hashSequence)
ss += txTo.vin[inIdx].prevout.serialize()
ss += ser_string(script)
ss += struct.pack("<q", amount)
ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
ss += ser_uint256(hashOutputs)
ss += struct.pack("<i", txTo.nLockTime)
ss += struct.pack("<I", hashtype)
return hash256(ss)

View File

@@ -1,64 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# siphash.py - Specialized SipHash-2-4 implementations
#
# This implements SipHash-2-4 for 256-bit integers.
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
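A quick smoke test of the routine above: SipHash-2-4 is keyed and deterministic, so the same key and 256-bit message always yield the same 64-bit tag. The key and message values here are arbitrary examples:
k0, k1 = 0x0706050403020100, 0x0F0E0D0C0B0A0908
msg = (0x11223344 << 224) | 0x55667788   # an arbitrary integer in the 256-bit range
tag = siphash256(k0, k1, msg)
assert tag == siphash256(k0, k1, msg)    # deterministic for the same key/message
assert 0 <= tag < 2 ** 64                # the output is a single 64-bit word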

View File

@@ -1,841 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2014-2017 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import tempfile
import time
import re
import errno
import logging
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
COVERAGE_DIR = None
logger = logging.getLogger("TestFramework.utils")
# The maximum number of nodes a single test can spawn
MAX_NODES = 15
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
BITCOIND_PROC_WAIT_TIMEOUT = 60
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
#Set Mocktime default to OFF.
#MOCKTIME is only needed for scripts that use the
#cached version of the blockchain. If the cached
#version of the blockchain is used without MOCKTIME
#then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
#For backward compatibility of the python scripts
#with previous versions of the cache, set MOCKTIME
#to Jan 1, 2014 + (201 * 10 * 60)
global MOCKTIME
MOCKTIME = 1414776313 + (201 * 10 * 60)
def set_mocktime(t):
global MOCKTIME
MOCKTIME = t
def disable_mocktime():
global MOCKTIME
MOCKTIME = 0
def get_mocktime():
return MOCKTIME
def enable_coverage(dirname):
"""Maintain a log of which RPC calls are made during testing."""
global COVERAGE_DIR
COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
COVERAGE_DIR, node_number) if COVERAGE_DIR else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def get_evoznsync_status(node):
result = node.evoznsync("status")
return result['IsSynced']
def wait_to_sync(node, fast_znsync=False):
tm = 0
synced = False
while tm < 30:
synced = get_evoznsync_status(node)
if synced:
return
time.sleep(0.2)
if fast_znsync:
# skip mnsync states
node.evoznsync("next")
tm += 0.2
assert(synced)
def p2p_port(n):
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
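Given the constants above, node n's p2p and rpc ports are deterministic once PortSeed.n has been set for the test process; a quick check of the arithmetic:
PortSeed.n = 1                 # normally assigned once per test process
assert p2p_port(0) == 11015    # PORT_MIN + 0 + (15 * 1) % 4984
assert rpc_port(0) == 16015    # the p2p and rpc ranges do not overlap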
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
sync_blocks needs to be called with an rpc_connections set that has at least
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
# Use getblockcount() instead of waitforblockheight() to determine the
# initial max height because the two RPCs look at different internal global
# variables (chainActive vs latestBlock) and the former gets updated
# earlier.
maxheight = max(x.getblockcount() for x in rpc_connections)
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
if all(t["height"] == maxheight for t in tips):
if all(t["hash"] == tips[0]["hash"] for t in tips):
return
raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
"".join("\n {!r}".format(tip) for tip in tips)))
time.sleep(wait)
cur_time = time.time()
raise AssertionError("Block sync to height {} timed out:{}".format(
maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
def sync_znodes(rpc_connections, *, timeout=60):
"""
Wait until every node's znsync status reports synced.
"""
start_time = cur_time = time.time()
while cur_time <= start_time + timeout:
statuses = [r.znsync("status") for r in rpc_connections]
if all(stat["IsSynced"] == True for stat in statuses):
return
cur_time = time.time()
raise AssertionError("Znode sync failed.")
def sync_chain(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same best block
"""
while timeout > 0:
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash == [best_hash[0]]*len(best_hash):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Chain sync failed: Best block hashes don't match")
def sync_mempools(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
return
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
def sync_znodes(rpc_connections, fast_mnsync=False):
for node in rpc_connections:
wait_to_sync(node, fast_mnsync)
bitcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
rpc_u, rpc_p = rpc_auth_pair(n)
with open(os.path.join(datadir, "firo.conf"), 'w', encoding='utf8') as f:
f.write("regtest=1\n")
f.write("rpcuser=" + rpc_u + "\n")
f.write("rpcpassword=" + rpc_p + "\n")
f.write("port="+str(p2p_port(n))+"\n")
f.write("rpcport="+str(rpc_port(n))+"\n")
f.write("listenonion=0\n")
return datadir
def rpc_auth_pair(n):
return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)
def rpc_url(i, rpchost=None):
rpc_u, rpc_p = rpc_auth_pair(i)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
parts = rpchost.split(':')
if len(parts) == 2:
host, port = parts
else:
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
def wait_for_bitcoind_start(process, url, i):
'''
Wait for firod to start. This means that RPC is accessible and fully initialized.
Raise an exception if firod exits during initialization.
'''
while True:
if process.poll() is not None:
raise Exception('firod exited with status %i during initialization' % process.returncode)
try:
rpc = get_rpc_proxy(url, i)
blocks = rpc.getblockcount()
break # break out of loop on success
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
raise # unknown JSON RPC exception
time.sleep(0.25)
def initialize_chain(test_dir, num_nodes, cachedir):
"""
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache
"""
assert num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(os.path.join(cachedir, 'node'+str(i))):
create_cache = True
break
if create_cache:
#find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(os.path.join(cachedir,"node"+str(i))):
shutil.rmtree(os.path.join(cachedir,"node"+str(i)))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
datadir=initialize_datadir(cachedir, i)
args = [ os.getenv("FIROD", "firod"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
bitcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: bitcoind started, waiting for RPC to come up")
wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: RPC successfully started")
rpcs = []
for i in range(MAX_NODES):
try:
rpcs.append(get_rpc_proxy(rpc_url(i), i))
except Exception:
sys.stderr.write("Error connecting to " + rpc_url(i) + "\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
enable_mocktime()
block_time = get_mocktime() - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
rpcs[peer].generate(1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
disable_mocktime()
for i in range(MAX_NODES):
try:
os.remove(log_filename(cachedir, i, "debug.log"))
os.remove(log_filename(cachedir, i, "db.log"))
os.remove(log_filename(cachedir, i, "peers.dat"))
os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
except OSError:
pass
for i in range(num_nodes):
from_dir = os.path.join(cachedir, "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
if from_dir != to_dir:
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
match = re.match(r'(\[[0-9a-fA-f:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, redirect_stderr=False, stderr=None):
"""
Start a bitcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("FIROD", "firod")
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-dandelion=0", "-usemnemonic=0", "-mocktime="+str(get_mocktime()) ]
#Useful args for debugging
# "screen", "--",
# "gdb", "-x", "/tmp/gdb_run", "--args",
# Don't try auto backups (they fail a lot when running tests)
args += [ "-createwalletbackups=0" ]
if extra_args is not None: args.extend(extra_args)
# Allow redirecting stderr to stdout in case we expect some non-critical warnings/errors printed to stderr
# Otherwise the whole test would be considered to be failed in such cases
if redirect_stderr:
stderr = sys.stdout
bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
logger.debug("start_node: firod started, waiting for RPC to come up")
url = rpc_url(i, rpchost)
wait_for_bitcoind_start(bitcoind_processes[i], url, i)
logger.debug("start_node: RPC successfully started")
proxy = get_rpc_proxy(url, i, timeout=timewait)
if COVERAGE_DIR:
coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start multiple bitcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
if binary is None: binary = [ None for _ in range(num_nodes) ]
rpcs = []
try:
for i in range(num_nodes):
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
except: # If one node failed to start, stop the others
stop_nodes(rpcs)
raise
return rpcs
def copy_datadir(from_node, to_node, dirname):
from_datadir = os.path.join(dirname, "node"+str(from_node), "regtest")
to_datadir = os.path.join(dirname, "node"+str(to_node), "regtest")
dirs = ["blocks", "chainstate", "evodb", "llmq"]
for d in dirs:
try:
src = os.path.join(from_datadir, d)
dst = os.path.join(to_datadir, d)
shutil.copytree(src, dst)
except:
pass
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def wait_node(i):
return_code = bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
assert_equal(return_code, 0)
del bitcoind_processes[i]
def stop_node(node, i, wait=True):
logger.debug("Stopping node %d" % i)
try:
node.stop()
except http.client.CannotSendRequest as e:
logger.exception("Unable to stop node")
if wait:
wait_node(i)
def stop_nodes(nodes, fast=True):
for i, node in enumerate(nodes):
stop_node(node, i, not fast)
if fast:
for i, node in enumerate(nodes):
wait_node(i)
assert not bitcoind_processes.values() # All connections must be gone now
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def connect_nodes(from_connection, node_num):
# NOTE: In next line p2p_port(0) was replaced by rpc_port(0).
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def isolate_node(node, timeout=5):
node.setnetworkactive(False)
st = time.time()
while time.time() < st + timeout:
if node.getconnectioncount() == 0:
return
time.sleep(0.5)
raise AssertionError("disconnect_node timed out")
def reconnect_isolated_node(node, node_num):
node.setnetworkactive(True)
connect_nodes(node, node_num)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:"+e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was returned or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:"+e.error['message'])
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find == True:
assert_equal(expected, { })
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find == True:
num_matched = num_matched+1
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects were found %s"%(str(to_match)))
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5*count)+101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value/2)
outputs[addr2] = satoshi_round(send_value/2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
txid = node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
for i in range (512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
def create_tx(node, coinbase, to_address, amount):
inputs = [{ "txid" : coinbase, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
def create_tx_multi_input(node, inputs, outputs):
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
addr = node.getnewaddress()
txids = []
for _ in range(num):
t = utxos.pop()
inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
outputs = {}
change = t['amount'] - fee
outputs[addr] = satoshi_round(change)
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def mine_large_block(node, utxos=None):
    # Each transaction is roughly 66kB; 14 of them come close to the 1MB block limit.
num = 14
txouts = gen_return_txouts()
utxos = utxos if utxos is not None else []
if len(utxos) < num:
utxos.clear()
utxos.extend(node.listunspent())
fee = 100 * node.getnetworkinfo()["relayfee"]
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
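# Illustrative usage sketch, assuming a running regtest node proxy as used elsewhere in
# this module: pre-create spendable outputs, then pad a few blocks towards the size limit.
def _example_fill_blocks(node):
    utxos = create_confirmed_utxos(Decimal("0.001"), node, 50)
    for _ in range(3):
        mine_large_block(node, utxos)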
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def dumpprivkey_otac(node, address):
import re
error_text = ''
try:
return node.dumpprivkey(address)
except JSONRPCException as e:
error_text = e.error
otac_match = re.search("Your one time authorization code is: ([a-zA-Z0-9]+)", error_text['message'])
if not otac_match:
raise JSONRPCException(error_text)
return node.dumpprivkey(address, otac_match.groups()[0])
def get_znsync_status(node):
result = node.znsync("status")
return result['IsSynced']
def wait_to_sync_znodes(node, fast_znsync=False):
while True:
synced = get_znsync_status(node)
if synced:
break
time.sleep(0.2)
if fast_znsync:
# skip mnsync states
node.znsync("next")
def get_full_balance(node):
wallet_info = node.getwalletinfo()
return wallet_info["balance"] + wallet_info["immature_balance"] + wallet_info["unconfirmed_balance"]

View File

@@ -1,175 +0,0 @@
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("NavcoinRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
Exception.__init__(self)
self.error = rpc_error
def EncodeDecimal(o):
if isinstance(o, decimal.Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
(user, passwd) = (self.__url.username, self.__url.password)
try:
user = user.encode('utf8')
except AttributeError:
pass
try:
passwd = passwd.encode('utf8')
except AttributeError:
pass
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
timeout=timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self._service_name is not None:
name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
'''
Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
'''
headers = {'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'}
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
except httplib.BadStatusLine as e:
if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
except BrokenPipeError:
# Python 3.5+ raises this instead of BadStatusLine when the connection was reset
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
postdata = json.dumps({'version': '1.1',
'method': self._service_name,
'params': args,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
log.debug("--> "+postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
http_response = self.__conn.getresponse()
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
content_type = http_response.getheader('Content-Type')
if content_type != 'application/json':
raise JSONRPCException({
'code': -342, 'message': 'non-JSON HTTP response with \'%i %s\' from server' % (http_response.status, http_response.reason)})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
if "error" in response and response["error"] is None:
log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
log.debug("<-- "+responsedata)
return response
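# Illustrative usage sketch: RPC methods are resolved via __getattr__ and issued via
# __call__, with JSON numbers parsed as Decimal. The URL, credentials and port are
# placeholder values assumed only for the example.
def _example_authproxy():
    proxy = AuthServiceProxy("http://rpcuser:rpcpass@127.0.0.1:18443", timeout=HTTP_TIMEOUT)
    height = proxy.getblockcount()       # single call
    best = proxy.getblockhash(height)    # positional arguments pass straight through
    return height, best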

View File

@@ -1,101 +0,0 @@
#!/usr/bin/env python3
#
# bignum.py
#
# This file is copied from python-navcoinlib.
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# navcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
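# Illustrative sketch: bn2vch/vch2bn round-trip a Python integer through the
# little-endian, sign-bit-in-top-byte encoding used for script numbers.
def _example_bignum_roundtrip():
    for v in (0, 1, 127, 128, 255, 256, -1, -255):
        assert vch2bn(bn2vch(v)) == v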

View File

@@ -1,106 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
This module contains utilities for doing coverage analysis on the RPC
interface.
It provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `navcoin-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w') as f:
f.writelines(list(commands))
return True
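# Illustrative usage sketch: wrap a proxy so every RPC method name invoked during a
# test is appended to a per-node coverage log under dirname.
def _example_coverage(proxy, dirname):
    logfile = get_filename(dirname, 0)
    wrapped = AuthServiceProxyWrapper(proxy, logfile)
    wrapped.getblockcount()   # delegated to the proxy; "getblockcount" is logged
    write_all_rpc_commands(dirname, proxy)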

File diff suppressed because it is too large

View File

@@ -1,943 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# script.py
#
# This file is modified from python-navcoinlib.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import struct
from .bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_CHECKSEQUENCEVERIFY = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(absvalue & 0xff)
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means that indexing does *not* work - you'll get an index by
byte rather than opcode. This format was chosen for efficiency so that the
general case would not require creating a lot of little CScriptOP objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, int):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as
determining the exact opcode byte indexes. (sop_idx)
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOP instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return b"x('%s')" % hexlify(o).decode('ascii')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
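# Illustrative sketch: composing a pay-to-pubkey-hash style script with CScript and
# iterating it back out. The 20-byte hash is a dummy placeholder.
def _example_cscript():
    h160 = b'\x00' * 20
    script = CScript([OP_DUP, OP_HASH160, h160, OP_EQUALVERIFY, OP_CHECKSIG])
    assert list(script) == [OP_DUP, OP_HASH160, h160, OP_EQUALVERIFY, OP_CHECKSIG]
    assert script.GetSigOpCount(True) == 1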
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut())
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
# for version 0 witnesses.
def SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, amount):
hashPrevouts = 0
hashSequence = 0
hashOutputs = 0
if not (hashtype & SIGHASH_ANYONECANPAY):
serialize_prevouts = bytes()
for i in txTo.vin:
serialize_prevouts += i.prevout.serialize()
hashPrevouts = uint256_from_str(hash256(serialize_prevouts))
if (not (hashtype & SIGHASH_ANYONECANPAY) and (hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_sequence = bytes()
for i in txTo.vin:
serialize_sequence += struct.pack("<I", i.nSequence)
hashSequence = uint256_from_str(hash256(serialize_sequence))
if ((hashtype & 0x1f) != SIGHASH_SINGLE and (hashtype & 0x1f) != SIGHASH_NONE):
serialize_outputs = bytes()
for o in txTo.vout:
serialize_outputs += o.serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
serialize_outputs = txTo.vout[inIdx].serialize()
hashOutputs = uint256_from_str(hash256(serialize_outputs))
ss = bytes()
ss += struct.pack("<i", txTo.nVersion)
ss += ser_uint256(hashPrevouts)
ss += ser_uint256(hashSequence)
ss += txTo.vin[inIdx].prevout.serialize()
ss += ser_string(script)
ss += struct.pack("<q", amount)
ss += struct.pack("<I", txTo.vin[inIdx].nSequence)
ss += ser_uint256(hashOutputs)
ss += struct.pack("<i", txTo.nLockTime)
ss += struct.pack("<I", hashtype)
return hash256(ss)
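# Illustrative sketch: legacy signature hash for input 0 of a transaction built with
# the CTransaction class imported above from the companion mininode module.
def _example_sighash(tx, prevout_script_bytes):
    sighash, err = SignatureHash(CScript(prevout_script_bytes), tx, 0, SIGHASH_ALL)
    assert err is None
    return sighash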

View File

@@ -1,63 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Specialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
return n >> (64 - b) | (n & ((1 << (64 - b)) - 1)) << b
def siphash_round(v0, v1, v2, v3):
v0 = (v0 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 13)
v1 ^= v0
v0 = rotl64(v0, 32)
v2 = (v2 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 16)
v3 ^= v2
v0 = (v0 + v3) & ((1 << 64) - 1)
v3 = rotl64(v3, 21)
v3 ^= v0
v2 = (v2 + v1) & ((1 << 64) - 1)
v1 = rotl64(v1, 17)
v1 ^= v2
v2 = rotl64(v2, 32)
return (v0, v1, v2, v3)
def siphash256(k0, k1, h):
n0 = h & ((1 << 64) - 1)
n1 = (h >> 64) & ((1 << 64) - 1)
n2 = (h >> 128) & ((1 << 64) - 1)
n3 = (h >> 192) & ((1 << 64) - 1)
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
v3 = 0x7465646279746573 ^ k1 ^ n0
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n0
v3 ^= n1
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n1
v3 ^= n2
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n2
v3 ^= n3
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= n3
v3 ^= 0x2000000000000000
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0 ^= 0x2000000000000000
v2 ^= 0xFF
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
return v0 ^ v1 ^ v2 ^ v3
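# Illustrative sketch: siphash256 maps a 256-bit integer (e.g. a block or transaction
# hash) to a 64-bit value under two 64-bit keys. The key and message values below are
# placeholders, not known test vectors.
def _example_siphash256():
    k0, k1 = 0x0706050403020100, 0x0F0E0D0C0B0A0908
    h = int.from_bytes(bytes(range(32)), 'little')
    return siphash256(k0, k1, h)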

View File

@@ -1,700 +0,0 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import http.client
import random
import shutil
import subprocess
import time
import re
import errno
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
COVERAGE_DIR = None
# The maximum number of nodes a single test can spawn
MAX_NODES = 8
# Don't assign rpc or p2p ports lower than this
PORT_MIN = 11000
# The number of ports to "reserve" for p2p and rpc, each
PORT_RANGE = 5000
NAVCOIND_PROC_WAIT_TIMEOUT = 60
class PortSeed:
# Must be initialized with a unique integer for each process
n = None
#Set Mocktime default to OFF.
#MOCKTIME is only needed for scripts that use the
#cached version of the blockchain. If the cached
#version of the blockchain is used without MOCKTIME
#then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
    #For backward compatibility of the python scripts
#with previous versions of the cache, set MOCKTIME
#to Jan 1, 2014 + (201 * 10 * 60)
global MOCKTIME
MOCKTIME = 1388534400 + (201 * 10 * 60)
def disable_mocktime():
global MOCKTIME
MOCKTIME = 0
def get_mocktime():
return MOCKTIME
def enable_coverage(dirname):
"""Maintain a log of which RPC calls are made during testing."""
global COVERAGE_DIR
COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
"""
Args:
url (str): URL of the RPC server to call
node_number (int): the node number (or id) that this calls to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
AuthServiceProxy. convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
COVERAGE_DIR, node_number) if COVERAGE_DIR else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def p2p_port(n):
assert(n <= MAX_NODES)
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def check_json_precision():
"""Make sure json library being used does not lose precision converting NAV values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def sync_blocks(rpc_connections, wait=1, timeout=60):
"""
Wait until everybody has the same tip
"""
while timeout > 0:
tips = [ x.getbestblockhash() for x in rpc_connections ]
if tips == [ tips[0] ]*len(tips):
#if all x.getblockhash() in tips are the same return True
return True
time.sleep(wait)
timeout -= wait
raise AssertionError("Block sync failed")
def sync_mempools(rpc_connections, wait=1, timeout=60):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while timeout > 0:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
return True
time.sleep(wait)
timeout -= wait
raise AssertionError("Mempool sync failed")
navcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
rpc_u, rpc_p = rpc_auth_pair(n)
with open(os.path.join(datadir, "navcoin.conf"), 'w') as f:
f.write("devnet=1\n")
f.write("rpcuser=" + rpc_u + "\n")
f.write("rpcpassword=" + rpc_p + "\n")
f.write("port="+str(p2p_port(n))+"\n")
f.write("rpcport="+str(rpc_port(n))+"\n")
f.write("listenonion=0\n")
f.write("dandelion=0\n")
f.write("ntpminmeasures=-1\n")
f.write("torserver=0\n")
f.write("suppressblsctwarning=1\n")
return datadir
def rpc_auth_pair(n):
return 'rpcuser💻' + str(n), 'rpcpass🔑' + str(n)
def rpc_url(i, rpchost=None):
rpc_u, rpc_p = rpc_auth_pair(i)
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, rpchost or '127.0.0.1', rpc_port(i))
def wait_for_navcoind_start(process, url, i):
'''
Wait for navcoind to start. This means that RPC is accessible and fully initialized.
Raise an exception if navcoind exits during initialization.
'''
polls_interval = 1.0 / 4
runtime = 60
while runtime > 0:
if process.poll() is not None:
raise Exception('navcoind exited with status %i during initialization' % process.returncode)
try:
# print('Checking RPC')
rpc = get_rpc_proxy(url, i)
blocks = rpc.getblockcount()
# print('RPC replied with blocks: %i' % blocks)
return # break out of loop on success
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
# else:
# print('Waiting for port')
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
                raise # unknown JSON RPC exception
# else:
# print('RPC in warmup')
time.sleep(polls_interval)
runtime -= polls_interval
raise Exception('navcoind RPC timeout')
def initialize_chain(test_dir, num_nodes):
"""
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache
"""
assert num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
if not os.path.isdir(os.path.join('cache', 'node'+str(i))):
create_cache = True
break
if create_cache:
#find and delete old cache directories if any exist
for i in range(MAX_NODES):
if os.path.isdir(os.path.join("cache","node"+str(i))):
shutil.rmtree(os.path.join("cache","node"+str(i)))
# Create cache directories, run navcoinds:
for i in range(MAX_NODES):
datadir=initialize_datadir("cache", i)
args = [ os.getenv("NAVCOIND", "navcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
navcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: navcoind started, waiting for RPC to come up")
wait_for_navcoind_start(navcoind_processes[i], rpc_url(i), i)
if os.getenv("PYTHON_DEBUG", ""):
print("initialize_chain: RPC succesfully started")
rpcs = []
for i in range(MAX_NODES):
try:
rpcs.append(get_rpc_proxy(rpc_url(i), i))
except:
                sys.stderr.write("Error connecting to " + rpc_url(i) + "\n")
sys.exit(1)
        # Create a 200-block-long chain; each of the first 4 nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
# initialize_chain, only 4 nodes will generate coins.
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
enable_mocktime()
block_time = get_mocktime() - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
slow_gen(rpcs[peer], 1)
block_time += 10*60
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
wait_navcoinds()
disable_mocktime()
for i in range(MAX_NODES):
os.remove(log_filename("cache", i, "debug.log"))
os.remove(log_filename("cache", i, "db.log"))
os.remove(log_filename("cache", i, "peers.dat"))
os.remove(log_filename("cache", i, "fee_estimates.dat"))
for i in range(num_nodes):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in navcoin.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
    match = re.match(r'(\[[0-9a-fA-F:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start a navcoind and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("NAVCOIND", "navcoind")
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-mocktime="+str(get_mocktime()) ]
if extra_args is not None: args.extend(extra_args)
navcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print("start_node: navcoind started, waiting for RPC to come up")
url = rpc_url(i, rpchost)
wait_for_navcoind_start(navcoind_processes[i], url, i)
if os.getenv("PYTHON_DEBUG", ""):
print("start_node: RPC succesfully started")
proxy = get_rpc_proxy(url, i, timeout=timewait)
if COVERAGE_DIR:
coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
"""
Start multiple navcoinds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for _ in range(num_nodes) ]
if binary is None: binary = [ None for _ in range(num_nodes) ]
rpcs = []
try:
for i in range(num_nodes):
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
except: # If one node failed to start, stop the others
stop_nodes(rpcs)
raise
return rpcs
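# Illustrative usage sketch: bring up two connected devnet nodes from a prepared test
# directory, then shut them down cleanly.
def _example_two_nodes(test_dir):
    nodes = start_nodes(2, test_dir)
    connect_nodes_bi(nodes, 0, 1)
    stop_nodes(nodes)
    wait_navcoinds()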
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "devnet", logname)
def stop_node(node, i):
try:
node.stop()
except http.client.CannotSendRequest as e:
print("WARN: Unable to stop node: " + repr(e))
navcoind_processes[i].wait(timeout=NAVCOIND_PROC_WAIT_TIMEOUT)
del navcoind_processes[i]
def stop_nodes(nodes):
for node in nodes:
try:
node.stop()
except http.client.CannotSendRequest as e:
print("WARN: Unable to stop node: " + repr(e))
del nodes[:] # Emptying array closes connections as a side effect
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def wait_navcoinds():
# Wait for all navcoinds to cleanly exit
for navcoind in navcoind_processes.values():
navcoind.wait(timeout=NAVCOIND_PROC_WAIT_TIMEOUT)
navcoind_processes.clear()
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = tx_size * fee_per_kB / 1000
if fee < target_fee:
raise AssertionError("Fee of %s NAV too low! (Should be %s NAV)"%(str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s NAV too high! (Should be %s NAV)"%(str(fee), str(target_fee)))
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc:
pass
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find == True:
assert_equal(expected, { })
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find == True:
num_matched = num_matched+1
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects were found %s"%(str(to_match)))
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
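# Illustrative usage sketch (not part of the original file); the RPC name, error
# code and message below are placeholders, not values defined by this framework:
#     assert_raises_rpc_error(-8, "Invalid parameter", node.some_rpc, bad_arg)
# passes only if node.some_rpc(bad_arg) raises a JSONRPCException with code -8
# and a message containing "Invalid parameter".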
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
Test against error code and message if the rpc fails.
Returns whether a JSONRPCException was raised."""
try:
fun(*args, **kwds)
except JSONRPCException as e:
# JSONRPCException was thrown as expected. Check the code and message values are correct.
if (code is not None) and (code != e.error["code"]):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
return True
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
return False
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5*count)+101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in range(iterations):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value/2)
outputs[addr2] = satoshi_round(send_value/2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
txid = node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
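# Illustrative usage sketch (not part of the original file); the fee below is an
# assumed value, chosen only to be large enough for relay:
#     utxos = create_confirmed_utxos(Decimal("0.001"), nodes[0], 50)
# generates int(0.5 * 50) + 101 = 126 blocks, splits wallet outputs until at
# least 50 confirmed utxos exist, and returns node.listunspent().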
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
for i in range (512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in range(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
def create_tx(node, coinbase, to_address, amount):
inputs = [{ "txid" : coinbase, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, fee):
addr = node.getnewaddress()
txids = []
for i in range(len(utxos)):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr] = satoshi_round(send_value)
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
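# Illustrative usage sketch (not part of the original file), combining the
# helpers above to fill the mempool with large transactions; the fee is an
# assumed value:
#     txouts = gen_return_txouts()
#     utxos = create_confirmed_utxos(Decimal("0.01"), nodes[0], 100)
#     txids = create_lots_of_big_transactions(nodes[0], txouts, utxos, Decimal("0.01"))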
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
def slow_gen(node, count, sleep = 0.1):
total = count
blocks = []
while total > 0:
now = min(total, 10)
blocks.extend(node.generate(now))
total -= now
time.sleep(sleep)
return blocks

View File

@@ -51,6 +51,9 @@ MSG_TYPE_MASK = 0xffffffff >> 2
def sha256(s):
return hashlib.new('sha256', s).digest()
def ripemd160(s):
return hashlib.new('ripemd160', s).digest()
def hash256(s):
return sha256(sha256(s))
@@ -181,18 +184,11 @@ def ser_string_vector(l):
return r
# Deserialize from bytes
def FromBytes(obj, tx_bytes):
obj.deserialize(BytesIO(tx_bytes))
return obj
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
return bytes_to_hex_str(obj.serialize())
@@ -422,114 +418,43 @@ class CTxOut:
bytes_to_hex_str(self.scriptPubKey))
class SpendDescription:
def deserialize(self, f):
self.cv = deser_uint256(f)
self.anchor = deser_uint256(f)
self.nullifier = deser_uint256(f)
self.rk = deser_uint256(f)
self.zkproof = f.read(192)
self.spendAuthSig = f.read(64)
def serialize(self):
r = b""
r += ser_uint256(self.cv)
r += ser_uint256(self.anchor)
r += ser_uint256(self.nullifier)
r += ser_uint256(self.rk)
r += self.zkproof
r += self.spendAuthSig
return r
class OutputDescription:
def deserialize(self, f):
self.cv = deser_uint256(f)
self.cmu = deser_uint256(f)
self.ephemeralKey = deser_uint256(f)
self.encCiphertext = f.read(580)
self.outCiphertext = f.read(80)
self.zkproof = f.read(192)
def serialize(self):
r = b""
r += ser_uint256(self.cv)
r += ser_uint256(self.cmu)
r += ser_uint256(self.ephemeralKey)
r += self.encCiphertext
r += self.outCiphertext
r += self.zkproof
return r
class SaplingTxData:
def deserialize(self, f):
self.pre = f.read(1)
self.valueBalance = struct.unpack("<q", f.read(8))[0]
self.vShieldedSpend = deser_vector(f, SpendDescription)
self.vShieldedOutput = deser_vector(f, OutputDescription)
self.bindingSig = f.read(64)
def serialize(self):
r = b""
r += self.pre
r += struct.pack("<q", self.valueBalance)
r += ser_vector(self.vShieldedSpend)
r += ser_vector(self.vShieldedOutput)
r += self.bindingSig
return r
class CTransaction:
def __init__(self, tx=None):
if tx is None:
self.nVersion = 1
self.nType = 0
self.vin = []
self.vout = []
self.sapData = None
self.extraData = b""
self.sapData = b""
self.nLockTime = 0
self.sha256 = None
self.hash = None
else:
self.nVersion = tx.nVersion
self.nType = tx.nType
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.nLockTime = tx.nLockTime
self.sapData = tx.sapData
self.extraData = tx.extraData
self.sha256 = tx.sha256
self.hash = tx.hash
def deserialize(self, f):
self.nVersion = struct.unpack("<h", f.read(2))[0]
self.nType = struct.unpack("<h", f.read(2))[0]
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
if self.nVersion >= 3:
self.sapData = SaplingTxData()
self.sapData.deserialize(f)
if self.nType != 0:
self.extraData = deser_string(f)
if self.nVersion >= 2:
self.sapData = deser_string(f)
self.sha256 = None
self.hash = None
def serialize_without_witness(self):
r = b""
r += struct.pack("<h", self.nVersion)
r += struct.pack("<h", self.nType)
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
if self.nVersion >= 3:
r += self.sapData.serialize()
if self.nType != 0:
r += ser_string(self.extraData)
if self.nVersion >= 2:
r += ser_string(self.sapData)
return r
# Regular serialization is with witness -- must explicitly
@@ -579,8 +504,8 @@ class CTransaction:
x.prevout.hash == outpoint.hash and x.prevout.n == outpoint.n]) > 0
def __repr__(self):
return "CTransaction(nVersion=%i nType=%i vin=%s vout=%s nLockTime=%i)" \
% (self.nVersion, self.nType, repr(self.vin), repr(self.vout), self.nLockTime)
return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
% (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader:

View File

@@ -1,159 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2024 tecnovert
# Copyright (c) 2024 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util.address import decodeAddress
from basicswap.contrib.mnemonic import Mnemonic
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
OP_HASH160,
OP_EQUALVERIFY,
OP_CHECKSIG,
)
class DASHInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.DASH
def __init__(self, coin_settings, network, swap_client=None):
super().__init__(coin_settings, network, swap_client)
self._wallet_passphrase = ""
self._have_checked_seed = False
self._wallet_v20_compatible = (
False
if not swap_client
else swap_client.getChainClientSettings(self.coin_type()).get(
"wallet_v20_compatible", False
)
)
def decodeAddress(self, address: str) -> bytes:
return decodeAddress(address)[1:]
def getWalletSeedID(self) -> str:
hdseed: str = self.rpc_wallet("dumphdinfo")["hdseed"]
return self.getSeedHash(bytes.fromhex(hdseed)).hex()
def entropyToMnemonic(self, key: bytes) -> None:
return Mnemonic("english").to_mnemonic(key)
def initialiseWallet(self, key_bytes: bytes, restore_time: int = -1) -> None:
self._have_checked_seed = False
if self._wallet_v20_compatible:
self._log.warning("Generating wallet compatible with v20 seed.")
words = self.entropyToMnemonic(key_bytes)
mnemonic_passphrase = ""
self.rpc_wallet(
"upgradetohd", [words, mnemonic_passphrase, self._wallet_passphrase]
)
self._have_checked_seed = False
if self._wallet_passphrase != "":
self.unlockWallet(self._wallet_passphrase)
return
key_wif = self.encodeKey(key_bytes)
self.rpc_wallet("sethdseed", [True, key_wif])
def checkExpectedSeed(self, expect_seedid: str) -> bool:
self._expect_seedid_hex = expect_seedid
try:
rv = self.rpc_wallet("dumphdinfo")
except Exception as e:
self._log.debug(f"DASH dumphdinfo failed {e}.")
return False
if rv["mnemonic"] != "":
entropy = Mnemonic("english").to_entropy(rv["mnemonic"].split(" "))
entropy_hash = self.getAddressHashFromKey(entropy)[::-1].hex()
have_expected_seed: bool = expect_seedid == entropy_hash
else:
have_expected_seed: bool = expect_seedid == self.getWalletSeedID()
self._have_checked_seed = True
return have_expected_seed
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee, False, False, self._conf_target]
return self.rpc_wallet("sendtoaddress", params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc_wallet("getwalletinfo")["balance"])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
return pay_fee
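# Illustrative arithmetic (not part of the original file), assuming fee_rate is
# expressed in satoshis per 1000 bytes: a 300 byte serialized tx becomes
# size = 300 + 107 = 407, so pay_fee = round(1000 * 407 / 1000) = 407 satoshis
# at fee_rate=1000.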
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc_wallet("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
return None
def unlockWallet(self, password: str, check_seed: bool = True) -> None:
super().unlockWallet(password, check_seed)
if self._wallet_v20_compatible:
# Store password for initialiseWallet
self._wallet_passphrase = password
def lockWallet(self):
super().lockWallet()
self._wallet_passphrase = ""
def encryptWallet(
self, old_password: str, new_password: str, check_seed: bool = True
):
if old_password != "":
self.unlockWallet(old_password, check_seed=False)
seed_id_before: str = self.getWalletSeedID()
self.rpc_wallet("encryptwallet", [new_password])
if check_seed is False or seed_id_before == "Not found":
return
self.unlockWallet(new_password, check_seed=False)
seed_id_after: str = self.getWalletSeedID()
self.lockWallet()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
def changeWalletPassword(
self, old_password: str, new_password: str, check_seed_if_encrypt: bool = True
):
self._log.info("changeWalletPassword - {}".format(self.ticker()))
if old_password == "":
if self.isWalletEncrypted():
raise ValueError("Old password must be set")
return self.encryptWallet(old_password, new_password, check_seed_if_encrypt)
self.rpc_wallet("walletpassphrasechange", [old_password, new_password])

View File

@@ -1,5 +0,0 @@
from .dcr import DCRInterface
__all__ = [
"DCRInterface",
]

File diff suppressed because it is too large

View File

@@ -1,212 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import copy
from enum import IntEnum
from basicswap.util.crypto import blake256
from basicswap.util.integer import decode_compactsize, encode_compactsize
class TxSerializeType(IntEnum):
Full = 0
NoWitness = 1
OnlyWitness = 2
class SigHashType(IntEnum):
SigHashAll = 0x1
SigHashNone = 0x2
SigHashSingle = 0x3
SigHashAnyOneCanPay = 0x80
SigHashMask = 0x1F
class SignatureType(IntEnum):
STEcdsaSecp256k1 = 0
STEd25519 = 1
STSchnorrSecp256k1 = 2
class COutPoint:
__slots__ = ("hash", "n", "tree")
def __init__(self, hash=0, n=0, tree=0):
self.hash = hash
self.n = n
self.tree = tree
def get_hash(self) -> bytes:
return self.hash.to_bytes(32, "big")
class CTxIn:
__slots__ = (
"prevout",
"sequence",
"value_in",
"block_height",
"block_index",
"signature_script",
) # Witness
def __init__(self, prevout=COutPoint(), sequence=0):
self.prevout = prevout
self.sequence = sequence
self.value_in = -1
self.block_height = 0
self.block_index = 0xFFFFFFFF
self.signature_script = bytes()
class CTxOut:
__slots__ = ("value", "version", "script_pubkey")
def __init__(self, value=0, script_pubkey=bytes()):
self.value = value
self.version = 0
self.script_pubkey = script_pubkey
class CTransaction:
__slots__ = ("hash", "version", "vin", "vout", "locktime", "expiry")
def __init__(self, tx=None):
if tx is None:
self.version = 1
self.vin = []
self.vout = []
self.locktime = 0
self.expiry = 0
else:
self.version = tx.version
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.locktime = tx.locktime
self.expiry = tx.expiry
def deserialize(self, data: bytes) -> None:
version = int.from_bytes(data[:4], "little")
self.version = version & 0xFFFF
ser_type: int = version >> 16
o = 4
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.NoWitness:
num_txin, nb = decode_compactsize(data, o)
o += nb
for i in range(num_txin):
txi = CTxIn()
txi.prevout = COutPoint()
txi.prevout.hash = int.from_bytes(data[o : o + 32], "little")
o += 32
txi.prevout.n = int.from_bytes(data[o : o + 4], "little")
o += 4
txi.prevout.tree = data[o]
o += 1
txi.sequence = int.from_bytes(data[o : o + 4], "little")
o += 4
self.vin.append(txi)
num_txout, nb = decode_compactsize(data, o)
o += nb
for i in range(num_txout):
txo = CTxOut()
txo.value = int.from_bytes(data[o : o + 8], "little")
o += 8
txo.version = int.from_bytes(data[o : o + 2], "little")
o += 2
script_bytes, nb = decode_compactsize(data, o)
o += nb
txo.script_pubkey = data[o : o + script_bytes]
o += script_bytes
self.vout.append(txo)
self.locktime = int.from_bytes(data[o : o + 4], "little")
o += 4
self.expiry = int.from_bytes(data[o : o + 4], "little")
o += 4
if ser_type == TxSerializeType.NoWitness:
return
num_wit_scripts, nb = decode_compactsize(data, o)
o += nb
if ser_type == TxSerializeType.OnlyWitness:
self.vin = [CTxIn() for _ in range(num_wit_scripts)]
else:
if num_wit_scripts != len(self.vin):
raise ValueError("non equal witness and prefix txin quantities")
for i in range(num_wit_scripts):
txi = self.vin[i]
txi.value_in = int.from_bytes(data[o : o + 8], "little")
o += 8
txi.block_height = int.from_bytes(data[o : o + 4], "little")
o += 4
txi.block_index = int.from_bytes(data[o : o + 4], "little")
o += 4
script_bytes, nb = decode_compactsize(data, o)
o += nb
txi.signature_script = data[o : o + script_bytes]
o += script_bytes
def serialize(self, ser_type=TxSerializeType.Full) -> bytes:
data = bytes()
version = (self.version & 0xFFFF) | (ser_type << 16)
data += version.to_bytes(4, "little")
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.NoWitness:
data += encode_compactsize(len(self.vin))
for txi in self.vin:
data += txi.prevout.hash.to_bytes(32, "little")
data += txi.prevout.n.to_bytes(4, "little")
data += txi.prevout.tree.to_bytes(1, "little")
data += txi.sequence.to_bytes(4, "little")
data += encode_compactsize(len(self.vout))
for txo in self.vout:
data += txo.value.to_bytes(8, "little")
data += txo.version.to_bytes(2, "little")
data += encode_compactsize(len(txo.script_pubkey))
data += txo.script_pubkey
data += self.locktime.to_bytes(4, "little")
data += self.expiry.to_bytes(4, "little")
if ser_type == TxSerializeType.Full or ser_type == TxSerializeType.OnlyWitness:
data += encode_compactsize(len(self.vin))
for txi in self.vin:
tc_value_in = (
txi.value_in & 0xFFFFFFFFFFFFFFFF
) # Convert negative values
data += tc_value_in.to_bytes(8, "little")
data += txi.block_height.to_bytes(4, "little")
data += txi.block_index.to_bytes(4, "little")
data += encode_compactsize(len(txi.signature_script))
data += txi.signature_script
return data
def TxHash(self) -> bytes:
return blake256(self.serialize(TxSerializeType.NoWitness))[::-1]
def TxHashWitness(self) -> bytes:
raise ValueError("todo")
def TxHashFull(self) -> bytes:
raise ValueError("todo")
def findOutput(tx, script_pk: bytes):
for i in range(len(tx.vout)):
if tx.vout[i].script_pubkey == script_pk:
return i
return None
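# Illustrative round-trip sketch (not part of the original file); the import
# path is an assumption and may differ from where this module actually lives:
#     from basicswap.interface.dcr.messages import (
#         CTransaction, CTxIn, CTxOut, COutPoint, TxSerializeType)
#     tx = CTransaction()
#     tx.vin.append(CTxIn(COutPoint(hash=0, n=0)))
#     tx.vout.append(CTxOut(value=1000, script_pubkey=bytes(25)))
#     data = tx.serialize(TxSerializeType.NoWitness)
#     tx2 = CTransaction()
#     tx2.deserialize(data)
#     assert tx2.vout[0].value == 1000 and tx2.TxHash() == tx.TxHash()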

View File

@@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
import traceback
from basicswap.rpc import Jsonrpc
def callrpc(rpc_port, auth, method, params=[], host="127.0.0.1"):
try:
url = "http://{}@{}:{}/".format(auth, host, rpc_port)
x = Jsonrpc(url)
x.__handler = None
v = x.json_request(method, params)
x.close()
r = json.loads(v.decode("utf-8"))
except Exception as ex:
traceback.print_exc()
raise ValueError("RPC server error " + str(ex) + ", method: " + method)
if "error" in r and r["error"] is not None:
raise ValueError("RPC error " + str(r["error"]))
return r["result"]
def openrpc(rpc_port, auth, host="127.0.0.1"):
try:
url = "http://{}@{}:{}/".format(auth, host, rpc_port)
return Jsonrpc(url)
except Exception as ex:
traceback.print_exc()
raise ValueError("RPC error " + str(ex))
def make_rpc_func(port, auth, host="127.0.0.1"):
port = port
auth = auth
host = host
def rpc_func(method, params=None):
return callrpc(port, auth, method, params, host)
return rpc_func
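# Illustrative usage sketch (not part of the original file); the port and
# credentials are placeholders:
#     rpc = make_rpc_func(9109, "user:password")
#     best_hash = rpc("getbestblockhash")
#     header = rpc("getblockheader", [best_hash])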

View File

@@ -1,50 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
OP_0 = 0x00
OP_DATA_1 = 0x01
OP_1NEGATE = 0x4F
OP_1 = 0x51
OP_IF = 0x63
OP_ELSE = 0x67
OP_ENDIF = 0x68
OP_DROP = 0x75
OP_DUP = 0x76
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_PUSHDATA1 = 0x4C
OP_PUSHDATA2 = 0x4D
OP_PUSHDATA4 = 0x4E
OP_HASH160 = 0xA9
OP_CHECKSIG = 0xAC
OP_CHECKMULTISIG = 0xAE
OP_CHECKSEQUENCEVERIFY = 0xB2
def push_script_data(data_array: bytearray, data: bytes) -> None:
len_data: int = len(data)
if len_data == 0 or (len_data == 1 and data[0] == 0):
data_array += bytes((OP_0,))
return
if len_data == 1 and data[0] <= 16:
data_array += bytes((OP_1 - 1 + data[0],))
return
if len_data == 1 and data[0] == 0x81:
data_array += bytes((OP_1NEGATE,))
return
if len_data < OP_PUSHDATA1:
data_array += len_data.to_bytes(1, "little")
elif len_data <= 0xFF:
data_array += bytes((OP_PUSHDATA1, len_data))
elif len_data <= 0xFFFF:
data_array += bytes((OP_PUSHDATA2,)) + len_data.to_bytes(2, "little")
else:
data_array += bytes((OP_PUSHDATA4,)) + len_data.to_bytes(4, "little")
data_array += data
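# Illustrative worked examples (not part of the original file) of the push
# encodings produced above:
#     a small integer such as b"\x05" appends the single opcode 0x55 (OP_1 + 4)
#     20 bytes of data append the length byte 0x14 followed by the 20 bytes
#     80 bytes of data append OP_PUSHDATA1 (0x4c), the length byte 0x50, then the data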

View File

@@ -1,68 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import os
import select
import subprocess
def createDCRWallet(args, hex_seed, logging, delay_event):
logging.info("Creating DCR wallet")
(pipe_r, pipe_w) = os.pipe() # subprocess.PIPE is buffered, blocks when read
if os.name == "nt":
str_args = " ".join(args)
p = subprocess.Popen(
str_args, shell=True, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w
)
else:
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w)
def readOutput():
buf = os.read(pipe_r, 1024).decode("utf-8")
response = None
if "Opened wallet" in buf:
pass
elif "Use the existing configured private passphrase" in buf:
response = b"y\n"
elif "Do you want to add an additional layer of encryption" in buf:
response = b"n\n"
elif "Do you have an existing wallet seed" in buf:
response = b"y\n"
elif "Enter existing wallet seed" in buf:
response = (hex_seed + "\n").encode("utf-8")
elif "Seed input successful" in buf:
pass
elif "Upgrading database from version" in buf:
pass
elif "Ticket commitments db upgrade done" in buf:
pass
elif "The wallet has been created successfully" in buf:
pass
else:
raise ValueError(f"Unexpected output: {buf}")
if response is not None:
p.stdin.write(response)
p.stdin.flush()
try:
while p.poll() is None:
if os.name == "nt":
readOutput()
delay_event.wait(0.1)
continue
while len(select.select([pipe_r], [], [], 0)[0]) == 1:
readOutput()
delay_event.wait(0.1)
except Exception as e:
logging.error(f"dcrwallet --create failed: {e}")
finally:
if p.poll() is None:
p.terminate()
os.close(pipe_r)
os.close(pipe_w)
p.stdin.close()

View File

@@ -1,62 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 The BasicSwap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util.crypto import hash160
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
OP_CHECKSIG,
OP_HASH160,
OP_EQUAL,
OP_EQUALVERIFY,
)
class DOGEInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.DOGE
@staticmethod
def est_lock_tx_vsize() -> int:
return 192
@staticmethod
def xmr_swap_b_lock_spend_tx_vsize() -> int:
return 192
def __init__(self, coin_settings, network, swap_client=None):
super(DOGEInterface, self).__init__(coin_settings, network, swap_client)
def getScriptDest(self, script: bytearray) -> bytearray:
# P2SH
script_hash = hash160(script)
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def encodeScriptDest(self, script_dest: bytes) -> str:
# Extract hash from script
script_hash = script_dest[2:-1]
return self.sh_to_address(script_hash)
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
return pay_fee

View File

@@ -1,474 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2022-2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import hashlib
import random
from .btc import BTCInterface, find_vout_for_address_from_txobj
from basicswap.util import (
i2b,
ensure,
)
from basicswap.rpc import make_rpc_func
from basicswap.util.crypto import hash160
from basicswap.util.address import decodeAddress
from basicswap.chainparams import Coins
from basicswap.interface.contrib.firo_test_framework.script import (
CScript,
OP_DUP,
OP_EQUAL,
OP_HASH160,
OP_CHECKSIG,
OP_EQUALVERIFY,
)
from basicswap.interface.contrib.firo_test_framework.mininode import (
CBlock,
FromHex,
CTransaction,
)
class FIROInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.FIRO
def __init__(self, coin_settings, network, swap_client=None):
super(FIROInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
if "wallet_name" in coin_settings:
raise ValueError(f"Invalid setting for {self.coin_name()}: wallet_name")
def getExchangeName(self, exchange_name: str) -> str:
return "zcoin"
def initialiseWallet(self, key, restore_time: int = -1):
# load with -hdseed= parameter
pass
def checkWallets(self) -> int:
return 1
def encryptWallet(self, password: str, check_seed: bool = True):
# Watchonly wallets are not encrypted
# Firo shuts down after encryptwallet
seed_id_before: str = self.getWalletSeedID() if check_seed else "Not found"
self.rpc_wallet("encryptwallet", [password])
if check_seed is False or seed_id_before == "Not found":
return
seed_id_after: str = self.getWalletSeedID()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
def getNewAddress(self, use_segwit, label="swap_receive"):
return self.rpc("getnewaddress", [label])
# addr_plain = self.rpc('getnewaddress', [label])
# return self.rpc('addwitnessaddress', [addr_plain])
def decodeAddress(self, address):
return decodeAddress(address)[1:]
def encodeSegwitAddress(self, script):
raise ValueError("TODO")
def decodeSegwitAddress(self, addr):
raise ValueError("TODO")
def isWatchOnlyAddress(self, address):
addr_info = self.rpc("validateaddress", [address])
return addr_info["iswatchonly"]
def isAddressMine(self, address: str, or_watch_only: bool = False) -> bool:
addr_info = self.rpc("validateaddress", [address])
if not or_watch_only:
return addr_info["ismine"]
return addr_info["ismine"] or addr_info["iswatchonly"]
def getSCLockScriptAddress(self, lock_script: bytes) -> str:
lock_tx_dest = self.getScriptDest(lock_script)
address = self.encodeScriptDest(lock_tx_dest)
if not self.isAddressMine(address, or_watch_only=True):
# Expects P2WSH nested in BIP16_P2SH
self.rpc("importaddress", [lock_tx_dest.hex(), "bid lock", False, True])
return address
def getLockTxHeight(
self,
txid,
dest_address,
bid_amount,
rescan_from,
find_index: bool = False,
vout: int = -1,
):
# Add watchonly address and rescan if required
if not self.isAddressMine(dest_address, or_watch_only=True):
self.importWatchOnlyAddress(dest_address, "bid")
self._log.info(
"Imported watch-only addr: {}".format(self._log.addr(dest_address))
)
self._log.info(
"Rescanning {} chain from height: {}".format(
self.coin_name(), rescan_from
)
)
self.rescanBlockchainForAddress(rescan_from, dest_address)
return_txid = True if txid is None else False
if txid is None:
txns = self.rpc(
"listunspent",
[
0,
9999999,
[
dest_address,
],
],
)
for tx in txns:
if self.make_int(tx["amount"]) == bid_amount:
txid = bytes.fromhex(tx["txid"])
break
if txid is None:
return None
try:
tx = self.rpc("gettransaction", [txid.hex()])
block_height = 0
if "blockhash" in tx:
block_header = self.rpc("getblockheader", [tx["blockhash"]])
block_height = block_header["height"]
rv = {
"depth": 0 if "confirmations" not in tx else tx["confirmations"],
"height": block_height,
}
except Exception as e:
self._log.debug(
"getLockTxHeight gettransaction failed: %s, %s", txid.hex(), str(e)
)
return None
if find_index:
tx_obj = self.rpc("decoderawtransaction", [tx["hex"]])
rv["index"] = find_vout_for_address_from_txobj(tx_obj, dest_address)
if return_txid:
rv["txid"] = txid.hex()
return rv
def createSCLockTx(
self, value: int, script: bytearray, vkbv: bytes = None
) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vout.append(self.txoType()(value, self.getScriptDest(script)))
return tx.serialize()
def fundSCLockTx(self, tx_bytes, feerate, vkbv=None):
return self.fundTx(tx_bytes, feerate)
def signTxWithWallet(self, tx):
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
options = {
"lockUnspents": lock_unspents,
"feeRate": fee_rate,
}
if sub_fee:
options["subtractFeeFromOutputs"] = [
0,
]
return self.rpc("fundrawtransaction", [txn, options])["hex"]
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getScriptDest(self, script: bytearray) -> bytearray:
# P2SH
script_hash = hash160(script)
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getSeedHash(self, seed: bytes) -> bytes:
return hash160(seed)[::-1]
def encodeScriptDest(self, script_dest: bytes) -> str:
# Extract hash from script
script_hash = script_dest[2:-1]
return self.sh_to_address(script_hash)
def getDestForScriptHash(self, script_hash):
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee]
return self.rpc("sendtoaddress", params)
def getWalletSeedID(self):
return self.rpc("getwalletinfo")["hdmasterkeyid"]
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
return pay_fee
def signTxWithKey(self, tx: bytes, key: bytes) -> bytes:
key_wif = self.encodeKey(key)
rv = self.rpc(
"signrawtransaction",
[
tx.hex(),
[],
[
key_wif,
],
],
)
return bytes.fromhex(rv["hex"])
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
return None
def getProofOfFunds(self, amount_for, extra_commit_bytes):
# TODO: Lock unspent and use same output/s to fund bid
unspents_by_addr = dict()
unspents = self.rpc("listunspent")
for u in unspents:
if u["spendable"] is not True:
continue
if u["address"] not in unspents_by_addr:
unspents_by_addr[u["address"]] = {"total": 0, "utxos": []}
utxo_amount: int = self.make_int(u["amount"], r=1)
unspents_by_addr[u["address"]]["total"] += utxo_amount
unspents_by_addr[u["address"]]["utxos"].append(
(utxo_amount, u["txid"], u["vout"])
)
max_utxos: int = 4
viable_addrs = []
for addr, data in unspents_by_addr.items():
if data["total"] >= amount_for:
# Sort from largest to smallest amount
sorted_utxos = sorted(data["utxos"], key=lambda x: x[0])
# Max outputs required to reach amount_for
utxos_req: int = 0
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
utxos_req += 1
if sum_value >= amount_for:
break
if utxos_req <= max_utxos:
viable_addrs.append(addr)
continue
ensure(
len(viable_addrs) > 0, "Could not find address with enough funds for proof"
)
sign_for_addr: str = random.choice(viable_addrs)
self._log.debug("sign_for_addr %s", sign_for_addr)
prove_utxos = []
sorted_utxos = sorted(
unspents_by_addr[sign_for_addr]["utxos"], key=lambda x: x[0]
)
hasher = hashlib.sha256()
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
outpoint = (bytes.fromhex(utxo[1]), utxo[2])
prove_utxos.append(outpoint)
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
if sum_value >= amount_for:
break
utxos_hash = hasher.digest()
if (
self.using_segwit()
): # TODO: Use isSegwitAddress when scantxoutset can use combo
# 'Address does not refer to key' for non p2pkh
pkh = self.decodeAddress(sign_for_addr)
sign_for_addr = self.pkh_to_address(pkh)
self._log.debug("sign_for_addr converted %s", sign_for_addr)
signature = self.rpc(
"signmessage",
[
sign_for_addr,
sign_for_addr
+ "_swap_proof_"
+ utxos_hash.hex()
+ extra_commit_bytes.hex(),
],
)
return (sign_for_addr, signature, prove_utxos)
def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes):
hasher = hashlib.sha256()
sum_value: int = 0
for outpoint in utxos:
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
utxos_hash = hasher.digest()
passed = self.verifyMessage(
address,
address + "_swap_proof_" + utxos_hash.hex() + extra_commit_bytes.hex(),
signature,
)
ensure(passed is True, "Proof of funds signature invalid")
if self.using_segwit():
address = self.encodeSegwitAddress(decodeAddress(address)[1:])
sum_value: int = 0
for outpoint in utxos:
txout = self.rpc("gettxout", [outpoint[0].hex(), outpoint[1]])
sum_value += self.make_int(txout["value"])
return sum_value
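# Illustrative note (not part of the original file): the message signed by
# getProofOfFunds and checked by verifyProofOfFunds above is
#     address + "_swap_proof_" + utxos_hash.hex() + extra_commit_bytes.hex()
# where utxos_hash is the sha256 of each proved outpoint's txid bytes followed
# by its vout index as two big-endian bytes, so both sides must hash the
# outpoints in the same order.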
def rescanBlockchainForAddress(self, height_start: int, addr_find: str):
# Very ugly workaround for missing `rescanblockchain` rpc command
chain_blocks: int = self.getChainHeight()
current_height: int = chain_blocks
block_hash = self.rpc("getblockhash", [current_height])
script_hash: bytes = self.decodeAddress(addr_find)
find_scriptPubKey = self.getDestForScriptHash(script_hash)
while current_height > height_start:
block_hash = self.rpc("getblockhash", [current_height])
block = self.rpc("getblock", [block_hash, False])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
for tx in decoded_block.vtx:
for txo in tx.vout:
if txo.scriptPubKey == find_scriptPubKey:
tx.rehash()
txid = i2b(tx.sha256)
self._log.info(
"Found output to addr: {} in tx {} in block {}".format(
addr_find, txid.hex(), block_hash
)
)
self._log.info(
"rescanblockchain hack invalidateblock {}".format(
block_hash
)
)
self.rpc("invalidateblock", [block_hash])
self.rpc("reconsiderblock", [block_hash])
return
current_height -= 1
def getBlockWithTxns(self, block_hash: str):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_hex = tx.serialize_with_witness().hex()
tx_dec = self.rpc("decoderawtransaction", [tx_hex])
if "hex" not in tx_dec:
tx_dec["hex"] = tx_hex
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
}
return block_rv

View File

@@ -1,161 +1,15 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Copyright (c) 2020 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins, chainparams
from basicswap.chainparams import Coins
class LTCInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.LTC
def __init__(self, coin_settings, network, swap_client=None):
super(LTCInterface, self).__init__(coin_settings, network, swap_client)
self._rpc_wallet_mweb = coin_settings.get("mweb_wallet_name", "mweb")
self.rpc_wallet_mweb = make_rpc_func(
self._rpcport,
self._rpcauth,
host=self._rpc_host,
wallet=self._rpc_wallet_mweb,
)
def getNewMwebAddress(self, use_segwit=False, label="swap_receive") -> str:
return self.rpc_wallet_mweb("getnewaddress", [label, "mweb"])
def getNewStealthAddress(self, label=""):
return self.getNewMwebAddress(False, label)
def withdrawCoin(self, value, type_from: str, addr_to: str, subfee: bool) -> str:
params = [addr_to, value, "", "", subfee, True, self._conf_target]
if type_from == "mweb":
return self.rpc_wallet_mweb("sendtoaddress", params)
return self.rpc_wallet("sendtoaddress", params)
def createUTXO(self, value_sats: int):
# Create a new address and send value_sats to it
spendable_balance = self.getSpendableBalance()
if spendable_balance < value_sats:
raise ValueError("Balance too low")
address = self.getNewAddress(self._use_segwit, "create_utxo")
return (
self.withdrawCoin(self.format_amount(value_sats), "plain", address, False),
address,
)
def getWalletInfo(self):
rv = super(LTCInterface, self).getWalletInfo()
mweb_info = self.rpc_wallet_mweb("getwalletinfo")
rv["mweb_balance"] = mweb_info["balance"]
rv["mweb_unconfirmed"] = mweb_info["unconfirmed_balance"]
rv["mweb_immature"] = mweb_info["immature_balance"]
return rv
def getUnspentsByAddr(self):
unspent_addr = dict()
unspent = self.rpc_wallet("listunspent")
for u in unspent:
if u.get("spendable", False) is False:
continue
if u.get("solvable", False) is False: # Filter out mweb outputs
continue
if "address" not in u:
continue
if "desc" in u:
desc = u["desc"]
if self.using_segwit:
if self.use_p2shp2wsh():
if not desc.startswith("sh(wpkh"):
continue
else:
if not desc.startswith("wpkh"):
continue
else:
if not desc.startswith("pkh"):
continue
unspent_addr[u["address"]] = unspent_addr.get(
u["address"], 0
) + self.make_int(u["amount"], r=1)
return unspent_addr
class LTCInterfaceMWEB(LTCInterface):
def interface_type(self) -> int:
return Coins.LTC_MWEB
def __init__(self, coin_settings, network, swap_client=None):
super(LTCInterfaceMWEB, self).__init__(coin_settings, network, swap_client)
self._rpc_wallet = coin_settings.get("mweb_wallet_name", "mweb")
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host, wallet=self._rpc_wallet
)
self.rpc_wallet_watch = self.rpc_wallet
def chainparams(self):
return chainparams[Coins.LTC]
def chainparams_network(self):
return chainparams[Coins.LTC][self._network]
def coin_name(self) -> str:
coin_chainparams = chainparams[Coins.LTC]
return coin_chainparams["name"].capitalize() + " MWEB"
def ticker(self) -> str:
ticker = chainparams[Coins.LTC]["ticker"]
if self._network == "testnet":
ticker = "t" + ticker
elif self._network == "regtest":
ticker = "rt" + ticker
return ticker + "_MWEB"
def getNewAddress(self, use_segwit=False, label="swap_receive") -> str:
return self.getNewMwebAddress()
def has_mweb_wallet(self) -> bool:
return "mweb" in self.rpc("listwallets")
def init_wallet(self, password=None):
# If system is encrypted mweb wallet will be created at first unlock
self._log.info("init_wallet - {}".format(self.ticker()))
self._log.info(f"Creating wallet {self._rpc_wallet} for {self.coin_name()}.")
# wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors, load_on_startup
self.rpc("createwallet", ["mweb", False, True, password, False, False, True])
if password is not None:
# Max timeout value, ~3 years
self.rpc_wallet("walletpassphrase", [password, 100000000])
if self.getWalletSeedID() == "Not found":
self._sc.initialiseWallet(self.interface_type())
# Workaround to trigger mweb_spk_man->LoadMWEBKeychain()
self.rpc("unloadwallet", ["mweb"])
self.rpc("loadwallet", ["mweb"])
if password is not None:
self.rpc_wallet("walletpassphrase", [password, 100000000])
self.rpc_wallet("keypoolrefill")
def unlockWallet(self, password: str, check_seed: bool = True) -> None:
if password == "":
return
self._log.info("unlockWallet - {}".format(self.ticker()))
if not self.has_mweb_wallet():
self.init_wallet(password)
else:
# Max timeout value, ~3 years
self.rpc_wallet("walletpassphrase", [password, 100000000])
if check_seed:
self._sc.checkWalletSeed(self.coin_type())

View File

@@ -1,999 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2023 tecnovert
# Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import random
import hashlib
from io import BytesIO
from coincurve.keys import (
PublicKey,
PrivateKey,
)
from basicswap.interface.btc import (
BTCInterface,
extractScriptLockRefundScriptValues,
findOutput,
find_vout_for_address_from_txobj,
)
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins
from basicswap.contrib.mnemonic import Mnemonic
from basicswap.interface.contrib.nav_test_framework.mininode import (
CTxIn,
CTxOut,
CBlock,
COutPoint,
CTransaction,
CTxInWitness,
FromHex,
)
from basicswap.util.crypto import hash160
from basicswap.util.address import (
decodeWif,
pubkeyToAddress,
encodeAddress,
)
from basicswap.util import (
b2i,
i2b,
ensure,
)
from basicswap.basicswap_util import (
getVoutByScriptPubKey,
)
from basicswap.interface.contrib.nav_test_framework.script import (
CScript,
OP_0,
OP_EQUAL,
OP_DUP,
OP_HASH160,
OP_EQUALVERIFY,
OP_CHECKSIG,
SIGHASH_ALL,
SegwitVersion1SignatureHash,
)
class NAVInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.NAV
@staticmethod
def txVersion() -> int:
return 3
@staticmethod
def txoType():
return CTxOut
def __init__(self, coin_settings, network, swap_client=None):
super(NAVInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
if "wallet_name" in coin_settings:
raise ValueError(f"Invalid setting for {self.coin_name()}: wallet_name")
def use_p2shp2wsh(self) -> bool:
# p2sh-p2wsh
return True
def initialiseWallet(self, key, restore_time: int = -1):
# Load with -importmnemonic= parameter
pass
def checkWallets(self) -> int:
return 1
def getWalletSeedID(self):
return self.rpc("getwalletinfo")["hdmasterkeyid"]
def withdrawCoin(self, value, addr_to: str, subfee: bool):
strdzeel = ""
params = [addr_to, value, "", "", strdzeel, subfee]
return self.rpc("sendtoaddress", params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
def signTxWithWallet(self, tx: bytes) -> bytes:
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
def checkExpectedSeed(self, key_hash: str):
try:
rv = self.rpc("dumpmnemonic")
entropy = Mnemonic("english").to_entropy(rv.split(" "))
entropy_hash = self.getAddressHashFromKey(entropy)[::-1].hex()
self._have_checked_seed = True
return entropy_hash == key_hash
except Exception as e:
self._log.warning("checkExpectedSeed failed: {}".format(str(e)))
return False
def getScriptForP2PKH(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2SH-p2wpkh
script = CScript([OP_0, pkh])
script_hash = hash160(script)
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def getInputScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
script = CScript([OP_0, pkh])
return bytes((len(script),)) + script
def encodeSegwitAddress(self, pkh: bytes) -> str:
# P2SH-p2wpkh
script = CScript([OP_0, pkh])
script_hash = hash160(script)
assert len(script_hash) == 20
return encodeAddress(
bytes((self.chainparams_network()["script_address"],)) + script_hash
)
def encodeSegwitAddressScript(self, script: bytes) -> str:
if (
len(script) == 23
and script[0] == OP_HASH160
and script[1] == 20
and script[22] == OP_EQUAL
):
script_hash = script[2:22]
return encodeAddress(
bytes((self.chainparams_network()["script_address"],)) + script_hash
)
raise ValueError("Unknown Script")
def loadTx(self, tx_bytes: bytes) -> CTransaction:
# Load tx from bytes to internal representation
tx = CTransaction()
tx.deserialize(BytesIO(tx_bytes))
return tx
def signTx(
self,
key_bytes: bytes,
tx_bytes: bytes,
input_n: int,
prevout_script,
prevout_value: int,
):
tx = self.loadTx(tx_bytes)
sig_hash = SegwitVersion1SignatureHash(
prevout_script, tx, input_n, SIGHASH_ALL, prevout_value
)
eck = PrivateKey(key_bytes)
return eck.sign(sig_hash, hasher=None) + bytes((SIGHASH_ALL,))
def setTxSignature(self, tx_bytes: bytes, stack) -> bytes:
tx = self.loadTx(tx_bytes)
tx.wit.vtxinwit.clear()
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = stack
return tx.serialize_with_witness()
def getProofOfFunds(self, amount_for, extra_commit_bytes):
# TODO: Lock unspent and use same output/s to fund bid
unspents_by_addr = dict()
unspents = self.rpc("listunspent")
for u in unspents:
if u["spendable"] is not True:
continue
if u["address"] not in unspents_by_addr:
unspents_by_addr[u["address"]] = {"total": 0, "utxos": []}
utxo_amount: int = self.make_int(u["amount"], r=1)
unspents_by_addr[u["address"]]["total"] += utxo_amount
unspents_by_addr[u["address"]]["utxos"].append(
(utxo_amount, u["txid"], u["vout"])
)
max_utxos: int = 4
viable_addrs = []
for addr, data in unspents_by_addr.items():
if data["total"] >= amount_for:
# Sort from largest to smallest amount
sorted_utxos = sorted(data["utxos"], key=lambda x: x[0])
# Max outputs required to reach amount_for
utxos_req: int = 0
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
utxos_req += 1
if sum_value >= amount_for:
break
if utxos_req <= max_utxos:
viable_addrs.append(addr)
continue
ensure(
len(viable_addrs) > 0, "Could not find address with enough funds for proof"
)
sign_for_addr: str = random.choice(viable_addrs)
self._log.debug("sign_for_addr %s", sign_for_addr)
prove_utxos = []
sorted_utxos = sorted(
unspents_by_addr[sign_for_addr]["utxos"], key=lambda x: x[0]
)
hasher = hashlib.sha256()
sum_value: int = 0
for utxo in sorted_utxos:
sum_value += utxo[0]
outpoint = (bytes.fromhex(utxo[1]), utxo[2])
prove_utxos.append(outpoint)
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
if sum_value >= amount_for:
break
utxos_hash = hasher.digest()
if (
self.using_segwit()
): # TODO: Use isSegwitAddress when scantxoutset can use combo
# 'Address does not refer to key' for non p2pkh
addr_info = self.rpc(
"validateaddress",
[
addr,
],
)
if "isscript" in addr_info and addr_info["isscript"] and "hex" in addr_info:
pkh = bytes.fromhex(addr_info["hex"])[2:]
sign_for_addr = self.pkh_to_address(pkh)
self._log.debug("sign_for_addr converted %s", sign_for_addr)
signature = self.rpc(
"signmessage",
[
sign_for_addr,
sign_for_addr
+ "_swap_proof_"
+ utxos_hash.hex()
+ extra_commit_bytes.hex(),
],
)
return (sign_for_addr, signature, prove_utxos)
def verifyProofOfFunds(self, address, signature, utxos, extra_commit_bytes):
hasher = hashlib.sha256()
sum_value: int = 0
for outpoint in utxos:
hasher.update(outpoint[0])
hasher.update(outpoint[1].to_bytes(2, "big"))
utxos_hash = hasher.digest()
passed = self.verifyMessage(
address,
address + "_swap_proof_" + utxos_hash.hex() + extra_commit_bytes.hex(),
signature,
)
ensure(passed is True, "Proof of funds signature invalid")
if self.using_segwit():
address = self.encodeSegwitAddress(self.decodeAddress(address)[1:])
sum_value: int = 0
for outpoint in utxos:
txout = self.rpc("gettxout", [outpoint[0].hex(), outpoint[1]])
sum_value += self.make_int(txout["value"])
return sum_value
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
if sub_fee:
raise ValueError(
"Navcoin fundrawtransaction is missing the subtractFeeFromOutputs parameter"
)
# options['subtractFeeFromOutputs'] = [0,]
fee_rate = self.make_int(fee_rate, r=1)
return self.fundTx(txn, fee_rate, lock_unspents).hex()
def isAddressMine(self, address: str, or_watch_only: bool = False) -> bool:
addr_info = self.rpc("validateaddress", [address])
if not or_watch_only:
return addr_info["ismine"]
return addr_info["ismine"] or addr_info["iswatchonly"]
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
def getBlockchainInfo(self):
rv = self.rpc("getblockchaininfo")
synced = round(rv["verificationprogress"], 3)
if synced >= 0.997:
rv["verificationprogress"] = 1.0
return rv
def encodeScriptDest(self, script_dest: bytes) -> str:
script_hash = script_dest[2:-1] # Extract hash from script
return self.sh_to_address(script_hash)
def encode_p2wsh(self, script: bytes) -> str:
return pubkeyToAddress(self.chainparams_network()["script_address"], script)
def find_prevout_info(self, txn_hex: str, txn_script: bytes):
txjs = self.rpc("decoderawtransaction", [txn_hex])
n = getVoutByScriptPubKey(txjs, self.getScriptDest(txn_script).hex())
return {
"txid": txjs["txid"],
"vout": n,
"scriptPubKey": txjs["vout"][n]["scriptPubKey"]["hex"],
"redeemScript": txn_script.hex(),
"amount": txjs["vout"][n]["value"],
}
def getNewAddress(self, use_segwit: bool, label: str = "swap_receive") -> str:
address: str = self.rpc(
"getnewaddress",
[
label,
],
)
if use_segwit:
return self.rpc(
"addwitnessaddress",
[
address,
],
)
return address
def createRedeemTxn(
self, prevout, output_addr: str, output_value: int, txn_script: bytes
) -> str:
tx = CTransaction()
tx.nVersion = self.txVersion()
prev_txid = b2i(bytes.fromhex(prevout["txid"]))
tx.vin.append(
CTxIn(
COutPoint(prev_txid, prevout["vout"]),
scriptSig=self.getScriptScriptSig(txn_script),
)
)
pkh = self.decodeAddress(output_addr)
script = self.getScriptForPubkeyHash(pkh)
tx.vout.append(self.txoType()(output_value, script))
tx.rehash()
return tx.serialize().hex()
def createRefundTxn(
self,
prevout,
output_addr: str,
output_value: int,
locktime: int,
sequence: int,
txn_script: bytes,
) -> str:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.nLockTime = locktime
prev_txid = b2i(bytes.fromhex(prevout["txid"]))
tx.vin.append(
CTxIn(
COutPoint(prev_txid, prevout["vout"]),
nSequence=sequence,
scriptSig=self.getScriptScriptSig(txn_script),
)
)
pkh = self.decodeAddress(output_addr)
script = self.getScriptForPubkeyHash(pkh)
tx.vout.append(self.txoType()(output_value, script))
tx.rehash()
return tx.serialize().hex()
def getTxSignature(self, tx_hex: str, prevout_data, key_wif: str) -> str:
key = decodeWif(key_wif)
redeem_script = bytes.fromhex(prevout_data["redeemScript"])
sig = self.signTx(
key,
bytes.fromhex(tx_hex),
0,
redeem_script,
self.make_int(prevout_data["amount"]),
)
return sig.hex()
def verifyTxSig(
self,
tx_bytes: bytes,
sig: bytes,
K: bytes,
input_n: int,
prevout_script: bytes,
prevout_value: int,
) -> bool:
tx = self.loadTx(tx_bytes)
sig_hash = SegwitVersion1SignatureHash(
prevout_script, tx, input_n, SIGHASH_ALL, prevout_value
)
pubkey = PublicKey(K)
return pubkey.verify(sig[:-1], sig_hash, hasher=None) # Pop the hashtype byte
def verifyRawTransaction(self, tx_hex: str, prevouts):
# Only checks signature
# verifyrawtransaction
self._log.warning("NAV verifyRawTransaction only checks signature")
inputs_valid: bool = False
validscripts: int = 0
tx_bytes = bytes.fromhex(tx_hex)
tx = self.loadTx(bytes.fromhex(tx_hex))
signature = tx.wit.vtxinwit[0].scriptWitness.stack[0]
pubkey = tx.wit.vtxinwit[0].scriptWitness.stack[1]
input_n: int = 0
prevout_data = prevouts[input_n]
redeem_script = bytes.fromhex(prevout_data["redeemScript"])
prevout_value = self.make_int(prevout_data["amount"])
if self.verifyTxSig(
tx_bytes, signature, pubkey, input_n, redeem_script, prevout_value
):
validscripts += 1
# TODO: validate inputs
inputs_valid = True
return {
"inputs_valid": inputs_valid,
"validscripts": validscripts,
}
def getHTLCSpendTxVSize(self, redeem: bool = True) -> int:
tx_vsize = (
5 # Add a few bytes, sequence in script takes variable amount of bytes
)
tx_vsize += 184 if redeem else 187
return tx_vsize
def getTxid(self, tx) -> bytes:
if isinstance(tx, str):
tx = bytes.fromhex(tx)
if isinstance(tx, bytes):
tx = self.loadTx(tx)
tx.rehash()
return i2b(tx.sha256)
def rescanBlockchainForAddress(self, height_start: int, addr_find: str):
# Very ugly workaround for missing `rescanblockchain` rpc command
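        # invalidateblock rolls the chain back to just before the matched block and the
        # immediate reconsiderblock re-validates forward again, forcing the wallet to
        # reprocess those blocks and index the output to the imported watch-only address.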
chain_blocks: int = self.getChainHeight()
current_height: int = chain_blocks
block_hash = self.rpc("getblockhash", [current_height])
script_hash: bytes = self.decodeAddress(addr_find)
find_scriptPubKey = self.getDestForScriptHash(script_hash)
while current_height > height_start:
block_hash = self.rpc("getblockhash", [current_height])
block = self.rpc("getblock", [block_hash, False])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
for tx in decoded_block.vtx:
for txo in tx.vout:
if txo.scriptPubKey == find_scriptPubKey:
tx.rehash()
txid = i2b(tx.sha256)
self._log.info(
"Found output to addr: {} in tx {} in block {}".format(
addr_find, txid.hex(), block_hash
)
)
self._log.info(
"rescanblockchain hack invalidateblock {}".format(
block_hash
)
)
self.rpc("invalidateblock", [block_hash])
self.rpc("reconsiderblock", [block_hash])
return
current_height -= 1
def getLockTxHeight(
self,
txid,
dest_address,
bid_amount,
rescan_from,
find_index: bool = False,
vout: int = -1,
):
# Add watchonly address and rescan if required
if not self.isAddressMine(dest_address, or_watch_only=True):
self.importWatchOnlyAddress(dest_address, "bid")
self._log.info(
"Imported watch-only addr: {}".format(self._log.addr(dest_address))
)
self._log.info(
"Rescanning {} chain from height: {}".format(
self.coin_name(), rescan_from
)
)
self.rescanBlockchainForAddress(rescan_from, dest_address)
return_txid = True if txid is None else False
if txid is None:
txns = self.rpc(
"listunspent",
[
0,
9999999,
[
dest_address,
],
],
)
for tx in txns:
if self.make_int(tx["amount"]) == bid_amount:
txid = bytes.fromhex(tx["txid"])
break
if txid is None:
return None
try:
tx = self.rpc("gettransaction", [txid.hex()])
block_height = 0
if "blockhash" in tx:
block_header = self.rpc("getblockheader", [tx["blockhash"]])
block_height = block_header["height"]
rv = {
"depth": 0 if "confirmations" not in tx else tx["confirmations"],
"height": block_height,
}
except Exception as e:
self._log.debug(
"getLockTxHeight gettransaction failed: %s, %s", txid.hex(), str(e)
)
return None
if find_index:
tx_obj = self.rpc("decoderawtransaction", [tx["hex"]])
rv["index"] = find_vout_for_address_from_txobj(tx_obj, dest_address)
if return_txid:
rv["txid"] = txid.hex()
return rv
def getBlockWithTxns(self, block_hash):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_hex = tx.serialize_with_witness().hex()
tx_dec = self.rpc("decoderawtransaction", [tx_hex])
if "hex" not in tx_dec:
tx_dec["hex"] = tx_hex
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
}
return block_rv
def getScriptScriptSig(self, script: bytes) -> bytes:
return self.getP2SHP2WSHScriptSig(script)
def getScriptDest(self, script):
return self.getP2SHP2WSHDest(script)
def getDestForScriptHash(self, script_hash):
assert len(script_hash) == 20
return CScript([OP_HASH160, script_hash, OP_EQUAL])
def pubkey_to_segwit_address(self, pk: bytes) -> str:
pkh = hash160(pk)
script_out = self.getScriptForPubkeyHash(pkh)
return self.encodeSegwitAddressScript(script_out)
def createBLockTx(self, Kbs: bytes, output_amount: int, vkbv=None) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
script_pk = self.getPkDest(Kbs)
tx.vout.append(self.txoType()(output_amount, script_pk))
return tx.serialize()
def spendBLockTx(
self,
chain_b_lock_txid: bytes,
address_to: str,
kbv: bytes,
kbs: bytes,
cb_swap_value: int,
b_fee: int,
restore_height: int,
spend_actual_balance: bool = False,
lock_tx_vout=None,
) -> bytes:
self._log.info("spendBLockTx %s:\n", chain_b_lock_txid.hex())
wtx = self.rpc(
"gettransaction",
[
chain_b_lock_txid.hex(),
],
)
lock_tx = self.loadTx(bytes.fromhex(wtx["hex"]))
Kbs = self.getPubkey(kbs)
script_pk = self.getPkDest(Kbs)
locked_n = findOutput(lock_tx, script_pk)
ensure(locked_n is not None, "Output not found in tx")
pkh_to = self.decodeAddress(address_to)
tx = CTransaction()
tx.nVersion = self.txVersion()
chain_b_lock_txid_int = b2i(chain_b_lock_txid)
script_sig = self.getInputScriptForPubkeyHash(self.getPubkeyHash(Kbs))
tx.vin.append(
CTxIn(
COutPoint(chain_b_lock_txid_int, locked_n),
nSequence=0,
scriptSig=script_sig,
)
)
tx.vout.append(
self.txoType()(cb_swap_value, self.getScriptForPubkeyHash(pkh_to))
)
pay_fee = self.getBLockSpendTxFee(tx, b_fee)
tx.vout[0].nValue = cb_swap_value - pay_fee
b_lock_spend_tx = tx.serialize()
b_lock_spend_tx = self.signTxWithKey(b_lock_spend_tx, kbs, cb_swap_value)
return bytes.fromhex(self.publishTx(b_lock_spend_tx))
def signTxWithKey(self, tx: bytes, key: bytes, prev_amount: int) -> bytes:
Key = self.getPubkey(key)
pkh = self.getPubkeyHash(Key)
script = self.getScriptForP2PKH(pkh)
sig = self.signTx(key, tx, 0, script, prev_amount)
stack = [
sig,
Key,
]
return self.setTxSignature(tx, stack)
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
return None
def createSCLockTx(
self, value: int, script: bytearray, vkbv: bytes = None
) -> bytes:
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vout.append(self.txoType()(value, self.getScriptDest(script)))
return tx.serialize()
def fundTx(self, tx_hex: str, feerate: int, lock_unspents: bool = True):
feerate_str = self.format_amount(feerate)
# TODO: unlock unspents if bid cancelled
options = {
"lockUnspents": lock_unspents,
"feeRate": feerate_str,
}
rv = self.rpc("fundrawtransaction", [tx_hex, options])
# Sign transaction then strip witness data to fill scriptsig
rv = self.rpc("signrawtransaction", [rv["hex"]])
tx_signed = self.loadTx(bytes.fromhex(rv["hex"]))
if len(tx_signed.vin) != len(tx_signed.wit.vtxinwit):
raise ValueError("txn has non segwit input")
for witness_data in tx_signed.wit.vtxinwit:
if len(witness_data.scriptWitness.stack) < 2:
raise ValueError("txn has non segwit input")
return tx_signed.serialize_without_witness()
def fundSCLockTx(self, tx_bytes: bytes, feerate, vkbv=None) -> bytes:
tx_funded = self.fundTx(tx_bytes.hex(), feerate)
return tx_funded
def createSCLockRefundTx(
self,
tx_lock_bytes,
script_lock,
Kal,
Kaf,
lock1_value,
csv_val,
tx_fee_rate,
vkbv=None,
):
tx_lock = CTransaction()
tx_lock = self.loadTx(tx_lock_bytes)
output_script = self.getScriptDest(script_lock)
locked_n = findOutput(tx_lock, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock.vout[locked_n].nValue
tx_lock.rehash()
tx_lock_id_int = tx_lock.sha256
refund_script = self.genScriptLockRefundTxScript(Kal, Kaf, csv_val)
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_id_int, locked_n),
nSequence=lock1_value,
scriptSig=self.getScriptScriptSig(script_lock),
)
)
tx.vout.append(self.txoType()(locked_coin, self.getScriptDest(refund_script)))
dummy_witness_stack = self.getScriptLockTxDummyWitness(script_lock)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize(), refund_script, tx.vout[0].nValue
def createSCLockRefundSpendTx(
self,
tx_lock_refund_bytes,
script_lock_refund,
pkh_refund_to,
tx_fee_rate,
vkbv=None,
):
# Returns the coinA locked coin to the leader
# The follower will sign the multisig path with a signature encumbered by the leader's coinB spend pubkey
# If the leader publishes the decrypted signature the leader's coinB spend privatekey will be revealed to the follower
tx_lock_refund = self.loadTx(tx_lock_refund_bytes)
output_script = self.getScriptDest(script_lock_refund)
locked_n = findOutput(tx_lock_refund, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock_refund.vout[locked_n].nValue
tx_lock_refund.rehash()
tx_lock_refund_hash_int = tx_lock_refund.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_refund_hash_int, locked_n),
nSequence=0,
scriptSig=self.getScriptScriptSig(script_lock_refund),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_refund_to))
)
dummy_witness_stack = self.getScriptLockRefundSpendTxDummyWitness(
script_lock_refund
)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundSpendTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
def createSCLockRefundSpendToFTx(
self,
tx_lock_refund_bytes,
script_lock_refund,
pkh_dest,
tx_fee_rate,
vkbv=None,
kbsf=None,
):
# lock refund swipe tx
# Sends the coinA locked coin to the follower
tx_lock_refund = self.loadTx(tx_lock_refund_bytes)
output_script = self.getScriptDest(script_lock_refund)
locked_n = findOutput(tx_lock_refund, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock_refund.vout[locked_n].nValue
A, B, lock2_value, C = extractScriptLockRefundScriptValues(script_lock_refund)
tx_lock_refund.rehash()
tx_lock_refund_hash_int = tx_lock_refund.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_refund_hash_int, locked_n),
nSequence=lock2_value,
scriptSig=self.getScriptScriptSig(script_lock_refund),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_dest))
)
dummy_witness_stack = self.getScriptLockRefundSwipeTxDummyWitness(
script_lock_refund
)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
tx.rehash()
self._log.info(
"createSCLockRefundSpendToFTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
def createSCLockSpendTx(
self, tx_lock_bytes, script_lock, pkh_dest, tx_fee_rate, vkbv=None, fee_info={}
):
tx_lock = self.loadTx(tx_lock_bytes)
output_script = self.getScriptDest(script_lock)
locked_n = findOutput(tx_lock, output_script)
ensure(locked_n is not None, "Output not found in tx")
locked_coin = tx_lock.vout[locked_n].nValue
tx_lock.rehash()
tx_lock_id_int = tx_lock.sha256
tx = CTransaction()
tx.nVersion = self.txVersion()
tx.vin.append(
CTxIn(
COutPoint(tx_lock_id_int, locked_n),
scriptSig=self.getScriptScriptSig(script_lock),
)
)
tx.vout.append(
self.txoType()(locked_coin, self.getScriptForPubkeyHash(pkh_dest))
)
dummy_witness_stack = self.getScriptLockTxDummyWitness(script_lock)
witness_bytes = self.getWitnessStackSerialisedLength(dummy_witness_stack)
vsize = self.getTxVSize(tx, add_witness_bytes=witness_bytes)
pay_fee = round(tx_fee_rate * vsize / 1000)
tx.vout[0].nValue = locked_coin - pay_fee
fee_info["fee_paid"] = pay_fee
fee_info["rate_used"] = tx_fee_rate
fee_info["witness_bytes"] = witness_bytes
fee_info["vsize"] = vsize
tx.rehash()
self._log.info(
"createSCLockSpendTx {}{}.".format(
self._log.id(i2b(tx.sha256)),
(
""
if self._log.safe_logs
else f":\n fee_rate, vsize, fee: {tx_fee_rate}, {vsize}, {pay_fee}"
),
)
)
return tx.serialize()
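
A worked sketch of the fee arithmetic shared by the lock, refund and spend builders above, assuming getTxVSize applies the standard segwit weighting where witness bytes count one quarter toward virtual size (the figures below are made up for illustration):

import math

base_size = 192       # serialised size without witness data, in bytes
witness_bytes = 256   # getWitnessStackSerialisedLength(dummy_witness_stack)
tx_fee_rate = 10000   # fee rate in satoshis per 1000 vbytes

vsize = base_size + math.ceil(witness_bytes / 4)   # 192 + 64 = 256 vbytes
pay_fee = round(tx_fee_rate * vsize / 1000)        # 2560 satoshis
# The builders then deduct pay_fee from the single output: tx.vout[0].nValue = locked_coin - pay_fee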


@@ -2,15 +2,41 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2022 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.chainparams import Coins
from basicswap.util import (
make_int,
)
class NMCInterface(BTCInterface):
@staticmethod
def coin_type():
return Coins.NMC
def getLockTxHeight(self, txid, dest_address, bid_amount, rescan_from, find_index=False):
self._log.debug('[rm] scantxoutset start') # scantxoutset is slow
ro = self.rpc_callback('scantxoutset', ['start', ['addr({})'.format(dest_address)]]) # TODO: Use combo(address) where possible
self._log.debug('[rm] scantxoutset end')
return_txid = True if txid is None else False
for o in ro['unspents']:
if txid and o['txid'] != txid.hex():
continue
# Verify amount
if make_int(o['amount']) != int(bid_amount):
self._log.warning('Found output to lock tx address of incorrect value: %s, %s', str(o['amount']), o['txid'])
continue
rv = {
'depth': 0,
'height': o['height']}
if o['height'] > 0:
rv['depth'] = ro['height'] - o['height']
if find_index:
rv['index'] = o['vout']
if return_txid:
rv['txid'] = o['txid']
return rv

File diff suppressed because it is too large.


@@ -6,7 +6,8 @@
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from .btc import BTCInterface
from basicswap.contrib.test_framework.messages import CTxOut
from basicswap.contrib.test_framework.messages import (
CTxOut)
class PassthroughBTCInterface(BTCInterface):
@@ -14,5 +15,5 @@ class PassthroughBTCInterface(BTCInterface):
super().__init__(coin_settings, network)
self.txoType = CTxOut
self._network = network
self.blocks_confirmed = coin_settings["blocks_confirmed"]
self.setConfTarget(coin_settings["conf_target"])
self.blocks_confirmed = coin_settings['blocks_confirmed']
self.setConfTarget(coin_settings['conf_target'])


@@ -1,25 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2022 tecnovert
# Copyright (c) 2024 The Basicswap developers
# Copyright (c) 2020 tecnovert
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from io import BytesIO
from .btc import BTCInterface
from basicswap.rpc import make_rpc_func
from basicswap.chainparams import Coins
from basicswap.util.address import decodeAddress
from .contrib.pivx_test_framework.messages import CBlock, ToHex, FromHex, CTransaction
from basicswap.contrib.test_framework.script import (
CScript,
OP_DUP,
OP_HASH160,
OP_CHECKSIG,
OP_EQUALVERIFY,
)
from .contrib.pivx_test_framework.messages import (
CBlock,
ToHex,
FromHex)
class PIVXInterface(BTCInterface):
@@ -27,153 +18,39 @@ class PIVXInterface(BTCInterface):
def coin_type():
return Coins.PIVX
def __init__(self, coin_settings, network, swap_client=None):
super(PIVXInterface, self).__init__(coin_settings, network, swap_client)
# No multiwallet support
self.rpc_wallet = make_rpc_func(
self._rpcport, self._rpcauth, host=self._rpc_host
)
self.rpc_wallet_watch = self.rpc_wallet
def createRawSignedTransaction(self, addr_to, amount):
txn = self.rpc_callback('createrawtransaction', [[], {addr_to: self.format_amount(amount)}])
def encryptWallet(self, password: str, check_seed: bool = True):
# Watchonly wallets are not encrypted
seed_id_before: str = self.getWalletSeedID()
self.rpc_wallet("encryptwallet", [password])
if check_seed is False or seed_id_before == "Not found":
return
seed_id_after: str = self.getWalletSeedID()
if seed_id_before == seed_id_after:
return
self._log.warning(f"{self.ticker()} wallet seed changed after encryption.")
self._log.debug(
f"seed_id_before: {seed_id_before} seed_id_after: {seed_id_after}."
)
self.setWalletSeedWarning(True)
# Workaround for https://github.com/bitcoin/bitcoin/issues/26607
chain_client_settings = self._sc.getChainClientSettings(
self.coin_type()
) # basicswap.json
if chain_client_settings.get("manage_daemon", False) is False:
self._log.warning(
f"{self.ticker()} manage_daemon is false. Can't attempt to fix."
)
return
def signTxWithWallet(self, tx):
rv = self.rpc("signrawtransaction", [tx.hex()])
return bytes.fromhex(rv["hex"])
def createRawFundedTransaction(
self,
addr_to: str,
amount: int,
sub_fee: bool = False,
lock_unspents: bool = True,
) -> str:
txn = self.rpc(
"createrawtransaction", [[], {addr_to: self.format_amount(amount)}]
)
fee_rate, fee_src = self.get_fee_rate(self._conf_target)
self._log.debug(
f"Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}"
)
self._log.debug(f'Fee rate: {fee_rate}, source: {fee_src}, block target: {self._conf_target}')
options = {
"lockUnspents": lock_unspents,
"feeRate": fee_rate,
'lockUnspents': True,
'feeRate': fee_rate,
}
if sub_fee:
options["subtractFeeFromOutputs"] = [
0,
]
return self.rpc("fundrawtransaction", [txn, options])["hex"]
def createRawSignedTransaction(self, addr_to, amount) -> str:
txn_funded = self.createRawFundedTransaction(addr_to, amount)
return self.rpc("signrawtransaction", [txn_funded])["hex"]
def decodeAddress(self, address):
return decodeAddress(address)[1:]
txn_funded = self.rpc_callback('fundrawtransaction', [txn, options])['hex']
txn_signed = self.rpc_callback('signrawtransaction', [txn_funded])['hex']
return txn_signed
def getBlockWithTxns(self, block_hash):
# TODO: Bypass decoderawtransaction and getblockheader
block = self.rpc("getblock", [block_hash, False])
block_header = self.rpc("getblockheader", [block_hash])
block = self.rpc_callback('getblock', [block_hash, False])
block_header = self.rpc_callback('getblockheader', [block_hash])
decoded_block = CBlock()
decoded_block = FromHex(decoded_block, block)
tx_rv = []
for tx in decoded_block.vtx:
tx_dec = self.rpc("decoderawtransaction", [ToHex(tx)])
tx_dec = self.rpc_callback('decoderawtransaction', [ToHex(tx)])
tx_rv.append(tx_dec)
block_rv = {
"hash": block_hash,
"previousblockhash": block_header["previousblockhash"],
"tx": tx_rv,
"confirmations": block_header["confirmations"],
"height": block_header["height"],
"time": block_header["time"],
"version": block_header["version"],
"merkleroot": block_header["merkleroot"],
'hash': block_hash,
'tx': tx_rv,
'confirmations': block_header['confirmations'],
'height': block_header['height'],
'version': block_header['version'],
'merkleroot': block_header['merkleroot'],
}
return block_rv
def withdrawCoin(self, value, addr_to, subfee):
params = [addr_to, value, "", "", subfee]
return self.rpc("sendtoaddress", params)
def getSpendableBalance(self) -> int:
return self.make_int(self.rpc("getwalletinfo")["balance"])
def loadTx(self, tx_bytes):
# Load tx from bytes to internal representation
tx = CTransaction()
tx.deserialize(BytesIO(tx_bytes))
return tx
def getScriptForPubkeyHash(self, pkh: bytes) -> bytearray:
# Return P2PKH
return CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG])
def getBLockSpendTxFee(self, tx, fee_rate: int) -> int:
add_bytes = 107
size = len(tx.serialize_with_witness()) + add_bytes
pay_fee = round(fee_rate * size / 1000)
self._log.info(
f"BLockSpendTx fee_rate, size, fee: {fee_rate}, {size}, {pay_fee}."
)
return pay_fee
def signTxWithKey(self, tx: bytes, key: bytes) -> bytes:
key_wif = self.encodeKey(key)
rv = self.rpc(
"signrawtransaction",
[
tx.hex(),
[],
[
key_wif,
],
],
)
return bytes.fromhex(rv["hex"])
def findTxnByHash(self, txid_hex: str):
# Only works for wallet txns
try:
rv = self.rpc("gettransaction", [txid_hex])
except Exception as e: # noqa: F841
self._log.debug(
"findTxnByHash getrawtransaction failed: {}".format(txid_hex)
)
return None
if "confirmations" in rv and rv["confirmations"] >= self.blocks_confirmed:
block_height = self.getBlockHeader(rv["blockhash"])["height"]
return {"txid": txid_hex, "amount": 0, "height": block_height}
return None


@@ -1,56 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from basicswap.chainparams import WOW_COIN, Coins
from .xmr import XMRInterface
class WOWInterface(XMRInterface):
@staticmethod
def coin_type():
return Coins.WOW
@staticmethod
def ticker_str() -> str:
return Coins.WOW.name
@staticmethod
def COIN():
return WOW_COIN
@staticmethod
def exp() -> int:
return 11
@staticmethod
def depth_spendable() -> int:
return 3
# below only needed until wow is rebased to monero v0.18.4.0+
def openWallet(self, filename):
params = {"filename": filename}
if self._wallet_password is not None:
params["password"] = self._wallet_password
try:
self.rpc_wallet("open_wallet", params)
except Exception as e:
if "no connection to daemon" in str(e):
self._log.debug(f"{self.coin_name()} {e}")
return # bypass refresh error to allow startup with a busy daemon
try:
# TODO Remove `store` after upstream fix to autosave on close_wallet
self.rpc_wallet("store")
self.rpc_wallet("close_wallet")
self._log.debug(f"Attempt to save and close {self.coin_name()} wallet")
except Exception as e: # noqa: F841
pass
self.rpc_wallet("open_wallet", params)
self._log.debug(f"Reattempt to open {self.coin_name()} wallet")

File diff suppressed because it is too large.

File diff suppressed because it is too large.

basicswap/messages.proto (new file, 133 lines)

@@ -0,0 +1,133 @@
syntax = "proto3";
package basicswap;
/* Step 1, seller -> network */
message OfferMessage {
uint32 coin_from = 1;
uint32 coin_to = 2;
uint64 amount_from = 3;
uint64 rate = 4;
uint64 min_bid_amount = 5;
uint64 time_valid = 6;
enum LockType {
NOT_SET = 0;
SEQUENCE_LOCK_BLOCKS = 1;
SEQUENCE_LOCK_TIME = 2;
ABS_LOCK_BLOCKS = 3;
ABS_LOCK_TIME = 4;
}
LockType lock_type = 7;
uint32 lock_value = 8;
uint32 swap_type = 9;
/* optional */
string proof_address = 10;
string proof_signature = 11;
bytes pkhash_seller = 12;
bytes secret_hash = 13;
uint64 fee_rate_from = 14;
uint64 fee_rate_to = 15;
uint32 protocol_version = 16;
bool amount_negotiable = 17;
bool rate_negotiable = 18;
}
/* Step 2, buyer -> seller */
message BidMessage {
bytes offer_msg_id = 1;
uint64 time_valid = 2; /* seconds bid is valid for */
uint64 amount = 3; /* amount of amount_from bid is for */
/* optional */
uint64 rate = 4;
bytes pkhash_buyer = 5; /* buyer's address to receive amount_from */
string proof_address = 6;
string proof_signature = 7;
uint32 protocol_version = 8;
}
/* Step 3, seller -> buyer */
message BidAcceptMessage {
bytes bid_msg_id = 1;
bytes initiate_txid = 2;
bytes contract_script = 3;
}
message OfferRevokeMessage {
bytes offer_msg_id = 1;
bytes signature = 2;
}
message BidRejectMessage {
bytes bid_msg_id = 1;
uint32 reject_code = 2;
}
message XmrBidMessage {
/* MSG1L, F -> L */
bytes offer_msg_id = 1;
uint64 time_valid = 2; /* seconds bid is valid for */
uint64 amount = 3; /* amount of amount_from bid is for */
uint64 rate = 4;
bytes pkaf = 5;
bytes kbvf = 6;
bytes kbsf_dleag = 7;
bytes dest_af = 8;
uint32 protocol_version = 9;
}
message XmrSplitMessage {
bytes msg_id = 1;
uint32 msg_type = 2; /* 1 XmrBid, 2 XmrBidAccept */
uint32 sequence = 3;
bytes dleag = 4;
}
message XmrBidAcceptMessage {
bytes bid_msg_id = 1;
bytes pkal = 3;
bytes kbvl = 4;
bytes kbsl_dleag = 5;
/* MSG2F */
bytes a_lock_tx = 6;
bytes a_lock_tx_script = 7;
bytes a_lock_refund_tx = 8;
bytes a_lock_refund_tx_script = 9;
bytes a_lock_refund_spend_tx = 10;
bytes al_lock_refund_tx_sig = 11;
}
message XmrBidLockTxSigsMessage {
/* MSG3L */
bytes bid_msg_id = 1;
bytes af_lock_refund_spend_tx_esig = 2;
bytes af_lock_refund_tx_sig = 3;
}
message XmrBidLockSpendTxMessage {
/* MSG4F */
bytes bid_msg_id = 1;
bytes a_lock_spend_tx = 2;
bytes kal_sig = 3;
}
message XmrBidLockReleaseMessage {
/* MSG5F */
bytes bid_msg_id = 1;
bytes al_lock_spend_tx_esig = 2;
}
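
A minimal usage sketch for the definitions above, assuming the module generated by protoc is importable as basicswap.messages_pb2 (added further below):

from basicswap import messages_pb2

offer = messages_pb2.OfferMessage(
    coin_from=1,
    coin_to=6,
    amount_from=100000000,
    rate=250000000,
    min_bid_amount=1000000,
    time_valid=3600,
)
payload = offer.SerializeToString()   # serialised wire bytes

decoded = messages_pb2.OfferMessage()
decoded.ParseFromString(payload)
assert decoded.amount_from == offer.amount_from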


@@ -1,284 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
"""
syntax = "proto3";
0 VARINT int32, int64, uint32, uint64, sint32, sint64, bool, enum
1 I64 fixed64, sfixed64, double
2 LEN string, bytes, embedded messages, packed repeated fields
5 I32 fixed32, sfixed32, float
Fields holding their default value are not encoded.
When decoding, any field not present in the data is initialised to its default.
protobuf's ParseFromString would reset the whole object; from_bytes won't.
"""
from basicswap.util.integer import encode_varint, decode_varint
NPBW_INT = 0
NPBW_BYTES = 2
NPBF_STR = 1
NPBF_BOOL = 2
class NonProtobufClass:
def __init__(self, init_all: bool = True, **kwargs):
for key, value in kwargs.items():
found_field: bool = False
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if field_name == key:
setattr(self, field_name, value)
found_field = True
break
if found_field is False:
raise ValueError(f"Got an unexpected keyword argument '{key}'")
if init_all:
self.init_fields()
def init_fields(self) -> None:
# Set default values for missing fields
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if hasattr(self, field_name):
continue
if wire_type == 0:
setattr(self, field_name, 0)
elif wire_type == 2:
if field_type == 1:
setattr(self, field_name, str())
else:
setattr(self, field_name, bytes())
else:
raise ValueError(f"Unknown wire_type {wire_type}")
def to_bytes(self) -> bytes:
rv = bytes()
for field_num, v in self._map.items():
field_name, wire_type, field_type = v
if not hasattr(self, field_name):
continue
field_value = getattr(self, field_name)
tag = (field_num << 3) | wire_type
if wire_type == 0:
if field_value == 0:
continue
rv += encode_varint(tag)
rv += encode_varint(field_value)
elif wire_type == 2:
if len(field_value) == 0:
continue
rv += encode_varint(tag)
if isinstance(field_value, str):
field_value = field_value.encode("utf-8")
rv += encode_varint(len(field_value))
rv += field_value
else:
raise ValueError(f"Unknown wire_type {wire_type}")
return rv
def from_bytes(self, b: bytes, init_all: bool = True) -> None:
max_len: int = len(b)
o: int = 0
while o < max_len:
tag, lv = decode_varint(b, o)
o += lv
wire_type = tag & 7
field_num = tag >> 3
field_name, wire_type_expect, field_type = self._map[field_num]
if wire_type != wire_type_expect:
raise ValueError(
f"Unexpected wire_type {wire_type} for field {field_num}"
)
if wire_type == 0:
field_value, lv = decode_varint(b, o)
o += lv
elif wire_type == 2:
field_len, lv = decode_varint(b, o)
o += lv
field_value = b[o : o + field_len]
o += field_len
if field_type == 1:
field_value = field_value.decode("utf-8")
else:
raise ValueError(f"Unknown wire_type {wire_type}")
setattr(self, field_name, field_value)
if init_all:
self.init_fields()
class OfferMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("coin_from", NPBW_INT, 0),
3: ("coin_to", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("min_bid_amount", NPBW_INT, 0),
7: ("time_valid", NPBW_INT, 0),
8: ("lock_type", NPBW_INT, 0),
9: ("lock_value", NPBW_INT, 0),
10: ("swap_type", NPBW_INT, 0),
11: ("proof_address", NPBW_BYTES, NPBF_STR),
12: ("proof_signature", NPBW_BYTES, NPBF_STR),
13: ("pkhash_seller", NPBW_BYTES, 0),
14: ("secret_hash", NPBW_BYTES, 0),
15: ("fee_rate_from", NPBW_INT, 0),
16: ("fee_rate_to", NPBW_INT, 0),
17: ("amount_negotiable", NPBW_INT, NPBF_BOOL),
18: ("rate_negotiable", NPBW_INT, NPBF_BOOL),
19: ("proof_utxos", NPBW_BYTES, 0),
20: ("auto_accept_type", NPBW_INT, 0),
}
class BidMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkhash_buyer", NPBW_BYTES, 0),
7: ("proof_address", NPBW_BYTES, NPBF_STR),
8: ("proof_signature", NPBW_BYTES, NPBF_STR),
9: ("proof_utxos", NPBW_BYTES, 0),
10: ("pkhash_buyer_to", NPBW_BYTES, 0),
}
class BidAcceptMessage(NonProtobufClass):
# Step 3, seller -> buyer
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("initiate_txid", NPBW_BYTES, 0),
3: ("contract_script", NPBW_BYTES, 0),
4: ("pkhash_seller", NPBW_BYTES, 0),
}
class OfferRevokeMessage(NonProtobufClass):
_map = {
1: ("offer_msg_id", NPBW_BYTES, 0),
2: ("signature", NPBW_BYTES, 0),
}
class BidRejectMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("reject_code", NPBW_INT, 0),
}
class XmrBidMessage(NonProtobufClass):
# MSG1L, F -> L
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkaf", NPBW_BYTES, 0),
7: ("kbvf", NPBW_BYTES, 0),
8: ("kbsf_dleag", NPBW_BYTES, 0),
9: ("dest_af", NPBW_BYTES, 0),
}
class XmrSplitMessage(NonProtobufClass):
_map = {
1: ("msg_id", NPBW_BYTES, 0),
2: ("msg_type", NPBW_INT, 0),
3: ("sequence", NPBW_INT, 0),
4: ("dleag", NPBW_BYTES, 0),
}
class XmrBidAcceptMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkal", NPBW_BYTES, 0),
3: ("kbvl", NPBW_BYTES, 0),
4: ("kbsl_dleag", NPBW_BYTES, 0),
# MSG2F
5: ("a_lock_tx", NPBW_BYTES, 0),
6: ("a_lock_tx_script", NPBW_BYTES, 0),
7: ("a_lock_refund_tx", NPBW_BYTES, 0),
8: ("a_lock_refund_tx_script", NPBW_BYTES, 0),
9: ("a_lock_refund_spend_tx", NPBW_BYTES, 0),
10: ("al_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockTxSigsMessage(NonProtobufClass):
# MSG3L
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("af_lock_refund_spend_tx_esig", NPBW_BYTES, 0),
3: ("af_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockSpendTxMessage(NonProtobufClass):
# MSG4F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("a_lock_spend_tx", NPBW_BYTES, 0),
3: ("kal_sig", NPBW_BYTES, 0),
}
class XmrBidLockReleaseMessage(NonProtobufClass):
# MSG5F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("al_lock_spend_tx_esig", NPBW_BYTES, 0),
}
class ADSBidIntentMessage(NonProtobufClass):
# L -> F Sent from bidder, construct a reverse bid
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
}
class ADSBidIntentAcceptMessage(NonProtobufClass):
# F -> L Sent from offerer, construct a reverse bid
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkaf", NPBW_BYTES, 0),
3: ("kbvf", NPBW_BYTES, 0),
4: ("kbsf_dleag", NPBW_BYTES, 0),
5: ("dest_af", NPBW_BYTES, 0),
}
class ConnectReqMessage(NonProtobufClass):
_map = {
1: ("network_type", NPBW_INT, 0),
2: ("network_data", NPBW_BYTES, 0),
3: ("request_type", NPBW_INT, 0),
4: ("request_data", NPBW_BYTES, 0),
}
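
A round-trip sketch for the NonProtobufClass messages above: to_bytes() skips fields still holding their default value, and from_bytes() fills any field absent from the data back in with its default rather than resetting the whole object:

msg = OfferMessage(coin_from=1, coin_to=6, amount_from=100000000, time_valid=3600)
data = msg.to_bytes()

decoded = OfferMessage()
decoded.from_bytes(data)
assert decoded.amount_from == 100000000
assert decoded.proof_address == ""   # never encoded, initialised to its default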

basicswap/messages_pb2.py (new file, 47 lines)

@@ -0,0 +1,47 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: messages.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0emessages.proto\x12\tbasicswap\"\xa6\x04\n\x0cOfferMessage\x12\x11\n\tcoin_from\x18\x01 \x01(\r\x12\x0f\n\x07\x63oin_to\x18\x02 \x01(\r\x12\x13\n\x0b\x61mount_from\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x16\n\x0emin_bid_amount\x18\x05 \x01(\x04\x12\x12\n\ntime_valid\x18\x06 \x01(\x04\x12\x33\n\tlock_type\x18\x07 \x01(\x0e\x32 .basicswap.OfferMessage.LockType\x12\x12\n\nlock_value\x18\x08 \x01(\r\x12\x11\n\tswap_type\x18\t \x01(\r\x12\x15\n\rproof_address\x18\n \x01(\t\x12\x17\n\x0fproof_signature\x18\x0b \x01(\t\x12\x15\n\rpkhash_seller\x18\x0c \x01(\x0c\x12\x13\n\x0bsecret_hash\x18\r \x01(\x0c\x12\x15\n\rfee_rate_from\x18\x0e \x01(\x04\x12\x13\n\x0b\x66\x65\x65_rate_to\x18\x0f \x01(\x04\x12\x18\n\x10protocol_version\x18\x10 \x01(\r\x12\x19\n\x11\x61mount_negotiable\x18\x11 \x01(\x08\x12\x17\n\x0frate_negotiable\x18\x12 \x01(\x08\"q\n\x08LockType\x12\x0b\n\x07NOT_SET\x10\x00\x12\x18\n\x14SEQUENCE_LOCK_BLOCKS\x10\x01\x12\x16\n\x12SEQUENCE_LOCK_TIME\x10\x02\x12\x13\n\x0f\x41\x42S_LOCK_BLOCKS\x10\x03\x12\x11\n\rABS_LOCK_TIME\x10\x04\"\xb4\x01\n\nBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x14\n\x0cpkhash_buyer\x18\x05 \x01(\x0c\x12\x15\n\rproof_address\x18\x06 \x01(\t\x12\x17\n\x0fproof_signature\x18\x07 \x01(\t\x12\x18\n\x10protocol_version\x18\x08 \x01(\r\"V\n\x10\x42idAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x15\n\rinitiate_txid\x18\x02 \x01(\x0c\x12\x17\n\x0f\x63ontract_script\x18\x03 \x01(\x0c\"=\n\x12OfferRevokeMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x11\n\tsignature\x18\x02 \x01(\x0c\";\n\x10\x42idRejectMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x13\n\x0breject_code\x18\x02 \x01(\r\"\xb2\x01\n\rXmrBidMessage\x12\x14\n\x0coffer_msg_id\x18\x01 \x01(\x0c\x12\x12\n\ntime_valid\x18\x02 \x01(\x04\x12\x0e\n\x06\x61mount\x18\x03 \x01(\x04\x12\x0c\n\x04rate\x18\x04 \x01(\x04\x12\x0c\n\x04pkaf\x18\x05 \x01(\x0c\x12\x0c\n\x04kbvf\x18\x06 \x01(\x0c\x12\x12\n\nkbsf_dleag\x18\x07 \x01(\x0c\x12\x0f\n\x07\x64\x65st_af\x18\x08 \x01(\x0c\x12\x18\n\x10protocol_version\x18\t \x01(\r\"T\n\x0fXmrSplitMessage\x12\x0e\n\x06msg_id\x18\x01 \x01(\x0c\x12\x10\n\x08msg_type\x18\x02 \x01(\r\x12\x10\n\x08sequence\x18\x03 \x01(\r\x12\r\n\x05\x64leag\x18\x04 \x01(\x0c\"\x80\x02\n\x13XmrBidAcceptMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x0c\n\x04pkal\x18\x03 \x01(\x0c\x12\x0c\n\x04kbvl\x18\x04 \x01(\x0c\x12\x12\n\nkbsl_dleag\x18\x05 \x01(\x0c\x12\x11\n\ta_lock_tx\x18\x06 \x01(\x0c\x12\x18\n\x10\x61_lock_tx_script\x18\x07 \x01(\x0c\x12\x18\n\x10\x61_lock_refund_tx\x18\x08 \x01(\x0c\x12\x1f\n\x17\x61_lock_refund_tx_script\x18\t \x01(\x0c\x12\x1e\n\x16\x61_lock_refund_spend_tx\x18\n \x01(\x0c\x12\x1d\n\x15\x61l_lock_refund_tx_sig\x18\x0b \x01(\x0c\"r\n\x17XmrBidLockTxSigsMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12$\n\x1c\x61\x66_lock_refund_spend_tx_esig\x18\x02 \x01(\x0c\x12\x1d\n\x15\x61\x66_lock_refund_tx_sig\x18\x03 \x01(\x0c\"X\n\x18XmrBidLockSpendTxMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x17\n\x0f\x61_lock_spend_tx\x18\x02 \x01(\x0c\x12\x0f\n\x07kal_sig\x18\x03 \x01(\x0c\"M\n\x18XmrBidLockReleaseMessage\x12\x12\n\nbid_msg_id\x18\x01 \x01(\x0c\x12\x1d\n\x15\x61l_lock_spend_tx_esig\x18\x02 \x01(\x0c\x62\x06proto3')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'messages_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_OFFERMESSAGE._serialized_start=30
_OFFERMESSAGE._serialized_end=580
_OFFERMESSAGE_LOCKTYPE._serialized_start=467
_OFFERMESSAGE_LOCKTYPE._serialized_end=580
_BIDMESSAGE._serialized_start=583
_BIDMESSAGE._serialized_end=763
_BIDACCEPTMESSAGE._serialized_start=765
_BIDACCEPTMESSAGE._serialized_end=851
_OFFERREVOKEMESSAGE._serialized_start=853
_OFFERREVOKEMESSAGE._serialized_end=914
_BIDREJECTMESSAGE._serialized_start=916
_BIDREJECTMESSAGE._serialized_end=975
_XMRBIDMESSAGE._serialized_start=978
_XMRBIDMESSAGE._serialized_end=1156
_XMRSPLITMESSAGE._serialized_start=1158
_XMRSPLITMESSAGE._serialized_end=1242
_XMRBIDACCEPTMESSAGE._serialized_start=1245
_XMRBIDACCEPTMESSAGE._serialized_end=1501
_XMRBIDLOCKTXSIGSMESSAGE._serialized_start=1503
_XMRBIDLOCKTXSIGSMESSAGE._serialized_end=1617
_XMRBIDLOCKSPENDTXMESSAGE._serialized_start=1619
_XMRBIDLOCKSPENDTXMESSAGE._serialized_end=1707
_XMRBIDLOCKRELEASEMESSAGE._serialized_start=1709
_XMRBIDLOCKRELEASEMESSAGE._serialized_end=1786
# @@protoc_insertion_point(module_scope)


@@ -5,25 +5,26 @@
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
"""
Message 2 bytes msg_class, 4 bytes length, [ 2 bytes msg_type, payload ]
'''
Message 2 bytes msg_class, 4 bytes length, [ 2 bytes msg_type, payload ]
Handshake procedure:
node0 connecting to node1
node0 send_handshake
node1 process_handshake
node1 send_ping - With a version field
node0 recv_ping
Both nodes are initialised
Handshake procedure:
node0 connecting to node1
node0 send_handshake
node1 process_handshake
node1 send_ping - With a version field
node0 recv_ping
Both nodes are initialised
XChaCha20_Poly1305 mac is 16bytes
"""
XChaCha20_Poly1305 mac is 16bytes
'''
import time
import queue
import random
import select
import socket
import struct
import hashlib
import logging
import secrets
@@ -36,12 +37,11 @@ from Crypto.Cipher import ChaCha20_Poly1305 # TODO: Add to libsecp256k1/coincur
from coincurve.keys import PrivateKey, PublicKey
from basicswap.contrib.rfc6979 import (
rfc6979_hmac_sha256_initialize,
rfc6979_hmac_sha256_generate,
)
rfc6979_hmac_sha256_generate)
START_TOKEN = 0xABCD
MSG_START_TOKEN = START_TOKEN.to_bytes(2, "big")
START_TOKEN = 0xabcd
MSG_START_TOKEN = struct.pack('>H', START_TOKEN)
MSG_MAX_SIZE = 0x200000 # 2MB
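
# Illustrative sketch of the framing described in the module docstring and used by
# send_msg/receive_bytes below: a 2-byte start token, a 4-byte big-endian length,
# then the message itself (2-byte type followed by the payload). Not used by the code.
def pack_net_message(msg_bytes: bytes) -> bytes:
    return MSG_START_TOKEN + len(msg_bytes).to_bytes(4, "big") + msg_bytes

def read_net_header(data: bytes):
    assert data[:2] == MSG_START_TOKEN, "Invalid start token"
    msg_len = int.from_bytes(data[2:6], "big")     # length covers type + payload
    msg_type = int.from_bytes(data[6:8], "big")
    return msg_len, msg_type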
@@ -64,71 +64,49 @@ class NetMessageTypes(IntEnum):
return value in cls._value2member_map_
"""
'''
class NetMessage:
def __init__(self):
self._msg_class = None # 2 bytes
self._len = None # 4 bytes
self._msg_type = None # 2 bytes
"""
'''
# Ensure handshake keys are not reused by including the time in the msg, mac and key hash
# Verify timestamp is not too old
# Add keys to db to catch concurrent attempts, records can be cleared periodically, the timestamp should catch older replay attempts
class MsgHandshake:
__slots__ = ("_timestamp", "_ephem_pk", "_ct", "_mac")
__slots__ = ('_timestamp', '_ephem_pk', '_ct', '_mac')
def __init__(self):
pass
def encode_aad(self): # Additional Authenticated Data
return (
int(NetMessageTypes.HANDSHAKE).to_bytes(2, "big")
+ self._timestamp.to_bytes(8, "big")
+ self._ephem_pk
)
return struct.pack('>H', NetMessageTypes.HANDSHAKE) + \
struct.pack('>Q', self._timestamp) + \
self._ephem_pk
def encode(self):
return self.encode_aad() + self._ct + self._mac
def decode(self, msg_mv):
o = 2
self._timestamp = int.from_bytes(msg_mv[o : o + 8], "big")
self._timestamp = struct.unpack('>Q', msg_mv[o: o + 8])[0]
o += 8
self._ephem_pk = bytes(msg_mv[o : o + 33])
self._ephem_pk = bytes(msg_mv[o: o + 33])
o += 33
self._ct = bytes(msg_mv[o:-16])
self._ct = bytes(msg_mv[o: -16])
self._mac = bytes(msg_mv[-16:])
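
# Illustrative sketch (not used by the code): both sides of the handshake derive the
# same session keys from the ECDH shared secret and the handshake timestamp, as done
# in send_handshake and process_handshake below.
def derive_handshake_keys(local_key: PrivateKey, remote_pubkey: bytes, timestamp: int):
    ss = local_key.ecdh(remote_pubkey)
    hashed = hashlib.sha512(ss + timestamp.to_bytes(8, "big")).digest()
    return hashed[:32], hashed[32:]  # ke (cipher key), km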
class Peer:
__slots__ = (
"_mx",
"_pubkey",
"_address",
"_socket",
"_version",
"_ready",
"_incoming",
"_connected_at",
"_last_received_at",
"_bytes_sent",
"_bytes_received",
"_receiving_length",
"_receiving_buffer",
"_recv_messages",
"_misbehaving_score",
"_ke",
"_km",
"_dir",
"_sent_nonce",
"_recv_nonce",
"_last_handshake_at",
"_ping_nonce",
"_last_ping_at",
"_last_ping_rtt",
)
'_mx', '_pubkey', '_address', '_socket', '_version', '_ready', '_incoming',
'_connected_at', '_last_received_at', '_bytes_sent', '_bytes_received',
'_receiving_length', '_receiving_buffer', '_recv_messages', '_misbehaving_score',
'_ke', '_km', '_dir', '_sent_nonce', '_recv_nonce', '_last_handshake_at',
'_ping_nonce', '_last_ping_at', '_last_ping_rtt')
def __init__(self, address, socket, pubkey):
self._mx = threading.Lock()
@@ -164,16 +142,14 @@ def listen_thread(cls):
max_bytes = 0x10000
while cls._running:
# logging.info('[rm] network loop %d', cls._running)
readable, writable, errored = select.select(
cls._read_sockets, cls._write_sockets, cls._error_sockets, timeout
)
readable, writable, errored = select.select(cls._read_sockets, cls._write_sockets, cls._error_sockets, timeout)
cls._mx.acquire()
try:
disconnected_peers = []
for s in readable:
if s == cls._socket:
peer_socket, address = cls._socket.accept()
logging.info("Connection from %s", address)
logging.info('Connection from %s', address)
new_peer = Peer(address, peer_socket, None)
new_peer._incoming = True
cls._peers.append(new_peer)
@@ -185,12 +161,12 @@ def listen_thread(cls):
try:
bytes_recv = s.recv(max_bytes, socket.MSG_DONTWAIT)
except socket.error as se:
if se.args[0] not in (socket.EWOULDBLOCK,):
logging.error("Receive error %s", str(se))
if se.args[0] not in (socket.EWOULDBLOCK, ):
logging.error('Receive error %s', str(se))
disconnected_peers.append(peer)
continue
except Exception as e:
logging.error("Receive error %s", str(e))
logging.error('Receive error %s', str(e))
disconnected_peers.append(peer)
continue
@@ -200,7 +176,7 @@ def listen_thread(cls):
cls.receive_bytes(peer, bytes_recv)
for s in errored:
logging.warning("Socket error")
logging.warning('Socket error')
for peer in disconnected_peers:
cls.disconnect(peer)
@@ -218,9 +194,7 @@ def msg_thread(cls):
try:
now_us = time.time_ns() // 1000
if peer._ready is True:
if (
now_us - peer._last_ping_at >= 5000000
): # 5 seconds TODO: Make variable
if now_us - peer._last_ping_at >= 5000000: # 5 seconds TODO: Make variable
cls.send_ping(peer)
msg = peer._recv_messages.get(False)
cls.process_message(peer, msg)
@@ -228,7 +202,7 @@ def msg_thread(cls):
except queue.Empty:
pass
except Exception as e:
logging.warning("process message error %s", str(e))
logging.warning('process message error %s', str(e))
if cls._sc.debug:
logging.error(traceback.format_exc())
@@ -238,24 +212,9 @@ def msg_thread(cls):
class Network:
__slots__ = (
"_p2p_host",
"_p2p_port",
"_network_key",
"_network_pubkey",
"_sc",
"_peers",
"_max_connections",
"_running",
"_network_thread",
"_msg_thread",
"_mx",
"_socket",
"_read_sockets",
"_write_sockets",
"_error_sockets",
"_csprng",
"_seen_ephem_keys",
)
'_p2p_host', '_p2p_port', '_network_key', '_network_pubkey',
'_sc', '_peers', '_max_connections', '_running', '_network_thread', '_msg_thread',
'_mx', '_socket', '_read_sockets', '_write_sockets', '_error_sockets', '_csprng', '_seen_ephem_keys')
def __init__(self, p2p_host, p2p_port, network_key, swap_client):
self._p2p_host = p2p_host
@@ -320,13 +279,7 @@ class Network:
self._mx.release()
def add_connection(self, host, port, peer_pubkey):
self._sc.log.info(
"Connecting from %s to %s at %s %d",
self._network_pubkey.hex(),
peer_pubkey.hex(),
host,
port,
)
self._sc.log.info('Connecting from %s to %s at %s %d', self._network_pubkey.hex(), peer_pubkey.hex(), host, port)
self._mx.acquire()
try:
address = (host, port)
@@ -342,7 +295,7 @@ class Network:
self.send_handshake(peer)
def disconnect(self, peer):
self._sc.log.info("Closing peer socket %s", peer._address)
self._sc.log.info('Closing peer socket %s', peer._address)
self._read_sockets.pop(self._read_sockets.index(peer._socket))
self._error_sockets.pop(self._error_sockets.index(peer._socket))
peer.close()
@@ -353,11 +306,7 @@ class Network:
used = self._seen_ephem_keys.get(ephem_pk)
if used:
raise ValueError(
"Handshake ephem_pk reused %s peer %s",
"for" if direction == 1 else "by",
used[0],
)
raise ValueError('Handshake ephem_pk reused %s peer %s', 'for' if direction == 1 else 'by', used[0])
self._seen_ephem_keys[ephem_pk] = (peer._address, timestamp)
@@ -365,14 +314,12 @@ class Network:
self._seen_ephem_keys.popitem(last=False)
def send_handshake(self, peer):
self._sc.log.debug("send_handshake %s", peer._address)
self._sc.log.debug('send_handshake %s', peer._address)
peer._mx.acquire()
try:
# TODO: Drain peer._recv_messages
if not peer._recv_messages.empty():
self._sc.log.warning(
"send_handshake %s - Receive queue dumped.", peer._address
)
self._sc.log.warning('send_handshake %s - Receive queue dumped.', peer._address)
while not peer._recv_messages.empty():
peer._recv_messages.get(False)
@@ -386,7 +333,7 @@ class Network:
ss = k.ecdh(peer._pubkey)
hashed = hashlib.sha512(ss + msg._timestamp.to_bytes(8, "big")).digest()
hashed = hashlib.sha512(ss + struct.pack('>Q', msg._timestamp)).digest()
peer._ke = hashed[:32]
peer._km = hashed[32:]
@@ -415,13 +362,11 @@ class Network:
peer._mx.release()
def process_handshake(self, peer, msg_mv):
self._sc.log.debug("process_handshake %s", peer._address)
self._sc.log.debug('process_handshake %s', peer._address)
# TODO: Drain peer._recv_messages
if not peer._recv_messages.empty():
self._sc.log.warning(
"process_handshake %s - Receive queue dumped.", peer._address
)
self._sc.log.warning('process_handshake %s - Receive queue dumped.', peer._address)
while not peer._recv_messages.empty():
peer._recv_messages.get(False)
@@ -431,19 +376,17 @@ class Network:
try:
now = int(time.time())
if now - peer._last_handshake_at < 30:
raise ValueError("Too many handshakes from peer %s", peer._address)
raise ValueError('Too many handshakes from peer %s', peer._address)
if abs(msg._timestamp - now) > TIMESTAMP_LEEWAY:
raise ValueError("Bad handshake timestamp from peer %s", peer._address)
raise ValueError('Bad handshake timestamp from peer %s', peer._address)
self.check_handshake_ephem_key(
peer, msg._timestamp, msg._ephem_pk, direction=2
)
self.check_handshake_ephem_key(peer, msg._timestamp, msg._ephem_pk, direction=2)
nk = PrivateKey(self._network_key)
ss = nk.ecdh(msg._ephem_pk)
hashed = hashlib.sha512(ss + msg._timestamp.to_bytes(8, "big")).digest()
hashed = hashlib.sha512(ss + struct.pack('>Q', msg._timestamp)).digest()
peer._ke = hashed[:32]
peer._km = hashed[32:]
@@ -453,9 +396,7 @@ class Network:
aad += nonce
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(aad)
plaintext = cipher.decrypt_and_verify(
msg._ct, msg._mac
) # Will raise error if mac doesn't match
plaintext = cipher.decrypt_and_verify(msg._ct, msg._mac) # Will raise error if mac doesn't match
peer._version = plaintext[:6]
sig = plaintext[6:]
@@ -474,30 +415,26 @@ class Network:
except Exception as e:
# TODO: misbehaving
self._sc.log.debug("[rm] process_handshake %s", str(e))
self._sc.log.debug('[rm] process_handshake %s', str(e))
def process_ping(self, peer, msg_mv):
nonce = peer._recv_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_mv[0:2])
cipher.update(msg_mv[0: 2])
cipher.update(nonce)
mac = msg_mv[-16:]
plaintext = cipher.decrypt_and_verify(msg_mv[2:-16], mac)
plaintext = cipher.decrypt_and_verify(msg_mv[2: -16], mac)
ping_nonce = int.from_bytes(plaintext[:4], "big")
ping_nonce = struct.unpack('>I', plaintext[:4])[0]
# Version is added to a ping following a handshake message
if len(plaintext) >= 10:
peer._ready = True
version = plaintext[4:10]
version = plaintext[4: 10]
if peer._version is None:
peer._version = version
self._sc.log.debug(
"Set version from ping %s, %s",
peer._pubkey.hex(),
peer._version.hex(),
)
self._sc.log.debug('Set version from ping %s, %s', peer._pubkey.hex(), peer._version.hex())
peer._recv_nonce = hashlib.sha256(nonce + mac).digest()
@@ -507,32 +444,32 @@ class Network:
nonce = peer._recv_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_mv[0:2])
cipher.update(msg_mv[0: 2])
cipher.update(nonce)
mac = msg_mv[-16:]
plaintext = cipher.decrypt_and_verify(msg_mv[2:-16], mac)
plaintext = cipher.decrypt_and_verify(msg_mv[2: -16], mac)
pong_nonce = int.from_bytes(plaintext[:4], "big")
pong_nonce = struct.unpack('>I', plaintext[:4])[0]
if pong_nonce == peer._ping_nonce:
peer._last_ping_rtt = (time.time_ns() // 1000) - peer._last_ping_at
else:
self._sc.log.debug("Pong received out of order %s", peer._address)
self._sc.log.debug('Pong received out of order %s', peer._address)
peer._recv_nonce = hashlib.sha256(nonce + mac).digest()
def send_ping(self, peer):
ping_nonce = random.getrandbits(32)
msg_bytes = int(NetMessageTypes.PING).to_bytes(2, "big")
msg_bytes = struct.pack('>H', NetMessageTypes.PING)
nonce = peer._sent_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_bytes)
cipher.update(nonce)
payload = ping_nonce.to_bytes(4, "big")
payload = struct.pack('>I', ping_nonce)
if peer._last_ping_at == 0:
payload += self._sc._version
ct, mac = cipher.encrypt_and_digest(payload)
@@ -547,14 +484,14 @@ class Network:
self.send_msg(peer, msg_bytes)
def send_pong(self, peer, ping_nonce):
msg_bytes = int(NetMessageTypes.PONG).to_bytes(2, "big")
msg_bytes = struct.pack('>H', NetMessageTypes.PONG)
nonce = peer._sent_nonce[:24]
cipher = ChaCha20_Poly1305.new(key=peer._ke, nonce=nonce)
cipher.update(msg_bytes)
cipher.update(nonce)
payload = ping_nonce.to_bytes(4, "big")
payload = struct.pack('>I', ping_nonce)
ct, mac = cipher.encrypt_and_digest(payload)
msg_bytes += ct + mac
@@ -566,21 +503,19 @@ class Network:
msg_encoded = msg if isinstance(msg, bytes) else msg.encode()
len_encoded = len(msg_encoded)
msg_packed = (
bytearray(MSG_START_TOKEN) + len_encoded.to_bytes(4, "big") + msg_encoded
)
msg_packed = bytearray(MSG_START_TOKEN) + struct.pack('>I', len_encoded) + msg_encoded
peer._socket.sendall(msg_packed)
peer._bytes_sent += len_encoded
def process_message(self, peer, msg_bytes):
logging.info("[rm] process_message %s len %d", peer._address, len(msg_bytes))
logging.info('[rm] process_message %s len %d', peer._address, len(msg_bytes))
peer._mx.acquire()
try:
mv = memoryview(msg_bytes)
o = 0
msg_type = int.from_bytes(mv[o : o + 2], "big")
msg_type = struct.unpack('>H', mv[o: o + 2])[0]
if msg_type == NetMessageTypes.HANDSHAKE:
self.process_handshake(peer, mv)
elif msg_type == NetMessageTypes.PING:
@@ -588,7 +523,7 @@ class Network:
elif msg_type == NetMessageTypes.PONG:
self.process_pong(peer, mv)
else:
self._sc.log.debug("Unknown message type %d", msg_type)
self._sc.log.debug('Unknown message type %d', msg_type)
finally:
peer._mx.release()
@@ -599,6 +534,7 @@ class Network:
peer._last_received_at = time.time()
peer._bytes_received += len_received
invalid_msg = False
mv = memoryview(bytes_recv)
o = 0
@@ -606,34 +542,34 @@ class Network:
while o < len_received:
if peer._receiving_length == 0:
if len(bytes_recv) < MSG_HEADER_LEN:
raise ValueError("Msg too short")
raise ValueError('Msg too short')
if mv[o : o + 2] != MSG_START_TOKEN:
raise ValueError("Invalid start token")
if mv[o: o + 2] != MSG_START_TOKEN:
raise ValueError('Invalid start token')
o += 2
msg_len = int.from_bytes(mv[o : o + 4], "big")
msg_len = struct.unpack('>I', mv[o: o + 4])[0]
o += 4
if msg_len < 2 or msg_len > MSG_MAX_SIZE:
raise ValueError("Invalid data length")
raise ValueError('Invalid data length')
# Precheck msg_type
msg_type = int.from_bytes(mv[o : o + 2], "big")
msg_type = struct.unpack('>H', mv[o: o + 2])[0]
# o += 2 # Don't inc offset, msg includes type
if not NetMessageTypes.has_value(msg_type):
raise ValueError("Invalid msg type")
raise ValueError('Invalid msg type')
peer._receiving_length = msg_len
len_pkt = len_received - o
len_pkt = (len_received - o)
nc = msg_len if len_pkt > msg_len else len_pkt
peer._receiving_buffer = mv[o : o + nc]
peer._receiving_buffer = mv[o: o + nc]
o += nc
else:
len_to_go = peer._receiving_length - len(peer._receiving_buffer)
len_pkt = len_received - o
len_pkt = (len_received - o)
nc = len_to_go if len_pkt > len_to_go else len_pkt
peer._receiving_buffer = mv[o : o + nc]
peer._receiving_buffer = mv[o: o + nc]
o += nc
if len(peer._receiving_buffer) == peer._receiving_length:
peer._recv_messages.put(peer._receiving_buffer)
@@ -641,13 +577,11 @@ class Network:
except Exception as e:
if self._sc.debug:
self._sc.log.error(
"Invalid message received from %s %s", peer._address, str(e)
)
self._sc.log.error('Invalid message received from %s %s', peer._address, str(e))
# TODO: misbehaving
def test_onion(self, path):
self._sc.log.debug("test_onion packet")
self._sc.log.debug('test_onion packet')
def get_info(self):
rv = {}
@@ -656,14 +590,14 @@ class Network:
with self._mx:
for peer in self._peers:
peer_info = {
"pubkey": "Unknown" if not peer._pubkey else peer._pubkey.hex(),
"address": "{}:{}".format(peer._address[0], peer._address[1]),
"bytessent": peer._bytes_sent,
"bytesrecv": peer._bytes_received,
"ready": peer._ready,
"incoming": peer._incoming,
'pubkey': 'Unknown' if not peer._pubkey else peer._pubkey.hex(),
'address': '{}:{}'.format(peer._address[0], peer._address[1]),
'bytessent': peer._bytes_sent,
'bytesrecv': peer._bytes_received,
'ready': peer._ready,
'incoming': peer._incoming,
}
peers.append(peer_info)
rv["peers"] = peers
rv['peers'] = peers
return rv


@@ -1,523 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import base64
import json
import threading
import traceback
import websocket
from queue import Queue, Empty
from basicswap.util.smsg import (
smsgEncrypt,
smsgDecrypt,
smsgGetID,
)
from basicswap.chainparams import (
Coins,
)
from basicswap.util.address import (
b58decode,
decodeWif,
)
def encode_base64(data: bytes) -> str:
return base64.b64encode(data).decode("utf-8")
def decode_base64(encoded_data: str) -> bytes:
return base64.b64decode(encoded_data)
class WebSocketThread(threading.Thread):
def __init__(self, url: str, tag: str = None, logger=None):
super().__init__()
self.url: str = url
self.tag = tag
self.logger = logger
self.ws = None
self.mutex = threading.Lock()
self.corrId: int = 0
self.connected: bool = False
self.delay_event = threading.Event()
self.recv_queue = Queue()
self.cmd_recv_queue = Queue()
self.delayed_events_queue = Queue()
self.ignore_events: bool = False
self.num_messages_received: int = 0
def disable_debug_mode(self):
self.ignore_events = False
for i in range(100):
try:
message = self.delayed_events_queue.get(block=False)
except Empty:
break
self.recv_queue.put(message)
def on_message(self, ws, message):
if self.logger:
self.logger.debug("Simplex received msg")
else:
print(f"{self.tag} - Received msg")
if message.startswith('{"corrId"'):
self.cmd_recv_queue.put(message)
else:
self.num_messages_received += 1
self.recv_queue.put(message)
def queue_get(self):
try:
return self.recv_queue.get(block=False)
except Empty:
return None
def cmd_queue_get(self):
try:
return self.cmd_recv_queue.get(block=False)
except Empty:
return None
def on_error(self, ws, error):
if self.logger:
self.logger.error(f"Simplex ws - {error}")
else:
print(f"{self.tag} - Error: {error}")
def on_close(self, ws, close_status_code, close_msg):
if self.logger:
self.logger.info(f"Simplex ws - Closed: {close_status_code}, {close_msg}")
else:
print(f"{self.tag} - Closed: {close_status_code}, {close_msg}")
def on_open(self, ws):
if self.logger:
self.logger.info("Simplex ws - Connection opened")
else:
print(f"{self.tag}: WebSocket connection opened")
self.connected = True
def send_command(self, cmd_str: str):
with self.mutex:
self.corrId += 1
if self.logger:
self.logger.debug(f"Simplex sent command {self.corrId}")
else:
print(f"{self.tag}: sent command {self.corrId}")
cmd = json.dumps({"corrId": str(self.corrId), "cmd": cmd_str})
self.ws.send(cmd)
return self.corrId
def wait_for_command_response(self, cmd_id, num_tries: int = 200):
cmd_id = str(cmd_id)
for i in range(num_tries):
message = self.cmd_queue_get()
if message is not None:
data = json.loads(message)
if "corrId" in data:
if data["corrId"] == cmd_id:
return data
self.delay_event.wait(0.5)
raise ValueError(
f"wait_for_command_response timed-out waiting for ID: {cmd_id}"
)
def run(self):
self.ws = websocket.WebSocketApp(
self.url,
on_message=self.on_message,
on_error=self.on_error,
on_open=self.on_open,
on_close=self.on_close,
)
while not self.delay_event.is_set():
self.ws.run_forever()
self.delay_event.wait(0.5)
def stop(self):
self.delay_event.set()
if self.ws:
self.ws.close()
def waitForResponse(ws_thread, sent_id, delay_event):
sent_id = str(sent_id)
for i in range(200):
message = ws_thread.cmd_queue_get()
if message is not None:
data = json.loads(message)
if "corrId" in data:
if data["corrId"] == sent_id:
return data
delay_event.wait(0.5)
raise ValueError(f"waitForResponse timed-out waiting for ID: {sent_id}")
def waitForConnected(ws_thread, delay_event):
for i in range(100):
if ws_thread.connected:
return True
delay_event.wait(0.5)
raise ValueError("waitForConnected timed-out.")
def getPrivkeyForAddress(self, addr) -> bytes:
ci_part = self.ci(Coins.PART)
try:
return ci_part.decodeKey(
self.callrpc(
"smsgdumpprivkey",
[
addr,
],
)
)
except Exception as e: # noqa: F841
pass
try:
return ci_part.decodeKey(
ci_part.rpc_wallet(
"dumpprivkey",
[
addr,
],
)
)
except Exception as e: # noqa: F841
pass
raise ValueError("key not found")
def encryptMsg(
self,
addr_from: str,
addr_to: str,
payload: bytes,
msg_valid: int,
cursor,
timestamp=None,
deterministic=False,
) -> bytes:
self.log.debug("encryptMsg")
try:
rv = self.callrpc(
"smsggetpubkey",
[
addr_to,
],
)
pubkey_to: bytes = b58decode(rv["publickey"])
except Exception as e: # noqa: F841
use_cursor = self.openDB(cursor)
try:
query: str = "SELECT pk_from FROM offers WHERE addr_from = :addr_to LIMIT 1"
rows = use_cursor.execute(query, {"addr_to": addr_to}).fetchall()
if len(rows) > 0:
pubkey_to = rows[0][0]
else:
query: str = (
"SELECT pk_bid_addr FROM bids WHERE bid_addr = :addr_to LIMIT 1"
)
rows = use_cursor.execute(query, {"addr_to": addr_to}).fetchall()
if len(rows) > 0:
pubkey_to = rows[0][0]
else:
raise ValueError(f"Could not get public key for address {addr_to}")
finally:
if cursor is None:
self.closeDB(use_cursor, commit=False)
privkey_from = getPrivkeyForAddress(self, addr_from)
payload += bytes((0,)) # Include null byte to match smsg
smsg_msg: bytes = smsgEncrypt(
privkey_from, pubkey_to, payload, timestamp, deterministic
)
return smsg_msg
def sendSimplexMsg(
self,
network,
addr_from: str,
addr_to: str,
payload: bytes,
msg_valid: int,
cursor,
timestamp: int = None,
deterministic: bool = False,
to_user_name: str = None,
) -> bytes:
self.log.debug("sendSimplexMsg")
smsg_msg: bytes = encryptMsg(
self, addr_from, addr_to, payload, msg_valid, cursor, timestamp, deterministic
)
smsg_id = smsgGetID(smsg_msg)
ws_thread = network["ws_thread"]
if to_user_name is not None:
to = "@" + to_user_name + " "
else:
to = "#bsx "
sent_id = ws_thread.send_command(to + encode_base64(smsg_msg))
response = waitForResponse(ws_thread, sent_id, self.delay_event)
if getResponseData(response, "type") != "newChatItems":
json_str = json.dumps(response, indent=4)
self.log.debug(f"Response {json_str}")
raise ValueError("Send failed")
return smsg_id
def decryptSimplexMsg(self, msg_data):
ci_part = self.ci(Coins.PART)
# Try with the network key first
network_key: bytes = decodeWif(self.network_key)
try:
decrypted = smsgDecrypt(network_key, msg_data, output_dict=True)
decrypted["from"] = ci_part.pubkey_to_address(
bytes.fromhex(decrypted["pk_from"])
)
decrypted["to"] = self.network_addr
decrypted["msg_net"] = "simplex"
return decrypted
except Exception as e: # noqa: F841
pass
# Try with all active bid/offer addresses
query: str = """SELECT DISTINCT address FROM (
SELECT b.bid_addr AS address FROM bids b
JOIN bidstates s ON b.state = s.state_id
WHERE b.active_ind = 1
AND (s.in_progress OR (s.swap_ended = 0 AND b.expire_at > :now))
UNION
SELECT addr_from AS address FROM offers WHERE active_ind = 1 AND expire_at > :now
)"""
now: int = self.getTime()
try:
cursor = self.openDB()
addr_rows = cursor.execute(query, {"now": now}).fetchall()
finally:
self.closeDB(cursor, commit=False)
decrypted = None
for row in addr_rows:
addr = row[0]
try:
vk_addr = getPrivkeyForAddress(self, addr)
decrypted = smsgDecrypt(vk_addr, msg_data, output_dict=True)
decrypted["from"] = ci_part.pubkey_to_address(
bytes.fromhex(decrypted["pk_from"])
)
decrypted["to"] = addr
decrypted["msg_net"] = "simplex"
return decrypted
except Exception as e: # noqa: F841
pass
return decrypted
def parseSimplexMsg(self, chat_item):
item_status = chat_item["chatItem"]["meta"]["itemStatus"]
dir_type = item_status["type"]
if dir_type not in ("sndRcvd", "rcvNew"):
return None
snd_progress = item_status.get("sndProgress", None)
if snd_progress and snd_progress != "complete":
item_id = chat_item["chatItem"]["meta"]["itemId"]
self.log.debug(f"simplex chat item {item_id} {snd_progress}")
return None
conn_id = None
msg_dir: str = "recv" if dir_type == "rcvNew" else "sent"
chat_type: str = chat_item["chatInfo"]["type"]
if chat_type == "group":
chat_name = chat_item["chatInfo"]["groupInfo"]["localDisplayName"]
conn_id = chat_item["chatInfo"]["groupInfo"]["groupId"]
self.num_group_simplex_messages_received += 1
elif chat_type == "direct":
chat_name = chat_item["chatInfo"]["contact"]["localDisplayName"]
conn_id = chat_item["chatInfo"]["contact"]["activeConn"]["connId"]
self.num_direct_simplex_messages_received += 1
else:
return None
msg_content = chat_item["chatItem"]["content"]["msgContent"]["text"]
try:
msg_data: bytes = decode_base64(msg_content)
decrypted_msg = decryptSimplexMsg(self, msg_data)
if decrypted_msg is None:
return None
decrypted_msg["chat_type"] = chat_type
decrypted_msg["chat_name"] = chat_name
decrypted_msg["conn_id"] = conn_id
decrypted_msg["msg_dir"] = msg_dir
return decrypted_msg
except Exception as e: # noqa: F841
# self.log.debug(f"decryptSimplexMsg error: {e}")
self.log.debug(f"decryptSimplexMsg error: {e}")
pass
return None
def processEvent(self, ws_thread, msg_type: str, data) -> bool:
if ws_thread.ignore_events:
if msg_type not in ("contactConnected", "contactDeletedByContact"):
return False
ws_thread.delayed_events_queue.put(json.dumps(data))
return True
if msg_type == "contactConnected":
self.processContactConnected(data)
elif msg_type == "contactDeletedByContact":
self.processContactDisconnected(data)
else:
return False
return True
def readSimplexMsgs(self, network):
ws_thread = network["ws_thread"]
for i in range(100):
message = ws_thread.queue_get()
if message is None:
break
if self.delay_event.is_set():
break
data = json.loads(message)
# self.log.debug(f"Message: {json.dumps(data, indent=4)}")
try:
msg_type: str = getResponseData(data, "type")
if msg_type in ("chatItemsStatusesUpdated", "newChatItems"):
for chat_item in getResponseData(data, "chatItems"):
decrypted_msg = parseSimplexMsg(self, chat_item)
if decrypted_msg is None:
continue
self.processMsg(decrypted_msg)
elif msg_type == "chatError":
# self.log.debug(f"chatError Message: {json.dumps(data, indent=4)}")
pass
elif processEvent(self, ws_thread, msg_type, data):
pass
else:
self.log.debug(f"Unknown msg_type: {msg_type}")
# self.log.debug(f"Message: {json.dumps(data, indent=4)}")
except Exception as e:
self.log.debug(f"readSimplexMsgs error: {e}")
if self.debug:
self.log.error(traceback.format_exc())
self.delay_event.wait(0.05)
def getResponseData(data, tag=None):
if "Right" in data["resp"]:
if tag:
return data["resp"]["Right"][tag]
return data["resp"]["Right"]
if tag:
return data["resp"][tag]
return data["resp"]
def getNewSimplexLink(data):
response_data = getResponseData(data)
if "connLinkContact" in response_data:
return response_data["connLinkContact"]["connFullLink"]
return response_data["connReqContact"]
def getJoinedSimplexLink(data):
response_data = getResponseData(data)
if "connLinkInvitation" in response_data:
return response_data["connLinkInvitation"]["connFullLink"]
return response_data["connReqInvitation"]
def initialiseSimplexNetwork(self, network_config) -> None:
self.log.debug("initialiseSimplexNetwork")
client_host: str = network_config.get("client_host", "127.0.0.1")
ws_port: str = network_config.get("ws_port")
ws_thread = WebSocketThread(f"ws://{client_host}:{ws_port}", logger=self.log)
self.threads.append(ws_thread)
ws_thread.start()
waitForConnected(ws_thread, self.delay_event)
sent_id = ws_thread.send_command("/groups")
response = waitForResponse(ws_thread, sent_id, self.delay_event)
if len(getResponseData(response, "groups")) < 1:
sent_id = ws_thread.send_command("/c " + network_config["group_link"])
response = waitForResponse(ws_thread, sent_id, self.delay_event)
assert "groupLinkId" in getResponseData(response, "connection")
network = {
"type": "simplex",
"ws_thread": ws_thread,
}
self.active_networks.append(network)
def closeSimplexChat(self, net_i, connId) -> bool:
try:
cmd_id = net_i.send_command("/chats")
response = net_i.wait_for_command_response(cmd_id, num_tries=500)
remote_name = None
for chat in getResponseData(response, "chats"):
if (
"chatInfo" not in chat
or "type" not in chat["chatInfo"]
or chat["chatInfo"]["type"] != "direct"
):
continue
try:
if chat["chatInfo"]["contact"]["activeConn"]["connId"] == connId:
remote_name = chat["chatInfo"]["contact"]["localDisplayName"]
break
except Exception as e:
self.log.debug(f"Error parsing chat: {e}")
if remote_name is None:
self.log.warning(
f"Unable to find remote name for simplex direct chat, ID: {connId}"
)
return False
self.log.debug(f"Deleting simplex chat @{remote_name}, connID {connId}")
cmd_id = net_i.send_command(f"/delete @{remote_name}")
cmd_response = net_i.wait_for_command_response(cmd_id)
if getResponseData(cmd_response, "type") != "contactDeleted":
self.log.warning(f"Failed to delete simplex chat, ID: {connId}")
self.log.debug(
"cmd_response: {}".format(json.dumps(cmd_response, indent=4))
)
return False
except Exception as e:
self.log.warning(f"Error deleting simplex chat, ID: {connId} - {e}")
return False
return True
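
As a rough sketch of how the pieces above fit together: start the thread, wait for the socket to open, then pair each send_command() call with waitForResponse() via the returned correlation ID. The websocket URL and port are placeholders, and the names used (WebSocketThread, waitForConnected, waitForResponse, getResponseData) are assumed to be in scope from the module above.

import threading

# Placeholder endpoint; assumes a simplex-chat client is listening on this port.
delay_event = threading.Event()
ws_thread = WebSocketThread("ws://127.0.0.1:5225", tag="example")
ws_thread.start()
try:
    waitForConnected(ws_thread, delay_event)      # polls until on_open sets connected
    sent_id = ws_thread.send_command("/groups")   # returns the correlation ID
    response = waitForResponse(ws_thread, sent_id, delay_event)
    print(getResponseData(response, "groups"))    # unwraps resp / resp["Right"]
finally:
    ws_thread.stop()
    ws_thread.join()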

View File

@@ -1,132 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import os
import select
import sqlite3
import subprocess
import time
from basicswap.util.daemon import Daemon
def initSimplexClient(args, logger, delay_event):
logger.info("Initialising Simplex client")
(pipe_r, pipe_w) = os.pipe() # subprocess.PIPE is buffered, blocks when read
if os.name == "nt":
str_args = " ".join(args)
p = subprocess.Popen(
str_args, shell=True, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w
)
else:
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=pipe_w, stderr=pipe_w)
def readOutput():
buf = os.read(pipe_r, 1024).decode("utf-8")
response = None
# logger.debug(f"simplex-chat output: {buf}")
if "display name:" in buf:
logger.debug("Setting display name")
response = b"user\n"
else:
logger.debug(f"Unexpected output: {buf}")
return
if response is not None:
p.stdin.write(response)
p.stdin.flush()
try:
start_time: int = time.time()
max_wait_seconds: int = 60
while p.poll() is None:
if time.time() > start_time + max_wait_seconds:
raise RuntimeError("Timed out")
if os.name == "nt":
readOutput()
delay_event.wait(0.1)
continue
while len(select.select([pipe_r], [], [], 0)[0]) == 1:
readOutput()
delay_event.wait(0.1)
except Exception as e:
logger.error(f"initSimplexClient: {e}")
finally:
if p.poll() is None:
p.terminate()
os.close(pipe_r)
os.close(pipe_w)
p.stdin.close()
def startSimplexClient(
bin_path: str,
data_path: str,
server_address: str,
websocket_port: int,
logger,
delay_event,
socks_proxy=None,
log_level: str = "debug",
) -> Daemon:
logger.info("Starting Simplex client")
if not os.path.exists(data_path):
os.makedirs(data_path)
simplex_data_prefix = os.path.join(data_path, "simplex_client_data")
simplex_db_path = simplex_data_prefix + "_chat.db"
args = [bin_path, "-d", simplex_data_prefix, "-p", str(websocket_port)]
if socks_proxy:
args += ["--socks-proxy", socks_proxy]
if not os.path.exists(simplex_db_path):
# Need to set initial profile through CLI
# TODO: Must be a better way?
init_args = args + ["-e", "/help"] # Run command to exit client
init_args += ["-s", server_address]
initSimplexClient(init_args, logger, delay_event)
else:
# Workaround to avoid error:
# SQLite3 returned ErrorConstraint while attempting to perform step: UNIQUE constraint failed: protocol_servers.user_id, protocol_servers.host, protocol_servers.port
# TODO: Remove?
with sqlite3.connect(simplex_db_path) as con:
c = con.cursor()
if ":" in server_address:
host, port = server_address.split(":")
else:
host = server_address
port = ""
query: str = (
"SELECT COUNT(*) FROM protocol_servers WHERE host = :host and port = :port"
)
q = c.execute(query, {"host": host, "port": port}).fetchone()
if q[0] < 1:
args += ["-s", server_address]
args += ["-l", log_level]
opened_files = []
stdout_dest = open(
os.path.join(data_path, "simplex_stdout.log"),
"w",
)
opened_files.append(stdout_dest)
stderr_dest = stdout_dest
return Daemon(
subprocess.Popen(
args,
shell=False,
stdin=subprocess.PIPE,
stdout=stdout_dest,
stderr=stderr_dest,
cwd=data_path,
),
opened_files,
"simplex-chat",
)
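
A minimal sketch of invoking the launcher above, assuming the module is importable; the binary path, data directory and SMP server address are placeholders rather than values taken from the repository.

import logging
import threading

logger = logging.getLogger("simplex")
delay_event = threading.Event()

client = startSimplexClient(
    bin_path="/opt/simplex/simplex-chat",                 # placeholder path to the simplex-chat binary
    data_path="/tmp/simplex_client",                      # client state and logs are created here
    server_address="smp://fingerprint@smp.example.com",   # placeholder SMP server address
    websocket_port=5225,
    logger=logger,
    delay_event=delay_event,
)
# The returned Daemon wraps the Popen handle and the opened stdout log file;
# the caller terminates the process and closes the file on shutdown.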

View File

@@ -1,20 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from basicswap.util.address import b58decode
def getMsgPubkey(self, msg) -> bytes:
if "pk_from" in msg:
return bytes.fromhex(msg["pk_from"])
rv = self.callrpc(
"smsggetpubkey",
[
msg["from"],
],
)
return b58decode(rv["publickey"])

View File

@@ -1,54 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
Comment: FB44 AF81 A45B DE32 7319 797C 8510 7E35 7D4A 17FC
Comment: SimpleX Chat <chat@simplex.chat>
xsFNBGRDvZkBEACsxFENFWj5hMS1dCPCOXIJTNnWClVarltfUOESy5q0Ar84WJaj
hmAcc8j1Qw7uiLxVq/j+tMxcZOy79jnmhWpV5KrYA6H/E3I5NNlZOyT23rvah9mg
KtxfMHnhz/jJSwSXifYN2mmAYetQ1TQBSdLZayC7aW6BFhUaaQsaFABGli5abRUW
KArmnSfVEHI0f7TthLerPZ0hCoK06ZOPxEKCWt5CSqrC3J2d+8Cyb6j2jxkkB3GN
JXr9kI4JebivqrFNwvGw15xEDbSXIZf9I/+B/t9EA4Ebs+qrbLFRH5Drha50RIhu
LNYCkVnpKbrO6Y90KkJibm4ZtdUeNTFXjfXxT81Gi5lDmsvIyIMkFC78ePK68knM
dnESnIzEEwDtniV+ZvY0L9t/Ig1tGYggqPGVTVp9672bHKTGdiL3eXEzwv0FROD2
0HaZORXj2UZkAJTQO2ia7aS3hWdJL/iVBf4yIYARr+6NjPxv/sUMCaeuPYXTqCOB
Ykl6Lv3SPoSkEyPfVJY+12STtHH1ZofxJKYwo6Xe7EvmCiC9DK0KKVbeakZZ6wfd
5LO/tArDkqT2YjT3DUsfGqxQOoQvGCmk9yUuCm0s0vLwTHdJhSVgn9dxrEuK4FYL
IM3tGENAPAcK3e1VEbncgBMRikxvECKIz+YZyQVtoYzX2HDlT4D2HrbgXQARAQAB
zSBTaW1wbGVYIENoYXQgPGNoYXRAc2ltcGxleC5jaGF0PsLBlAQTAQgAPhYhBPtE
r4GkW94ycxl5fIUQfjV9Shf8BQJkQ72ZAhsDBQkHhh9CBQsJCAcCBhUKCQgLAgQW
AgMBAh4BAheAAAoJEIUQfjV9Shf8GekP/jpZYGJrna7467Qe82KV+qtwu+p2cRIy
IsoOmCje2p0D9DmmmDQH1IdxlJhvHZ8uEu21QwDK03r5y4iaXhz9bx4CDSDB5JPp
fMIDfOdc1V1GDT8Q2f/sYd5DX9kwpW6LdWOQZf6hwRDAeWDa+BQVhwo3E0WsPvRK
o5fqrbJzfWj8pz+JMlT8RGGt0ZxEyUjnD9C6XfqGckLdubBycs9CipPKV+3X4cY/
ix0zM2Nb3oSJ27VWMIFxi7lnBGtyUY69bE248Xhj0nJ79twPwzvk94+3e5tLQvyt
NIZcWEZEu+eYthyKcGDo/aA6lIvt1Bqp8eeFMogRxs5GJI4L/wQGwIDckemtLb45
gUdjpufEfPEfxuYWuuHuQ8W7Yvd2/ndiRkir4k+r8ypXx8yeCgocxnuUm1+4s+Wv
h64Op+M+l56cTjVCaEn25kv/T+4ll/RBplzKdNe4ClcH4NXppwXFkAHXq/j3RX++
64gRzIEC33TGheTo3btowUW+0/6iOi7Jy1RDsNvigzWwpm0p+pje54+d7hTxDmLR
bHxOZ9QiauO/HnlqNw/MezZLYL18hyEsghD3ns6QIHcUsHf17u/tRfLgN11x9tiE
ADqORsNgQ8FIRGdJcxGIt8lUlSe5vKPArsjpiomoA9CeAqepU27haIesl2QGe/jI
5OuS7CsRVDlOzsFNBGRDvZkBEADGYf7E+bzYgORnlSY3TZgS5UvkMIGswlw6GW7j
Vx6hAsMbiCwoKCVdzl/J4BImbJIJg2Pxvn/k7tYS2Jqb1q/EcpBmOZU9BRiTw49A
TiK8UfeH9aIPNFwuiatmA29dGxPH2RgSCwa3f4l2RsnQl301UdNlXj6mmWngD6mj
ae5/COUgH6CbKptfLp0Xw0WpPfKV1GK9+/X8Hv7W6RDA6xoWFlgzTyuy96rMmXJ1
3E7P/50ebIOundVzCni10dZyn7+W13cJGOyzxQnbR6PEMVHsgi4uZB/Gt6PxF0dC
s56IUi05hr9uH++p7ps2G8iIwvqXDu8VOvwN9hvt1fpxnRC2+Zv0lHwpDrnSBvjY
8er2tJxXlybXwEpk1nzctmDDWrgbBgQugOxTu4rkqIvAGwq7U98aLUb3vEqlyWSp
YDufsiLbGYC5owCli36yDzjfm48W0DwaOA5Ne5yVCih1f4ocF3RXVU6o1TEW1pfL
DEDZOXDT9sj0qef9NW0Nz+x/EiCT2k2Bkwt0ETf4TralsJ7smCcbhqfJbu1NG22g
oLNXZcZgTUxmOWmU+nlrFk8Hk7EK2KDeMKSgiX6jrAGpwbphrYYBZ3NLpvJ311l2
d56ZgmUt8gb1O5tLNiD2ySCvWKnpG0A5WoKZ2329nlnX2R30otYdpP1vcAEvA3GU
7fw5lQARAQABwsF8BBgBCAAmFiEE+0SvgaRb3jJzGXl8hRB+NX1KF/wFAmRDvZkC
GwwFCQeGH0IACgkQhRB+NX1KF/wNbw//bi4RcxEOVJpT37pyx6wSlq6urHopuZA5
duy0fGYxRXt4w/WR0UMH9i7iSU8J2E/UKgE7OMZg3oJqVt7g70zQDiT8ez+ep9d0
YvPAqgRnT1VDmAyMO8FOTPQPIrPMsQTnmtmxf9qrdoxW8HVqiyK+7mCGqd9ldcer
XGplALTugRWABY7iYyRyfpDSid+xMKV7KLHabv/0WdcT41HpZuUt0gmH0sMDDiJt
XrWW01LDqEZTdfaZ1xXPPp7oXUYGY6U7cH5CdLS6D38tPKR9x0ttgM83/SOx/hOO
XApcA+g113eMOyh4udowGYEkpxT26V3u8cLzCBOPDNSFx/H8ggFbfMsCWNBYV2Nx
EmAmciHvPMNLR7Hjfvn018/Q+lo1J6snoEhT9zFwpL15Lwurkqy5Z4n1D9BUyZ7m
hS/Wg7LDpaEeJCkSkOvQEPKz8YsnMpsbPc44ZZf0yuTUsWwJkZCVEqN8qByKXRdI
28zGBBJr5/rjaSJJ7+VGbh/FGUzaEkLONybzKcxazwjSASBNZXmasgStngOGWGpM
GKDnIuXs/Z7vljkKF2YoNT9bvGr7yoY74PCKrMkWdVSA1cQBj+cJ4OOojVvOGJaR
Gdpp/2r7me5UKImmUw2dhHf0KdM1iYwjzztCO72hi5Fw7vFlNS7QoadmYDzAgWkk
0oXYKNS+x2w=
=68E9
-----END PGP PUBLIC KEY BLOCK-----

File diff suppressed because it is too large.

View File

@@ -1,25 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQMuBFmZ6L4RCACuqDDCIe2bzKznyKVN1aInzRQnSxdGTXuw0mcDz5HYudAhBjR8
gY6sxCRPNxvZCJVDZDpCygXMhWZlJtWLR8KMTCXxC4HLXXOY4RxQ5KGnYWxEAcKY
deq1ymmuOuMUp7ltRTSyWcBKbR9xTd2vW/+0W7GQIOxUW/aiT1V0x3cky+6kqaec
BorP3+uxJcx0Q8WdlS/6N4x3pBv/lfsdrZSaDD8fU/29pQGMDUEnupKoWJVVei6r
G+vxLHEtIFYYO8VWjZntymw3dl+aogrjyuxqWzl8mfPi9M/DgiRb4pJnH2yOGDI6
Lvg+oo9E79Vwi98UjYSicsB1dtcptKiA96UXAQD/hDB+dil7/SX/SDTlaw/+uTdd
Xg0No63dbN++iY4k3Qf/Xk1ZzbuDviLhe+zEhlJOw6TaMlxfwwQOtxEJXILS5uIL
jYlGcDbBtJh3p4qUoUduDOgjumJ9m47XqIq81rQ0pqzzGMbK1Y82NQjX5Sn8yTm9
p1hmOZ/uX9vCrUSbYBjxJXyQ1OXlerlLRLfBf5WQ0+LO+0cmgtCyX0zV4oGK7vph
XEm7lar7AezOOXaSrWAB+CTPUdJF1E7lcJiUuMVcqMx8pphrH+rfcsqPtN6tkyUD
TmPDpc5ViqFFelEEQnKSlmAY+3iCNZ3y/VdPPhuJ2lAsL3tm9MMh2JGV378LG45a
6SOkQrC977Qq1dhgJA+PGJxQvL2RJWsYlJwp79+Npgf9EfFaJVNzbdjGVq1XmNie
MZYqHRfABkyK0ooDxSyzJrq4vvuhWKInS4JhpKSabgNSsNiiaoDR+YYMHb0H8GRR
Za6JCmfU8w97R41UTI32N7dhul4xCDs5OV6maOIoNts20oigNGb7TKH9b5N7sDJB
zh3Of/fHCChO9Y2chbzU0bERfcn+evrWBf/9XdQGQ3ggoLbOtGpcUQuB/7ofTcBZ
awL6K4VJ2Qlb8DPlRgju6uU9AR/KTYeAlVFC8FX7R0FGgPRcJ3GNkNHGqrbuQ72q
AOhYOPx9nRrU5u+E2J325vOabLnLbOazze3j6LFPSFV4vfmTO9exYlwhz3g+lFAd
CrQ2Q2FsaW4gQ3VsaWFudSAoTmlsYWNUaGVHcmltKSA8Y2FsaW4uY3VsaWFudUBn
bWFpbC5jb20+iHoEExEIACIFAlmZ6L4CGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4B
AheAAAoJECGBClQgMcAsU5cBAO/ngONpHsxny2uTV4ge2f+5V2ajTjcIfN2jUZtg
31jJAQCl1NcrwcIu98+aM2IyjB1UFXkoaMANpr8L9jBopivRGQ==
=cf8I
-----END PGP PUBLIC KEY BLOCK-----

View File

@@ -1,166 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBF1ULyUBEADFFliU0Hr+PRCQNT9/9ZEhZtLmMMu7tai3VCxhmrHrOpNJJHqX
f1/CeUyBhmCvXpKIpAAbH66l/Uc9GH5UgMZ19gMyGa3q3QJn9A6RR9ud4ALRg60P
fmYTAci+6Luko7bqTzkS+fYOUSy/LY57s5ANTpveE+iTsBd5grXczCxaYYnthKKA
ecmTs8GzQH8XEUgy6fduHcGySzMBj87daZBmPl2zninbTmOYkzev38HXFpr6KinJ
t3vRkhw4AOMSdgaTiNr6gALKoKLyCbhvHuDsVoDBQtIzBXtOeIGyzwBFdHlN2bFG
CcH2vWOzg/Yp1qYleWWV7KYHOVKcxrIycPM0tNueLlvrqVrI59QXMVRJHtBs8eQg
dH9rZNbO0vuv6rCP7e0nt2ACVT/fExdvrwuHHYZ/7IlwOBlFhab3QYpl/WWep2+X
95BSbDOXFrLWwEE9gND+douDG1DExVa3aSNXQJdi4/Mh7bMFiq2FsbXqu+TFSCTg
ae33WKl/AOmHVirgtipnq70PW9hHViaSg3rz0NyYHHczNVaCROHE8YdIM/bAmKY/
IYVBXJtT+6Mn8N87isK2TR7zMM3FvDJ4Dsqm1UTGwtDvMtB0sNa5IROaUCHdlMFu
rG8n+Bq/oGBFjk9Ay/twH4uOpxyr91aGoGtytw/jhd1+LOb0TGhFGpdc8QARAQAB
tBtQYXN0YSA8cGFzdGFAZGFzaGJvb3N0Lm9yZz6JAlQEEwEIAD4WIQQpWQNi7IeK
gf08ICtSUnvtq+h5hAUCXVQvJQIbAwUJA8PHawULCQgHAgYVCgkICwIEFgIDAQIe
AQIXgAAKCRBSUnvtq+h5hMqeEACQteY571XK50dW1oQzjgPq5tVuchoRQI727pr7
5145o2rOe0e0xrWzVNnhd9ZDzC4j8dh6wWVQWErHr+3Hhn8sCUW2PNU+o3GvhGR6
aqPl0Oh5gt4wHZalrcUnZ5u/RtFbDmGilobdASL/mpZge8ymLBj2lKiRR2X/JQe/
KAzr/7QW1zLh2oEUOOGVas6Ev+ziosAE0b3upGTHJFPQPMFv4za22MbeTKYeqyJ6
W6LdQDDssC/RBQKZXj3pRweA6RQFGOqw44CbtIHuQu/PV8ZDTpE+v9cWAzoNCMcQ
2fm5tCM8zYytt3perbA3VPwZNXcsITcRpIS5FgoeOntgIwzzKVmY+4GD8uWM/DHt
JPxyry7LpSa8CNyx+oN+Z2qCChn03ycJzO3UFsaCMG/CMAEkLxbg0AcxNyQ8kvIG
lcEDLINaz1xuHAtAxqTQKMYCP1xtd5rhGOe1FkGfVYEJX97+JgMGa8+2nD5+A6wG
0+JaJllqzfXY1VhNoVmfS/hFPQ+t/84jNSGR5Kn956C5MvTK65VumH+NRE59kpt1
nsIQNKu/v6fZUnbRtCFC05BSwIjoTzFvKXycJkCVjdSYARWkagki4bbFC1WZQuA9
BOF5TOUAYt6zaEBfAJgjeRT71Mr03eNExXaLm9k/hmvapGpmtJQhLY6NKPm/ctyf
IaEz/YkCVwQTAQgAQQIbAwIXgAUJDS2jLwULCQgHAgYVCgkICwIEFgIDAQIeBRYh
BClZA2Lsh4qB/TwgK1JSe+2r6HmEBQJlrVMsAhkBAAoJEFJSe+2r6HmE0KcP/2EG
b4CWvsmn3q6NoBmZ+u+rCitaX33+kXc4US6vRvAfhe0YiOWr5tNd4lg2JID+6jsN
2NkAZYgzm4TXXJLkjXkrB+s0sFkCjyG1/wBfZlPUSfxoDFusJry87N/7E9yMX7A+
YV2Hh/yOXbR+/jSINfmjC+3ttjWDUsUWT9m1yN8SBNg6h66TLffFyXgGFkRKYE27
eprP0cuVkI6Fks68ocSQ5FQ7gmdMCC4JFtOI4e1ax6mfvTFz2e2f5DlohPjW9w4e
KTn+k98Nuev+s3WGiDXjxSABoehAdwz2mbEjPsuz0jLeYKn6ialHh+hruYZozx8d
xpUIWEVlMwLDBteWCuwTp+XPmOvaKkgYLxkfjjeIqUy17f6py17GrDZFHLeiopcJ
qyQJ0XLQI/qAKXkySBpvGD86nrM1i+5X7nLxZ0YfjKQ7cI+fp5A6SsQPUk9SI95P
XRssx481zNse5wxFMP8J9oIB6nger39lpRRmvaSUJDNWjfsRZ/XK4mfib2OlLXoo
WuU5lCwqtQ+Jw9Zr/Gby2kTNIjrfIpdNyThTnth+uTwcA8KCJRJY2BrPBtWNWqPL
xLv9RLR3/N1siyJcichExIBKEzOhzzi/i/PTU8dK2OBXrSaJ8DXhPwyNTB2l7jnX
BO0hxeO4gmzAFQpM7QXXVDguL0b594y05UNOM/ljiQIcBBMBAgAGBQJeut/oAAoJ
ECqAP87D6bin7ZMP/3be6BDv/zf0gCTmgjD6StvPHu+F17op4VPj2cHYCgFP1ZHF
H2RjqRVhSN6Wk+hbmR5PDHoVA2ncxITv/DddKRjYc7fPRlrje7H19+urJgqqkWzm
uUbNlxKiXiVW/OPmCjjI89Okt3dZGCTicEAPzJ6LTpoVgo4n/Eu81nMm6caf++Pz
z1vEI3bJdPHPYyI+gN64mEhfP4OJu8v2XTbj+0ua3JxYWilxF7haytApmaPqeT7u
OEBrX7EV1M+DlQCSM61u2EC5eIwAoDba/ENXNyg5Z1JbFe3DxqE6ZVcAcZWXGdtP
otayuEy6WL3LB2UUsM4UB4FPSUwcFvnkV8YzBSV8Rqx+mkOFM6BhxzwK0zPvY+vv
+rXSwz7uE/yrToqO9KvGhFxMwMwzTRAJXI870fJQ9c5z2LzxoNg5gOUQH4vPG6YQ
T1ev04fj7IGYch9EhrSjuLCm94BApOEA+h/TTN6+xVLemUSB/l+Obm5701PP/naV
prCJcCqIU3tH5HU3BXpZH++AzWo0pmgbtd7ECsR/y0NR4Mxoef677q9YGJEG/psY
C0GZlzWsY5zjala+bEVn5gvbw6Lh4Q2gwpvVXdygb6PSPwRSkpgHtUxdvIQsDEaB
BGg/ae0x3O55z2/z95acnhIMRqQpUpnPmDZUBKlsDJ8tivw/2r8o16YtAlJ0iQEz
BBABCAAdFiEEYKz3C/cSZFBJ7m8V7+rxZoYiX2QFAmWp9dIACgkQ7+rxZoYiX2St
Mwf8CdL0fhz2TM1R79n+FW7QCSaINBzIE1lN2TbdVEZeyiwQLn9cbqOvVPFavj4v
xWFIXfAYzitLDHkikmg5Qzj7OXB2plFnqJxZ1tZSC1EdMHuNX1j55FDAggV/U/yv
2PDY2XuwJbj/hLj80oNzIL5qLnNco0CLggB8QLLleFw4BTKycGDrzQCk4AGQ8tDR
NoyI6Q/oFQtWQgQdm9Cs02Myr51QZBe09XXA4wpyqv9BM+E0o8SLp/x/wZXM99vD
Na7Df0nsRIQukFy5HqJJTufP1b6QFVMY1ouweyLxABXO4cvtYpOAUwQroY4U/q9Z
nRzxj8Sq+reAt8O/wwJ8ujy9ILR8UGFzdGEgKFNlZSBrZXliYXNlLmlvL3Bhc3Rh
IGZvciBwcm9vZnMgb24gbXkgaWRlbnRpZnkuIDYwQUNGNzBCRjcxMjY0NTA0OUVF
NkYxNUVGRUFGMTY2ODYyMjVGNjQgaXMgbXkgb2ZmbGluZSBvbmx5IEdQRyBrZXku
KYkCVAQTAQgAPgIbAwUJDS2jLwIXgBYhBClZA2Lsh4qB/TwgK1JSe+2r6HmEBQJl
qf1lBQsJCAcCBhUKCQgLAgQWAgMBAh4FAAoJEFJSe+2r6HmEhQMP/jiIGD9/Zzwa
GeBtrCD46WNT7Gxs9g/Lo+OsHqKzieN/H8EW61uS0kmkP7kKJdJHnpL7e8Q280OC
+YxV5YMG4byHmtOSvAbDNCTG8Eg3C7QW79ECIZaJldp5Bv6yrbwqsJyeDNfR61Zq
6lyG2Atvgt6fKjeHpxnDUfr0a9DqfkN8DLADzy1srwWlwilSAzhGBRsS7OV6gsbi
ZrQ/4sh/ZNtf/4lo3X/vyhKStTjh9UEEJykwkDyV+Ih3htrUAjHkKl60wHUKobxB
Jhsarye+DmrN+FIrHfvywpuGv+Xp6EXxGlbzlTUtTaDFF9b71AuGDFOjprbDaNJA
recDj8WwxW9rwyrRH52TBAAtLJNkk7Yt7rruVocDgwJo0h9WP8OIzerZDn0sUNpN
OGtdnbWRkAVgSCgoFVgeRWX4UpT120vDTEuwkhp7r8MhNqE96LGpBBRUhk1tSrKl
+ewKgP1f/px+hO+0er9f+tTFP5vH9RQ3v+VpjzwVK2e2mez/nRwkdj0OVubUD0rU
cXiIt7rGNSSjGDvPKrRFsApYIGIfeDg9y/c0L0PCBqiZ6XEi46NEDYJGutg/ChbM
9wI3D1WLC3oKP4Z+2z96FyiOkvj7sYM23jAVii7YT18dpJSw6B7jV4FBpE7mrlFU
qBlsSJck6gb0qXkmfNTtgRP0/8De+8p9iQEzBBABCAAdFiEEYKz3C/cSZFBJ7m8V
7+rxZoYiX2QFAmWp9ocACgkQ7+rxZoYiX2SLEQf+MXqtD4WGMiGgKg9eaVCGMJn8
N+Y0nqxwpCVq6RAJGdjYcT4BCfNTwjdYKqBEPRfK5JP+VZ6RZ6nBfZxUTfzomWWF
L6M+A6A1+4Y8++SJvnSn+CqlvIOjFAUx37lf7KwXRDWKK9pmQn1+iZ0IwowXvRzl
DIfwlc5phTq7YUNZLgmytP1j0yhmdFHzaTUcq5waZIwIKDtaVORUyOCpUYc0sevz
Z3j1uLx8aWQXXfVYTQVNv1hmoarTZru0w0q5KTuJYyCX4quBjIutIoJ+N80OJ3SU
dAkCHFo4YEQAKubC/G7BHS4Q1btfqjkGF2kDX9e4amIQnrF3wcimESqi5xpn67QW
UGFzdGEgPHBhc3RhQGRhc2gub3JnPokCVAQTAQgAPgIbAwUJA8PHawIXgBYhBClZ
A2Lsh4qB/TwgK1JSe+2r6HmEBQJlqf1lBQsJCAcCBhUKCQgLAgQWAgMBAh4FAAoJ
EFJSe+2r6HmECFwQAIDwX6fe0y6bc42zNU3Sqtd+Q3OgZfW0Rg23viI1ujyJE1uk
mmGR0i0b2luM+lSw1xOpr+pEsRX0dfaqAbbyUVIgyIZ5viXDZyWyJXr7NuBQZalX
k4njNfAELnQN2MPy/dqpelb6/J+kn6q4TC4DN95bJtSzPLK16rI94sSO+XUAJaiU
pr++cUelALoa5yHBL0mGuhlkNgCNdTE0eVwBLRQDrAywcUOEb6f2eNHyK6UY7WLy
0/LZZv2SzG/ZNQEQNY15/vrDwsQvD1ZueY5haCRK0Ga5o3GWZACU/+/c4VL2Ew7K
odxAjhVHBz50wIe35DUKVkYOQDIx9y+e50CPJicKOsnwjpC+NzQCk462ixCO9DFI
+9AFTJ6TD2BxVRHxLyUY7J21Mes4EILKFAV2dAOSZnd6LgqiYzqovJl6FmaLJyRM
JEfqvTi6Vy38Ns/6PCVGJTWKVsKz2lDas6U3/71jS0FSEwEJ9Rv9Yo75uErypNlJ
MiEahwy7kxqs8BKLtuPrF6QKRB7RgWgVxxU7z92VKCBzKDD0Oe3CDu4Lfva0487d
+TwNIGJdDeJ+ywhhFXIoGmeRm1YZferx1u5PCphiDLVkDDlLEolbp3bxKnN+l4wC
OUvhabciX46H3sM6KGMSoDRjh5n0UPr2+67qBq/rNJRCkALEFrG46i/+mNrYiQEz
BBABCAAdFiEEYKz3C/cSZFBJ7m8V7+rxZoYiX2QFAmWp9dIACgkQ7+rxZoYiX2Se
cQf+IKiMpD8+D93HtmmwG0twBbPMOVta0NU90Gvjxkw/v/JIDEWlZECClUW6Se8Z
Icq+WRZeDP6UZharGAg2GfRpfrKIwVt/aP16LsCqq+SiP4xaohmpcXQxacS5u813
G9FFuxmHud3x7/sXtxKSVQRkhgQlq+RRG/s5CodNvjliM5OQiiXGr+q1tWy5QhRs
xCXj4CTc2CiV0ycWB36Cx9tkx+/s0pf7X4778wCrhzT6Ds5fT0W9uZifcglfI/p5
jYYQkGpOrnOiHkBU3F80iFowIGsiv8pfaSqBP8yBAOtNBSVo5ksqSaH+TpVeIb0/
pfGrM1BOzpTVfTmEj77qSE2tvrkCDQRdVC8lARAAu64IaLWAvMStxVsC/+NnwBBx
YPef4Iq5gB5P1NgkmkD+tyohWVnzdN/hwVDX3BAXevF8M+y6MouUA9IxJRt2W9PK
06ArTdwhFpiam2NAO5OOUhuJ1F8eAhRQ5VvI8MbVttZKSk3LiCmXGSj5UUXEFKS1
B7WztZVwqG6YswoAPwbNerZuwYbH2gfa9LK+av1cdZ8tnDaVmZWL8z1xSCyfRa/U
AtZht/CEoTvAwXJ6CxVUBngIlqVnK0KvOrNzol2m5x4NgPcdtdDlrTQE+SpqTKjy
roRe27D+atiO6pFG/TOTkx4TWXR07YTeZQJT/fntV409daIxEgShD0md7nJ7rVYy
8u+9Z4JLlt2mtnsUKHezo1Axrlri05cewPVYQLuJND/5e2X9UzSTpY3NubQAtkD1
PpM5JeCbslT9PcMnRuUydZbhn7ieW0b57uWpOpE11s2eIJ5ixSci4mSJE9kW+IcC
ic/PPoD1Rh2CvFTBPl/bsw6Bzw64LMflPjgWkR7NVQb1DETfXo5C2A/QU6Z/o7O4
JaAeAoGki/sCmeAi5W+F1kcjPk/L/TXM6ZccMytVQOECYBOYVUxZ2VbhknKOcSFQ
cpk8bj2xsD1xX2EYhkXcCQkvutIgHGz/dt6dtvcaaL85krWD/y8h68TTFjQXK0+g
8gcpexfqTMcLnF7pqEEAEQEAAYkCPAQYAQgAJhYhBClZA2Lsh4qB/TwgK1JSe+2r
6HmEBQJdVC8lAhsMBQkDw8drAAoJEFJSe+2r6HmEDzEP/A8H3JkeSa/03kWvudFl
oVbGbfvP+XkKvGnAZPGHz3ne/SV2tcXljNgU15xHvLktI4GluEfJxRPUqvUal1zO
R9hqpas0vX8gsf0r0d3om2DHCyMY8GscfDF05Y8fqf0nU5/oLDlwwp11IyW8BDLS
wwANsTLZ1ysukfYc4hoopU71/wdAl85fae7I2QRduImWlMADfUtc9Orfb1tAhPta
CJVZj5vgfUNSZOTUJ73RGbdL3Z2dc42lO3mRMyDkPdykkq0EgOo6zZLuHZQFhxTz
WIWeUT8vWNjpkdTeRHLvv3cwPRx1k1atrM+pE9YkhCg0EOMTcmN+FMekgnU+ee0c
ibn5wWOvE05zwRKYROx34va2U6TUU6KkV3fFuq3qqkXaiMFauhI1lSFGgccg7BCN
MhbBpOBkfGI3croFGSm2pTydJ87/+P9C9ecOZSqCE7Zt5IfDs/xV7DjxBK99Z5+R
GxtsIpNlxpsUvlMSsxUNhOWyiCKr6NIOfOzdLYDkhHcKMqWGmc1zC3HHHuZvX5u6
orTyYXWqc8X5p3Kh7Qjf/ChtN2P6SCOUQquEvpiY5J1TdmQSuoqHzg3ZrN+7EOKd
nUH7y1KB7iTvgQ07lcHnAMbkFDcpQA+tAMd99LVNSXh8urXhJ/AtxaJbNbCSvpkO
GB4WHLy/V+JdomFC9Pb3oPeiiQI8BBgBCAAmAhsMFiEEKVkDYuyHioH9PCArUlJ7
7avoeYQFAmEb0RAFCQ0to2sACgkQUlJ77avoeYRHuxAAigKlhF2q7RYOxcCIsA+z
Af4jJCCkpdOWwWhjqgjtbFrS/39/FoRSC9TClO2CU4j5FIAkPKdv7EFiAXaMIDur
tpN4Ps+l6wUX/tS+xaGDVseRoAdhVjp7ilG9WIvmV3UMqxge6hbam3H5JhiVlmS+
DAxG07dbHiFrdqeHrVZU/3649K8JOO9/xSs7Qzf6XJqepfzCjQ4ZRnGy4A/0hhYT
yzGeJOcTNigSjsPHl5PNipG0xbnAn7mxFm2i5XdVmTMCqsThkH6Ac3OBbLgRBvBh
VRWUR1Fbod7ypLTjOrXFW3Yvm7mtbZU8oqLKgcaACyXaIvwAoBY9dIXgrws6Z1dg
wvFH+1N7V2A+mVkbjPzS7Iko9lC1e5WBAJ7VkW20/5Ki08JXpLmd7UyglCcioQTM
d7YyE/Aho3zQbo/9A10REC4kOsl/Ou6IeEURa+mfb9MYPgoVGTcKZnaX0d40auRJ
ptosuoYLenXciRdUmfsADAb2pVdm5b2H3+NLXf+TnbyY/zm24ZFGPXBRSj7tQgaV
6kn9NPSg32Z1WcR+pAn3Jwqts3f1PNuYCrZvWv66NohJRrdCZc1wV4dkYvl2M1s+
zf8iTVti4IifNjn57slXtEsH36miQy2vN6Cp9I3A7m5WeL07i27P8bvhxOg9q6r3
NAgNcAK3mOfpQ/ej25jgI5y4MwRm9a42FgkrBgEEAdpHDwEBB0AqRGVWZSZaVkMJ
2QwXfknlrvSgrc8SagU0r0oDKsOsPIkCswQYAQgAJhYhBClZA2Lsh4qB/TwgK1JS
e+2r6HmEBQJm9a42AhsCBQkDwmcAAIEJEFJSe+2r6HmEdiAEGRYIAB0WIQQCuOfQ
AhZ8i0Ua8F/i89eRbnItOAUCZvWuNgAKCRDi89eRbnItOFVdAPwK6OXfnljdVrDx
akjecvA1HXCuRzzkyLPkTcYTCIqyXQD/aG664lvKWApb8z6DzPdi2ZGXvE4UgSYc
bFtju14RWguf7Q//TgaDjrbuPs6fbdXZdT/Glh2PbTtpJzY2QZQRnuXjn7nx6Nao
jBGMsQCHaI8kycmtZtU1uu1E4kEy5uzpXoRUJoZzHMOqntWxwpWoCypAKDrHsAJe
/JV/7PlPpqBsMdoCWbkj4THbgLwzkOPjWkvYIrbPNc/HmMIXXvUjBmgU6weG1mho
s7eHc+MhaNLT9L0m1AjnxN39EjwLVLu9K7KzTelJKIxQnXNM6IIH3PFcyTqR7b2e
E+Ds+J8H9DMfBnf7D6pl4M45IyvZlUzTPWNFddNcNEqVIlMCnyaSczjZVtPVmFfj
/b5zrQd+kWZEne3a5/JFkdnpyJW4yvRaqFUuLdypTJa4TklJ/z/lu1/x/DCbMmyB
XxChnOVwoqYyTiLD05VAD2+zoLZ630JC1i/BXl6vrhwGUJEcF7A1XDwPSQ4VFNwU
45dVVP+iMWYGjx5WlL/n/tmwXOT7TmhvXTsaYz0rlhEujrt//PTcIn0wLfHSPhbh
Dr34OnZdo366FkRGcMi/j1ViFRB7Z2bDaVGpI6zEXC2DqKcplYNFqXnlmqGp89/I
Yn9Ng1DdVbuZSaAITJ+cWyt/XQDwNpUSwe2H7FtJUyZs697I05wJdBqDgPOlWk+d
w7ITptFnGG93750xYBA1k9T0OYpNwJB8IZDIRaIJ1G16qe19PfNcHyK1PbS4MwRm
9bROFgkrBgEEAdpHDwEBB0B92inq37NVcsS1Ls23yNdXE2nz3BXfscywSVXBqNZN
bIkCswQYAQgAJhYhBClZA2Lsh4qB/TwgK1JSe+2r6HmEBQJm9bROAhsCBQkDwmcA
AIEJEFJSe+2r6HmEdiAEGRYKAB0WIQRHpeVRP4vUB1Zsqy7N3qfpETFgUwUCZvW0
TgAKCRDN3qfpETFgUz3EAP9xNJ/BQGkvD7uZCkE+mUg0EPtrL9RU1DCKmNHY9h3P
IAD7B6v4nvM01lOBaxLnXxcESbV/eY9wcl8W/33L5fYBpQ9vvQ/+IlVEdqugj+0W
PBO5fbWOegpFR9ujNWIT7GUHY+kgiNXncNY2zXHpNAz/k/TKrAQHuNjMzLIL2Zhf
NuFTRPZ2qyzJUY+tFfMwqYUG9dW/oY5IydTVQLrkEDffGob7S7p/+aXs7/L0Dmp/
u5z3pX5GJxUlmjXedx/tyNZEQeqFquCmIABUh2XGCW7IQ2nXMTJUjgMuphtQ8JkS
n2de2HwVTkx6RonebA5fHQP07IfUiVFpSAZqZJvQ6HNVwTMaP9lU3JzvmexJSL74
zmm7YEoH1C+Cz6jGi3mlsIY8y+xSQ14vOoO6I+TulF9vEFNoQO5l9IYbqNMTGA7r
2Ukq8GH0n9rfAxJEM7OkaX4pZNKXXG2d0DbvoJjSNTyctQkGrl1EKYL8rRY5CKpz
/X1akcKXaJ6mYoLeYamTsZzXEsO7r10nKGKhZMt1cpvf8qy6PsSTCEhbo+YE///L
0ppFGugsl1QqDgjYaLci7Wcz7kHgYdHttsXT2bq1q0AvHsTt9TjFNFKwnGDGsw28
XHYJkZs5vJOQj46glPxEsHMdkdZzUIyCC3HT/KfvArfdDgZZQ4QhzTsG4Becsrfx
ch6p/gvyxN9gielc/pQZhqqUtB5PF9pv9f/OnQf8uGqbhPHr6i4GfwQCov7LTJhc
t8FIucvlOdt4EqKaSmoBQZk0Aj/N5q4=
=vjZr
-----END PGP PUBLIC KEY BLOCK-----

Some files were not shown because too many files have changed in this diff.