Merge pull request #311 from tecnovert/multinet

Multinet
Authored by tecnovert, committed via GitHub on 2025-06-08 19:46:57 +00:00.
21 changed files with 2600 additions and 858 deletions


@@ -38,6 +38,7 @@ jobs:
sudo apt-get install -y firefox
fi
python -m pip install --upgrade pip
+ pip install python-gnupg
pip install -e .[dev]
pip install -r requirements.txt --require-hashes
- name: Install


@@ -71,6 +71,7 @@ class BaseApp(DBMethods):
self.default_socket = socket.socket
self.default_socket_timeout = socket.getdefaulttimeout()
self.default_socket_getaddrinfo = socket.getaddrinfo
+ self._force_db_upgrade = False
def __del__(self):
if self.fp:

File diff suppressed because it is too large.


@@ -36,6 +36,11 @@ class KeyTypes(IntEnum):
KAF = 6
+ class MessageNetworks(IntEnum):
+ SMSG = auto()
+ SIMPLEX = auto()
class MessageTypes(IntEnum):
OFFER = auto()
BID = auto()
@@ -53,6 +58,8 @@ class MessageTypes(IntEnum):
ADS_BID_LF = auto()
ADS_BID_ACCEPT_FL = auto()
+ CONNECT_REQ = auto()
class AddressTypes(IntEnum):
OFFER = auto()
@@ -111,6 +118,7 @@ class BidStates(IntEnum):
BID_EXPIRED = 31
BID_AACCEPT_DELAY = 32
BID_AACCEPT_FAIL = 33
+ CONNECT_REQ_SENT = 34
class TxStates(IntEnum):
@@ -228,6 +236,10 @@ class NotificationTypes(IntEnum):
BID_ACCEPTED = auto()
+ class ConnectionRequestTypes(IntEnum):
+ BID = 1
class AutomationOverrideOptions(IntEnum):
DEFAULT = 0
ALWAYS_ACCEPT = 1
@@ -339,6 +351,8 @@ def strBidState(state):
return "Auto accept delay"
if state == BidStates.BID_AACCEPT_FAIL:
return "Auto accept failed"
+ if state == BidStates.CONNECT_REQ_SENT:
+ return "Connect request sent"
return "Unknown" + " " + str(state)


@@ -17,12 +17,13 @@ import traceback
import basicswap.config as cfg
from basicswap import __version__
- from basicswap.ui.util import getCoinName
from basicswap.basicswap import BasicSwap
from basicswap.chainparams import chainparams, Coins, isKnownCoinName
- from basicswap.http_server import HttpThread
from basicswap.contrib.websocket_server import WebsocketServer
+ from basicswap.http_server import HttpThread
+ from basicswap.network.simplex_chat import startSimplexClient
+ from basicswap.ui.util import getCoinName
+ from basicswap.util.daemon import Daemon
initial_logger = logging.getLogger()
initial_logger.level = logging.DEBUG
@@ -31,16 +32,6 @@ if not len(initial_logger.handlers):
logger = initial_logger
swap_client = None
- with_coins = set()
- without_coins = set()
- class Daemon:
- __slots__ = ("handle", "files")
- def __init__(self, handle, files):
- self.handle = handle
- self.files = files
def signal_handler(sig, frame):
@@ -131,6 +122,7 @@ def startDaemon(node_dir, bin_dir, daemon_bin, opts=[], extra_config={}):
cwd=datadir_path,
),
opened_files,
+ os.path.basename(daemon_bin),
)
@@ -161,6 +153,7 @@ def startXmrDaemon(node_dir, bin_dir, daemon_bin, opts=[]):
cwd=datadir_path,
),
[file_stdout, file_stderr],
+ os.path.basename(daemon_bin),
)
@@ -224,6 +217,7 @@ def startXmrWalletDaemon(node_dir, bin_dir, wallet_bin, opts=[]):
cwd=data_dir,
),
[wallet_stdout, wallet_stderr],
+ os.path.basename(wallet_bin),
)
@@ -284,8 +278,32 @@ def getCoreBinArgs(coin_id: int, coin_settings, prepare=False, use_tor_proxy=Fal
return extra_args
def mainLoop(daemons, update: bool = True):
while not swap_client.delay_event.wait(0.5):
if update:
swap_client.update()
else:
pass
for daemon in daemons:
if daemon.running is False:
continue
poll = daemon.handle.poll()
if poll is None:
pass # Process is running
else:
daemon.running = False
swap_client.log.error(
f"Process {daemon.handle.pid} for {daemon.name} terminated unexpectedly returning {poll}."
)
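mainLoop polls daemon.handle.poll() and expects each daemon object to expose handle, running and name. The real class lives in basicswap.util.daemon; the stand-in below is only a hedged sketch of that assumed shape.

    import subprocess
    import sys

    class DaemonStub:
        # Minimal stand-in with the attributes mainLoop touches: handle, running, name.
        def __init__(self, handle: subprocess.Popen, name: str):
            self.handle = handle
            self.running = True
            self.name = name

    # Usage sketch: poll() returns None while the child process is still alive.
    proc = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])
    d = DaemonStub(proc, "example-daemon")
    print(d.name, d.handle.poll())  # "example-daemon None" -> still running
    proc.terminate()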
def runClient(
- data_dir: str, chain: str, start_only_coins: bool, log_prefix: str = "BasicSwap"
+ data_dir: str,
+ chain: str,
+ start_only_coins: bool,
+ log_prefix: str = "BasicSwap",
+ extra_opts=dict(),
) -> int:
global swap_client, logger
daemons = []
@@ -311,13 +329,6 @@ def runClient(
with open(settings_path) as fs:
settings = json.load(fs)
- extra_opts = dict()
- if len(with_coins) > 0:
- with_coins.add("particl")
- extra_opts["with_coins"] = with_coins
- if len(without_coins) > 0:
- extra_opts["without_coins"] = without_coins
swap_client = BasicSwap(
data_dir, settings, chain, log_name=log_prefix, extra_opts=extra_opts
)
@@ -334,12 +345,46 @@ def runClient(
# Settings may have been modified
settings = swap_client.settings
try:
# Try start daemons
for network in settings.get("networks", []):
network_type = network.get("type", "unknown")
if network_type == "simplex":
simplex_dir = os.path.join(data_dir, "simplex")
log_level = "debug" if swap_client.debug else "info"
socks_proxy = None
if "socks_proxy_override" in network:
socks_proxy = network["socks_proxy_override"]
elif swap_client.use_tor_proxy:
socks_proxy = (
f"{swap_client.tor_proxy_host}:{swap_client.tor_proxy_port}"
)
daemons.append(
startSimplexClient(
network["client_path"],
simplex_dir,
network["server_address"],
network["ws_port"],
logger,
swap_client.delay_event,
socks_proxy=socks_proxy,
log_level=log_level,
)
)
pid = daemons[-1].handle.pid
swap_client.log.info(f"Started Simplex client {pid}")
for c, v in settings["chainclients"].items():
if len(start_only_coins) > 0 and c not in start_only_coins:
continue
- if (len(with_coins) > 0 and c not in with_coins) or c in without_coins:
+ if (
+ len(swap_client.with_coins_override) > 0
+ and c not in swap_client.with_coins_override
+ ) or c in swap_client.without_coins_override:
if v.get("manage_daemon", False) or v.get(
"manage_wallet_daemon", False
):
@@ -497,8 +542,7 @@ def runClient(
logger.info(
f"Only running {start_only_coins}. Manually exit with Ctrl + c when ready."
)
- while not swap_client.delay_event.wait(0.5):
- pass
+ mainLoop(daemons, update=False)
else:
swap_client.start()
if "htmlhost" in settings:
@@ -536,8 +580,7 @@ def runClient(
swap_client.ws_server.run_forever(threaded=True)
logger.info("Exit with Ctrl + c.")
- while not swap_client.delay_event.wait(0.5):
- swap_client.update()
+ mainLoop(daemons)
except Exception as e:  # noqa: F841
traceback.print_exc()
@@ -560,13 +603,13 @@ def runClient(
closed_pids = []
for d in daemons:
- swap_client.log.info(f"Interrupting {d.handle.pid}")
+ swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}")
try:
d.handle.send_signal(
signal.CTRL_C_EVENT if os.name == "nt" else signal.SIGINT
)
except Exception as e:
- swap_client.log.info(f"Interrupting {d.handle.pid}, error {e}")
+ swap_client.log.info(f"Interrupting {d.name} {d.handle.pid}, error {e}")
for d in daemons:
try:
d.handle.wait(timeout=120)
@@ -623,6 +666,9 @@ def printHelp():
"--startonlycoin Only start the provides coin daemon/s, use this if a chain requires extra processing." "--startonlycoin Only start the provides coin daemon/s, use this if a chain requires extra processing."
) )
print("--logprefix Specify log prefix.") print("--logprefix Specify log prefix.")
print(
"--forcedbupgrade Recheck database against schema regardless of version."
)
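Usage of the new flag would look something like the line below; the entry-point name is an assumption here, only the --forcedbupgrade option itself is taken from the diff.

    basicswap-run --datadir=~/.basicswap --forcedbupgrade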
def main():
@@ -630,6 +676,9 @@ def main():
chain = "mainnet"
start_only_coins = set()
log_prefix: str = "BasicSwap"
options = dict()
with_coins = set()
without_coins = set()
for v in sys.argv[1:]:
if len(v) < 2 or v[0] != "-":
@@ -665,6 +714,9 @@ def main():
ensure_coin_valid(coin)
without_coins.add(coin)
continue
if name == "forcedbupgrade":
options["force_db_upgrade"] = True
continue
if len(s) == 2:
if name == "datadir":
data_dir = os.path.expanduser(s[1])
@@ -693,8 +745,14 @@ def main():
if not os.path.exists(data_dir):
os.makedirs(data_dir)
if len(with_coins) > 0:
with_coins.add("particl")
options["with_coins"] = with_coins
if len(without_coins) > 0:
options["without_coins"] = without_coins
logger.info(os.path.basename(sys.argv[0]) + ", version: " + __version__ + "\n\n")
- fail_code = runClient(data_dir, chain, start_only_coins, log_prefix)
+ fail_code = runClient(data_dir, chain, start_only_coins, log_prefix, options)
print("Done.")
return fail_code


@@ -13,7 +13,7 @@ from enum import IntEnum, auto
from typing import Optional from typing import Optional
- CURRENT_DB_VERSION = 28
+ CURRENT_DB_VERSION = 29
CURRENT_DB_DATA_VERSION = 6
@@ -219,6 +219,7 @@ class Bid(Table):
bid_addr = Column("string")
pk_bid_addr = Column("blob")
proof_address = Column("string")
+ proof_signature = Column("blob")
proof_utxos = Column("blob")
# Address to spend lock tx to - address from wallet if empty TODO
withdraw_to_addr = Column("string")
@@ -485,6 +486,14 @@ class XmrSwap(Table):
b_lock_tx_id = Column("blob")
msg_split_info = Column("string")
def getMsgSplitInfo(self):
if self.msg_split_info is None:
return 16000, 17000
msg_split_info = self.msg_split_info.split(":")
return int(msg_split_info[0]), int(msg_split_info[1])
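A quick illustration of the accessor above: when msg_split_info is unset it falls back to the 16000/17000 defaults, otherwise it splits a colon-separated string into two integers. The standalone copy below mirrors that logic for clarity only.

    def get_msg_split_info(msg_split_info):
        # Same parsing as XmrSwap.getMsgSplitInfo, written as a free function.
        if msg_split_info is None:
            return 16000, 17000
        a, b = msg_split_info.split(":")
        return int(a), int(b)

    assert get_msg_split_info(None) == (16000, 17000)
    assert get_msg_split_info("24000:25000") == (24000, 25000)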
class XmrSplitData(Table):
__tablename__ = "xmr_split_data"
@@ -658,10 +667,44 @@ class CoinRates(Table):
last_updated = Column("integer")
- def create_db_(con, log) -> None:
- c = con.cursor()
+ class MessageNetworks(Table):
+ __tablename__ = "message_networks"
record_id = Column("integer", primary_key=True, autoincrement=True)
active_ind = Column("integer")
name = Column("string")
created_at = Column("integer")
class DirectMessageRoute(Table):
__tablename__ = "direct_message_routes"
record_id = Column("integer", primary_key=True, autoincrement=True)
active_ind = Column("integer")
network_id = Column("integer")
linked_type = Column("integer")
linked_id = Column("blob")
smsg_addr_local = Column("string")
smsg_addr_remote = Column("string")
# smsg_addr_id_local = Column("integer") # SmsgAddress
# smsg_addr_id_remote = Column("integer") # KnownIdentity
route_data = Column("blob")
created_at = Column("integer")
class DirectMessageRouteLink(Table):
__tablename__ = "direct_message_route_links"
record_id = Column("integer", primary_key=True, autoincrement=True)
active_ind = Column("integer")
direct_message_route_id = Column("integer")
linked_type = Column("integer")
linked_id = Column("blob")
created_at = Column("integer")
def extract_schema() -> dict:
g = globals().copy()
+ tables = {}
for name, obj in g.items():
if not inspect.isclass(obj):
continue
@@ -671,15 +714,13 @@ def create_db_(con, log) -> None:
continue
table_name: str = obj.__tablename__
- query: str = f"CREATE TABLE {table_name} ("
+ table = {}
+ columns = {}
primary_key = None
constraints = []
indices = []
- num_columns: int = 0
for m in inspect.getmembers(obj):
m_name, m_obj = m
if hasattr(m_obj, "__sqlite3_primary_key__"):
primary_key = m_obj
continue
@@ -690,46 +731,103 @@ def create_db_(con, log) -> None:
indices.append(m_obj)
continue
if hasattr(m_obj, "__sqlite3_column__"):
- if num_columns > 0:
- query += ","
col_type: str = m_obj.column_type.upper()
if col_type == "BOOL":
col_type = "INTEGER"
- query += f" {m_name} {col_type} "
- if m_obj.primary_key:
- query += "PRIMARY KEY ASC "
- if m_obj.unique:
- query += "UNIQUE "
- num_columns += 1
+ columns[m_name] = {
+ "type": col_type,
+ "primary_key": m_obj.primary_key,
+ "unique": m_obj.unique,
+ }
+ table["columns"] = columns
if primary_key is not None:
- query += f", PRIMARY KEY ({primary_key.column_1}"
+ table["primary_key"] = {"column_1": primary_key.column_1}
if primary_key.column_2:
- query += f", {primary_key.column_2}"
+ table["primary_key"]["column_2"] = primary_key.column_2
if primary_key.column_3:
- query += f", {primary_key.column_3}"
+ table["primary_key"]["column_3"] = primary_key.column_3
- query += ") "
for constraint in constraints:
- query += f", UNIQUE ({constraint.column_1}"
+ if "constraints" not in table:
+ table["constraints"] = []
+ table_constraint = {"column_1": constraint.column_1}
if constraint.column_2:
- query += f", {constraint.column_2}"
+ table_constraint["column_2"] = constraint.column_2
if constraint.column_3:
- query += f", {constraint.column_3}"
+ table_constraint["column_3"] = constraint.column_3
- query += ") "
+ table["constraints"].append(table_constraint)
for i in indices:
if "indices" not in table:
table["indices"] = []
table_index = {"index_name": i.name, "column_1": i.column_1}
if i.column_2 is not None:
table_index["column_2"] = i.column_2
if i.column_3 is not None:
table_index["column_3"] = i.column_3
table["indices"].append(table_index)
tables[table_name] = table
return tables
def create_table(c, table_name, table) -> None:
query: str = f"CREATE TABLE {table_name} ("
for i, (colname, column) in enumerate(table["columns"].items()):
col_type = column["type"]
query += ("," if i > 0 else "") + f" {colname} {col_type} "
if column["primary_key"]:
query += "PRIMARY KEY ASC "
if column["unique"]:
query += "UNIQUE "
if "primary_key" in table:
column_1 = table["primary_key"]["column_1"]
column_2 = table["primary_key"].get("column_2", None)
column_3 = table["primary_key"].get("column_3", None)
query += f", PRIMARY KEY ({column_1}"
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ") "
constraints = table.get("constraints", [])
for constraint in constraints:
column_1 = constraint["column_1"]
column_2 = constraint.get("column_2", None)
column_3 = constraint.get("column_3", None)
query += f", UNIQUE ({column_1}"
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ") "
query += ")"
c.execute(query)
indices = table.get("indices", [])
for index in indices:
index_name = index["index_name"]
column_1 = index["column_1"]
column_2 = index.get("column_2", None)
column_3 = index.get("column_3", None)
query: str = f"CREATE INDEX {index_name} ON {table_name} ({column_1}"
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ")" query += ")"
c.execute(query) c.execute(query)
for i in indices:
query: str = f"CREATE INDEX {i.name} ON {table_name} ({i.column_1}"
if i.column_2 is not None: def create_db_(con, log) -> None:
query += f", {i.column_2}" db_schema = extract_schema()
if i.column_3 is not None: c = con.cursor()
query += f", {i.column_3}" for table_name, table in db_schema.items():
query += ")" create_table(c, table_name, table)
c.execute(query)
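To make the refactor concrete: extract_schema() now returns a plain dict per table and create_table() turns that dict into CREATE TABLE / CREATE INDEX statements, replacing the old inline string building. The table below is a made-up example, not one of the real tables, and the emitted SQL is approximate.

    example_schema = {
        "example_table": {
            "columns": {
                "record_id": {"type": "INTEGER", "primary_key": True, "unique": False},
                "linked_id": {"type": "BLOB", "primary_key": False, "unique": False},
            },
            "indices": [
                {"index_name": "example_table_linked_id_idx", "column_1": "linked_id"},
            ],
        }
    }
    # create_table(cursor, "example_table", example_schema["example_table"]) would emit roughly:
    #   CREATE TABLE example_table ( record_id INTEGER PRIMARY KEY ASC , linked_id BLOB )
    #   CREATE INDEX example_table_linked_id_idx ON example_table (linked_id)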
def create_db(db_path: str, log) -> None:
@@ -915,6 +1013,7 @@ class DBMethods:
query += f"{key}=:{key}" query += f"{key}=:{key}"
cursor.execute(query, values) cursor.execute(query, values)
return cursor.lastrowid
def query(
self,


@@ -12,8 +12,10 @@ from .db import (
AutomationStrategy,
BidState,
Concepts,
+ create_table,
CURRENT_DB_DATA_VERSION,
CURRENT_DB_VERSION,
+ extract_schema,
)
from .basicswap_util import (
@@ -49,10 +51,9 @@ def upgradeDatabaseData(self, data_version):
return
self.log.info(
- "Upgrading database records from version %d to %d.",
- data_version,
- CURRENT_DB_DATA_VERSION,
+ f"Upgrading database records from version {data_version} to {CURRENT_DB_DATA_VERSION}."
)
cursor = self.openDB()
try:
now = int(time.time())
@@ -138,313 +139,137 @@ def upgradeDatabaseData(self, data_version):
self.db_data_version = CURRENT_DB_DATA_VERSION
self.setIntKV("db_data_version", self.db_data_version, cursor)
self.commitDB()
- self.log.info(
- "Upgraded database records to version {}".format(self.db_data_version)
- )
+ self.log.info(f"Upgraded database records to version {self.db_data_version}")
finally:
self.closeDB(cursor, commit=False)
def upgradeDatabase(self, db_version):
- if db_version >= CURRENT_DB_VERSION:
+ if self._force_db_upgrade is False and db_version >= CURRENT_DB_VERSION:
return
self.log.info(
f"Upgrading database from version {db_version} to {CURRENT_DB_VERSION}."
)
- while True:
- try:
- cursor = self.openDB()
- current_version = db_version
- if current_version == 6:
- cursor.execute("ALTER TABLE bids ADD COLUMN security_token BLOB")
- cursor.execute("ALTER TABLE offers ADD COLUMN security_token BLOB")
- db_version += 1
- elif current_version == 7:
- cursor.execute("ALTER TABLE transactions ADD COLUMN block_hash BLOB")
- cursor.execute(
- "ALTER TABLE transactions ADD COLUMN block_height INTEGER"
- )
- cursor.execute("ALTER TABLE transactions ADD COLUMN block_time INTEGER")
- db_version += 1
- elif current_version == 8:
- cursor.execute(
- """
- CREATE TABLE wallets (
- record_id INTEGER NOT NULL,
- coin_id INTEGER,
- wallet_name VARCHAR,
- wallet_data VARCHAR,
- balance_type INTEGER,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- db_version += 1
- elif current_version == 9:
- cursor.execute("ALTER TABLE wallets ADD COLUMN wallet_data VARCHAR")
- db_version += 1
- elif current_version == 10:
- cursor.execute(
- "ALTER TABLE smsgaddresses ADD COLUMN active_ind INTEGER"
- )
- cursor.execute(
- "ALTER TABLE smsgaddresses ADD COLUMN created_at INTEGER"
- )
- cursor.execute("ALTER TABLE smsgaddresses ADD COLUMN note VARCHAR")
- cursor.execute("ALTER TABLE smsgaddresses ADD COLUMN pubkey VARCHAR")
- cursor.execute(
- "UPDATE smsgaddresses SET active_ind = 1, created_at = 1"
- )
- cursor.execute("ALTER TABLE offers ADD COLUMN addr_to VARCHAR")
- cursor.execute(f'UPDATE offers SET addr_to = "{self.network_addr}"')
- db_version += 1
- elif current_version == 11:
- cursor.execute(
- "ALTER TABLE bids ADD COLUMN chain_a_height_start INTEGER"
- )
- cursor.execute(
- "ALTER TABLE bids ADD COLUMN chain_b_height_start INTEGER"
- )
- cursor.execute("ALTER TABLE bids ADD COLUMN protocol_version INTEGER")
- cursor.execute("ALTER TABLE offers ADD COLUMN protocol_version INTEGER")
- cursor.execute("ALTER TABLE transactions ADD COLUMN tx_data BLOB")
- db_version += 1
- elif current_version == 12:
- cursor.execute(
- """
- CREATE TABLE knownidentities (
- record_id INTEGER NOT NULL,
- address VARCHAR,
- label VARCHAR,
- publickey BLOB,
- num_sent_bids_successful INTEGER,
- num_recv_bids_successful INTEGER,
- num_sent_bids_rejected INTEGER,
- num_recv_bids_rejected INTEGER,
- num_sent_bids_failed INTEGER,
- num_recv_bids_failed INTEGER,
- note VARCHAR,
- updated_at BIGINT,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute("ALTER TABLE bids ADD COLUMN reject_code INTEGER")
- cursor.execute("ALTER TABLE bids ADD COLUMN rate INTEGER")
- cursor.execute(
- "ALTER TABLE offers ADD COLUMN amount_negotiable INTEGER"
- )
- cursor.execute("ALTER TABLE offers ADD COLUMN rate_negotiable INTEGER")
- db_version += 1
- elif current_version == 13:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE automationstrategies (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- label VARCHAR,
- type_ind INTEGER,
- only_known_identities INTEGER,
- num_concurrent INTEGER,
- data BLOB,
- note VARCHAR,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute(
- """
- CREATE TABLE automationlinks (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- linked_type INTEGER,
- linked_id BLOB,
- strategy_id INTEGER,
- data BLOB,
- repeat_limit INTEGER,
- repeat_count INTEGER,
- note VARCHAR,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute(
- """
- CREATE TABLE history (
- record_id INTEGER NOT NULL,
- concept_type INTEGER,
- concept_id INTEGER,
- changed_data BLOB,
- note VARCHAR,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute(
- """
- CREATE TABLE bidstates (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- state_id INTEGER,
- label VARCHAR,
- in_progress INTEGER,
- note VARCHAR,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute("ALTER TABLE wallets ADD COLUMN active_ind INTEGER")
- cursor.execute(
- "ALTER TABLE knownidentities ADD COLUMN active_ind INTEGER"
- )
- cursor.execute("ALTER TABLE eventqueue RENAME TO actions")
- cursor.execute(
- "ALTER TABLE actions RENAME COLUMN event_id TO action_id"
- )
- cursor.execute(
- "ALTER TABLE actions RENAME COLUMN event_type TO action_type"
- )
- cursor.execute(
- "ALTER TABLE actions RENAME COLUMN event_data TO action_data"
- )
- elif current_version == 14:
- db_version += 1
- cursor.execute(
- "ALTER TABLE xmr_swaps ADD COLUMN coin_a_lock_release_msg_id BLOB"
- )
- cursor.execute(
- "ALTER TABLE xmr_swaps RENAME COLUMN coin_a_lock_refund_spend_tx_msg_id TO coin_a_lock_spend_tx_msg_id"
- )
- elif current_version == 15:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE notifications (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- event_type INTEGER,
- event_data BLOB,
- created_at BIGINT,
- PRIMARY KEY (record_id))"""
- )
- elif current_version == 16:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE prefunded_transactions (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- created_at BIGINT,
- linked_type INTEGER,
- linked_id BLOB,
- tx_type INTEGER,
- tx_data BLOB,
- used_by BLOB,
- PRIMARY KEY (record_id))"""
- )
- elif current_version == 17:
- db_version += 1
- cursor.execute(
- "ALTER TABLE knownidentities ADD COLUMN automation_override INTEGER"
- )
- cursor.execute(
- "ALTER TABLE knownidentities ADD COLUMN visibility_override INTEGER"
- )
- cursor.execute("ALTER TABLE knownidentities ADD COLUMN data BLOB")
- cursor.execute("UPDATE knownidentities SET active_ind = 1")
- elif current_version == 18:
- db_version += 1
- cursor.execute("ALTER TABLE xmr_split_data ADD COLUMN addr_from STRING")
- cursor.execute("ALTER TABLE xmr_split_data ADD COLUMN addr_to STRING")
- elif current_version == 19:
- db_version += 1
- cursor.execute("ALTER TABLE bidstates ADD COLUMN in_error INTEGER")
- cursor.execute("ALTER TABLE bidstates ADD COLUMN swap_failed INTEGER")
- cursor.execute("ALTER TABLE bidstates ADD COLUMN swap_ended INTEGER")
- elif current_version == 20:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE message_links (
- record_id INTEGER NOT NULL,
- active_ind INTEGER,
- created_at BIGINT,
- linked_type INTEGER,
- linked_id BLOB,
- msg_type INTEGER,
- msg_sequence INTEGER,
- msg_id BLOB,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute("ALTER TABLE offers ADD COLUMN bid_reversed INTEGER")
- elif current_version == 21:
- db_version += 1
- cursor.execute("ALTER TABLE offers ADD COLUMN proof_utxos BLOB")
- cursor.execute("ALTER TABLE bids ADD COLUMN proof_utxos BLOB")
- elif current_version == 22:
- db_version += 1
- cursor.execute("ALTER TABLE offers ADD COLUMN amount_to INTEGER")
- elif current_version == 23:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE checkedblocks (
- record_id INTEGER NOT NULL,
- created_at BIGINT,
- coin_type INTEGER,
- block_height INTEGER,
- block_hash BLOB,
- block_time INTEGER,
- PRIMARY KEY (record_id))"""
- )
- cursor.execute("ALTER TABLE bids ADD COLUMN pkhash_buyer_to BLOB")
- elif current_version == 24:
- db_version += 1
- cursor.execute("ALTER TABLE bidstates ADD COLUMN can_accept INTEGER")
- elif current_version == 25:
- db_version += 1
- cursor.execute(
- """
- CREATE TABLE coinrates (
- record_id INTEGER NOT NULL,
- currency_from INTEGER,
- currency_to INTEGER,
- rate VARCHAR,
- source VARCHAR,
- last_updated INTEGER,
- PRIMARY KEY (record_id))"""
- )
- elif current_version == 26:
- db_version += 1
- cursor.execute("ALTER TABLE offers ADD COLUMN auto_accept_type INTEGER")
- elif current_version == 27:
- db_version += 1
- cursor.execute("ALTER TABLE offers ADD COLUMN pk_from BLOB")
- cursor.execute("ALTER TABLE bids ADD COLUMN pk_bid_addr BLOB")
- if current_version != db_version:
- self.db_version = db_version
- self.setIntKV("db_version", db_version, cursor)
- self.commitDB()
- self.log.info("Upgraded database to version {}".format(self.db_version))
- continue
- except Exception as e:
- self.log.error("Upgrade failed {}".format(e))
- self.rollbackDB()
- finally:
- self.closeDB(cursor, commit=False)
- break
- if db_version != CURRENT_DB_VERSION:
- raise ValueError("Unable to upgrade database.")
+ # db_version, tablename, oldcolumnname, newcolumnname
+ rename_columns = [
+ (13, "actions", "event_id", "action_id"),
+ (13, "actions", "event_type", "action_type"),
+ (13, "actions", "event_data", "action_data"),
+ (
+ 14,
+ "xmr_swaps",
+ "coin_a_lock_refund_spend_tx_msg_id",
+ "coin_a_lock_spend_tx_msg_id",
+ ),
+ ]
+ expect_schema = extract_schema()
+ have_tables = {}
+ try:
+ cursor = self.openDB()
+ for rename_column in rename_columns:
+ dbv, table_name, colname_from, colname_to = rename_column
+ if db_version < dbv:
+ cursor.execute(
+ f"ALTER TABLE {table_name} RENAME COLUMN {colname_from} TO {colname_to}"
+ )
+ query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name;"
+ tables = cursor.execute(query).fetchall()
+ for table in tables:
+ table_name = table[0]
+ if table_name in ("sqlite_sequence",):
+ continue
+ have_table = {}
+ have_columns = {}
query = "SELECT * FROM PRAGMA_TABLE_INFO(:table_name) ORDER BY cid DESC;"
columns = cursor.execute(query, {"table_name": table_name}).fetchall()
for column in columns:
cid, name, data_type, notnull, default_value, primary_key = column
have_columns[name] = {"type": data_type, "primary_key": primary_key}
have_table["columns"] = have_columns
cursor.execute(f"PRAGMA INDEX_LIST('{table_name}');")
indices = cursor.fetchall()
for index in indices:
seq, index_name, unique, origin, partial = index
if origin == "pk": # Created by a PRIMARY KEY constraint
continue
cursor.execute(f"PRAGMA INDEX_INFO('{index_name}');")
index_info = cursor.fetchall()
add_index = {"index_name": index_name}
for index_columns in index_info:
seqno, cid, name = index_columns
if origin == "u": # Created by a UNIQUE constraint
have_columns[name]["unique"] = 1
else:
if "column_1" not in add_index:
add_index["column_1"] = name
elif "column_2" not in add_index:
add_index["column_2"] = name
elif "column_3" not in add_index:
add_index["column_3"] = name
else:
raise RuntimeError("Add more index columns.")
if origin == "c":
if "indices" not in table:
have_table["indices"] = []
have_table["indices"].append(add_index)
have_tables[table_name] = have_table
for table_name, table in expect_schema.items():
if table_name not in have_tables:
self.log.info(f"Creating table {table_name}.")
create_table(cursor, table_name, table)
continue
have_table = have_tables[table_name]
have_columns = have_table["columns"]
for colname, column in table["columns"].items():
if colname not in have_columns:
col_type = column["type"]
self.log.info(f"Adding column {colname} to table {table_name}.")
cursor.execute(
f"ALTER TABLE {table_name} ADD COLUMN {colname} {col_type}"
)
indices = table.get("indices", [])
have_indices = have_table.get("indices", [])
for index in indices:
index_name = index["index_name"]
if not any(
have_idx.get("index_name") == index_name
for have_idx in have_indices
):
self.log.info(f"Adding index {index_name} to table {table_name}.")
column_1 = index["column_1"]
column_2 = index.get("column_2", None)
column_3 = index.get("column_3", None)
query: str = (
f"CREATE INDEX {index_name} ON {table_name} ({column_1}"
)
if column_2:
query += f", {column_2}"
if column_3:
query += f", {column_3}"
query += ")"
cursor.execute(query)
if CURRENT_DB_VERSION != db_version:
self.db_version = CURRENT_DB_VERSION
self.setIntKV("db_version", CURRENT_DB_VERSION, cursor)
self.log.info(f"Upgraded database to version {self.db_version}")
self.commitDB()
except Exception as e:
self.log.error(f"Upgrade failed {e}")
self.rollbackDB()
finally:
self.closeDB(cursor, commit=False)
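The net effect of the rewrite is an additive, schema-driven upgrade: the live database is inspected with PRAGMA queries and any table or column the expected schema has but the database lacks is created or added. A minimal standalone sketch of the same idea (not the project's code, table and column names are hypothetical):

    import sqlite3

    def add_missing_columns(con, table_name, expected_columns):
        # Add any column the expected schema has but the live table lacks.
        cur = con.cursor()
        have = {row[1] for row in cur.execute(f"PRAGMA table_info('{table_name}')")}
        for colname, coltype in expected_columns.items():
            if colname not in have:
                cur.execute(f"ALTER TABLE {table_name} ADD COLUMN {colname} {coltype}")

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE bids (record_id INTEGER PRIMARY KEY)")
    add_missing_columns(con, "bids", {"proof_signature": "BLOB"})
    print([row[1] for row in con.execute("PRAGMA table_info('bids')")])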


@@ -76,6 +76,10 @@ def remove_expired_data(self, time_offset: int = 0):
"DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :linked_id", "DELETE FROM message_links WHERE linked_type = :type_ind AND linked_id = :linked_id",
{"type_ind": int(Concepts.BID), "linked_id": bid_row[0]}, {"type_ind": int(Concepts.BID), "linked_id": bid_row[0]},
) )
cursor.execute(
"DELETE FROM direct_message_route_links WHERE linked_type = :type_ind AND linked_id = :linked_id",
{"type_ind": int(Concepts.BID), "linked_id": bid_row[0]},
)
cursor.execute(
"DELETE FROM eventlog WHERE eventlog.linked_type = :type_ind AND eventlog.linked_id = :offer_id",


@@ -858,7 +858,7 @@ def js_automationstrategies(self, url_split, post_string: str, is_json: bool) ->
"label": strat_data.label, "label": strat_data.label,
"type_ind": strat_data.type_ind, "type_ind": strat_data.type_ind,
"only_known_identities": strat_data.only_known_identities, "only_known_identities": strat_data.only_known_identities,
"data": json.loads(strat_data.data.decode("utf-8")), "data": json.loads(strat_data.data.decode("UTF-8")),
"note": "" if strat_data.note is None else strat_data.note, "note": "" if strat_data.note is None else strat_data.note,
} }
return bytes(json.dumps(rv), "UTF-8") return bytes(json.dumps(rv), "UTF-8")
@@ -992,7 +992,7 @@ def js_unlock(self, url_split, post_string, is_json) -> bytes:
swap_client = self.server.swap_client
post_data = getFormData(post_string, is_json)
- password = get_data_entry(post_data, "password")
+ password: str = get_data_entry(post_data, "password")
if have_data_entry(post_data, "coin"):
coin = getCoinType(str(get_data_entry(post_data, "coin")))
@@ -1167,6 +1167,49 @@ def js_coinprices(self, url_split, post_string, is_json) -> bytes:
)
def js_messageroutes(self, url_split, post_string, is_json) -> bytes:
swap_client = self.server.swap_client
post_data = {} if post_string == "" else getFormData(post_string, is_json)
filters = {
"page_no": 1,
"limit": PAGE_LIMIT,
"sort_by": "created_at",
"sort_dir": "desc",
}
if have_data_entry(post_data, "sort_by"):
sort_by = get_data_entry(post_data, "sort_by")
ensure(
sort_by
in [
"created_at",
],
"Invalid sort by",
)
filters["sort_by"] = sort_by
if have_data_entry(post_data, "sort_dir"):
sort_dir = get_data_entry(post_data, "sort_dir")
ensure(sort_dir in ["asc", "desc"], "Invalid sort dir")
filters["sort_dir"] = sort_dir
if have_data_entry(post_data, "offset"):
filters["offset"] = int(get_data_entry(post_data, "offset"))
if have_data_entry(post_data, "limit"):
filters["limit"] = int(get_data_entry(post_data, "limit"))
ensure(filters["limit"] > 0, "Invalid limit")
if have_data_entry(post_data, "address_from"):
filters["address_from"] = get_data_entry(post_data, "address_from")
if have_data_entry(post_data, "address_to"):
filters["address_to"] = get_data_entry(post_data, "address_to")
action = get_data_entry_or(post_data, "action", None)
message_routes = swap_client.listMessageRoutes(filters, action)
return bytes(json.dumps(message_routes), "UTF-8")
endpoints = {
"coins": js_coins,
"wallets": js_wallets,
@@ -1194,6 +1237,7 @@ endpoints = {
"readurl": js_readurl, "readurl": js_readurl,
"active": js_active, "active": js_active,
"coinprices": js_coinprices, "coinprices": js_coinprices,
"messageroutes": js_messageroutes,
}


@@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 tecnovert
+ # Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
@@ -23,6 +24,13 @@ protobuf ParseFromString would reset the whole object, from_bytes won't.
from basicswap.util.integer import encode_varint, decode_varint
NPBW_INT = 0
NPBW_BYTES = 2
NPBF_STR = 1
NPBF_BOOL = 2
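Reading the new constants: the second element of each _map entry appears to be the wire encoding (varint vs length-prefixed bytes, matching protobuf wire types 0 and 2) and the third a decode hint (raw, str, bool). That interpretation is inferred from the names and values; the snippet below is only an illustration of how a decoder might branch on them.

    NPBW_INT = 0
    NPBW_BYTES = 2
    NPBF_STR = 1
    NPBF_BOOL = 2

    def describe(entry):
        # Illustrative only: turn a _map entry into a human-readable description.
        name, wire, hint = entry
        kind = "varint" if wire == NPBW_INT else "length-prefixed bytes"
        if hint == NPBF_STR:
            kind += ", decoded to str"
        elif hint == NPBF_BOOL:
            kind += ", decoded to bool"
        return f"{name}: {kind}"

    print(describe(("proof_address", NPBW_BYTES, NPBF_STR)))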
class NonProtobufClass:
def __init__(self, init_all: bool = True, **kwargs):
for key, value in kwargs.items():
@@ -34,7 +42,7 @@ class NonProtobufClass:
found_field = True
break
if found_field is False:
- raise ValueError(f"got an unexpected keyword argument '{key}'")
+ raise ValueError(f"Got an unexpected keyword argument '{key}'")
if init_all:
self.init_fields()
@@ -117,151 +125,160 @@ class NonProtobufClass:
class OfferMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("coin_from", NPBW_INT, 0),
3: ("coin_to", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("min_bid_amount", NPBW_INT, 0),
7: ("time_valid", NPBW_INT, 0),
8: ("lock_type", NPBW_INT, 0),
9: ("lock_value", NPBW_INT, 0),
10: ("swap_type", NPBW_INT, 0),
11: ("proof_address", NPBW_BYTES, NPBF_STR),
12: ("proof_signature", NPBW_BYTES, NPBF_STR),
13: ("pkhash_seller", NPBW_BYTES, 0),
14: ("secret_hash", NPBW_BYTES, 0),
15: ("fee_rate_from", NPBW_INT, 0),
16: ("fee_rate_to", NPBW_INT, 0),
17: ("amount_negotiable", NPBW_INT, NPBF_BOOL),
18: ("rate_negotiable", NPBW_INT, NPBF_BOOL),
19: ("proof_utxos", NPBW_BYTES, 0),
20: ("auto_accept_type", 0, 0),
}
class BidMessage(NonProtobufClass):
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkhash_buyer", NPBW_BYTES, 0),
7: ("proof_address", NPBW_BYTES, NPBF_STR),
8: ("proof_signature", NPBW_BYTES, NPBF_STR),
9: ("proof_utxos", NPBW_BYTES, 0),
10: ("pkhash_buyer_to", NPBW_BYTES, 0),
}
class BidAcceptMessage(NonProtobufClass):
# Step 3, seller -> buyer
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("initiate_txid", NPBW_BYTES, 0),
3: ("contract_script", NPBW_BYTES, 0),
4: ("pkhash_seller", NPBW_BYTES, 0),
}
class OfferRevokeMessage(NonProtobufClass):
_map = {
1: ("offer_msg_id", NPBW_BYTES, 0),
2: ("signature", NPBW_BYTES, 0),
}
class BidRejectMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("reject_code", NPBW_INT, 0),
}
class XmrBidMessage(NonProtobufClass):
# MSG1L, F -> L
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
6: ("pkaf", NPBW_BYTES, 0),
7: ("kbvf", NPBW_BYTES, 0),
8: ("kbsf_dleag", NPBW_BYTES, 0),
9: ("dest_af", NPBW_BYTES, 0),
}
class XmrSplitMessage(NonProtobufClass):
_map = {
1: ("msg_id", NPBW_BYTES, 0),
2: ("msg_type", NPBW_INT, 0),
3: ("sequence", NPBW_INT, 0),
4: ("dleag", NPBW_BYTES, 0),
}
class XmrBidAcceptMessage(NonProtobufClass):
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkal", NPBW_BYTES, 0),
3: ("kbvl", NPBW_BYTES, 0),
4: ("kbsl_dleag", NPBW_BYTES, 0),
# MSG2F
5: ("a_lock_tx", NPBW_BYTES, 0),
6: ("a_lock_tx_script", NPBW_BYTES, 0),
7: ("a_lock_refund_tx", NPBW_BYTES, 0),
8: ("a_lock_refund_tx_script", NPBW_BYTES, 0),
9: ("a_lock_refund_spend_tx", NPBW_BYTES, 0),
10: ("al_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockTxSigsMessage(NonProtobufClass):
# MSG3L
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("af_lock_refund_spend_tx_esig", NPBW_BYTES, 0),
3: ("af_lock_refund_tx_sig", NPBW_BYTES, 0),
}
class XmrBidLockSpendTxMessage(NonProtobufClass):
# MSG4F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("a_lock_spend_tx", NPBW_BYTES, 0),
3: ("kal_sig", NPBW_BYTES, 0),
}
class XmrBidLockReleaseMessage(NonProtobufClass):
# MSG5F
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("al_lock_spend_tx_esig", NPBW_BYTES, 0),
}
class ADSBidIntentMessage(NonProtobufClass):
# L -> F Sent from bidder, construct a reverse bid
_map = {
1: ("protocol_version", NPBW_INT, 0),
2: ("offer_msg_id", NPBW_BYTES, 0),
3: ("time_valid", NPBW_INT, 0),
4: ("amount_from", NPBW_INT, 0),
5: ("amount_to", NPBW_INT, 0),
}
class ADSBidIntentAcceptMessage(NonProtobufClass):
# F -> L Sent from offerer, construct a reverse bid
_map = {
1: ("bid_msg_id", NPBW_BYTES, 0),
2: ("pkaf", NPBW_BYTES, 0),
3: ("kbvf", NPBW_BYTES, 0),
4: ("kbsf_dleag", NPBW_BYTES, 0),
5: ("dest_af", NPBW_BYTES, 0),
}
class ConnectReqMessage(NonProtobufClass):
_map = {
1: ("network_type", NPBW_INT, 0),
2: ("network_data", NPBW_BYTES, 0),
3: ("request_type", NPBW_INT, 0),
4: ("request_data", NPBW_BYTES, 0),
}
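A hedged construction example for the new message type. The keyword-argument constructor follows NonProtobufClass as shown above; the import path and the serialisation helpers are assumptions, since they are not part of this hunk.

    # Illustrative only.
    from basicswap.basicswap_util import ConnectionRequestTypes, MessageNetworks
    from basicswap.messages_npb import ConnectReqMessage  # module path assumed

    msg = ConnectReqMessage(
        network_type=int(MessageNetworks.SIMPLEX),
        network_data=b"<invitation link bytes>",
        request_type=int(ConnectionRequestTypes.BID),
        request_data=b"<bid message bytes>",
    )
    # Serialisation helpers (e.g. to_bytes()/from_bytes()) are not shown in this hunk,
    # so they are assumed rather than documented here.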


@@ -8,6 +8,7 @@
import base64
import json
import threading
+ import traceback
import websocket
@@ -25,9 +26,6 @@ from basicswap.util.address import (
b58decode,
decodeWif,
)
- from basicswap.basicswap_util import (
- BidStates,
- )
def encode_base64(data: bytes) -> str:
@@ -52,6 +50,20 @@ class WebSocketThread(threading.Thread):
self.recv_queue = Queue()
self.cmd_recv_queue = Queue()
self.delayed_events_queue = Queue()
self.ignore_events: bool = False
self.num_messages_received: int = 0
def disable_debug_mode(self):
self.ignore_events = False
for i in range(100):
try:
message = self.delayed_events_queue.get(block=False)
except Empty:
break
self.recv_queue.put(message)
def on_message(self, ws, message):
if self.logger:
@@ -62,6 +74,7 @@ class WebSocketThread(threading.Thread):
if message.startswith('{"corrId"'):
self.cmd_recv_queue.put(message)
else:
+ self.num_messages_received += 1
self.recv_queue.put(message)
def queue_get(self):
@@ -106,6 +119,20 @@ class WebSocketThread(threading.Thread):
self.ws.send(cmd)
return self.corrId
def wait_for_command_response(self, cmd_id, num_tries: int = 200):
cmd_id = str(cmd_id)
for i in range(num_tries):
message = self.cmd_queue_get()
if message is not None:
data = json.loads(message)
if "corrId" in data:
if data["corrId"] == cmd_id:
return data
self.delay_event.wait(0.5)
raise ValueError(
f"wait_for_command_response timed-out waiting for ID: {cmd_id}"
)
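Usage sketch for the new helper; ws_thread stands for a running WebSocketThread, and the /chats command mirrors its use in closeSimplexChat further down.

    # Illustrative only.
    cmd_id = ws_thread.send_command("/chats")
    response = ws_thread.wait_for_command_response(cmd_id, num_tries=500)
    print(getResponseData(response, "chats"))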
def run(self):
self.ws = websocket.WebSocketApp(
self.url,
@@ -126,16 +153,15 @@ class WebSocketThread(threading.Thread):
def waitForResponse(ws_thread, sent_id, delay_event):
sent_id = str(sent_id)
- for i in range(100):
+ for i in range(200):
message = ws_thread.cmd_queue_get()
if message is not None:
data = json.loads(message)
- # print(f"json: {json.dumps(data, indent=4)}")
if "corrId" in data:
if data["corrId"] == sent_id:
return data
delay_event.wait(0.5)
- raise ValueError(f"waitForResponse timed-out waiting for id: {sent_id}")
+ raise ValueError(f"waitForResponse timed-out waiting for ID: {sent_id}")
def waitForConnected(ws_thread, delay_event):
@@ -174,10 +200,17 @@ def getPrivkeyForAddress(self, addr) -> bytes:
raise ValueError("key not found") raise ValueError("key not found")
- def sendSimplexMsg(
- self, network, addr_from: str, addr_to: str, payload: bytes, msg_valid: int, cursor
+ def encryptMsg(
+ self,
+ addr_from: str,
+ addr_to: str,
+ payload: bytes,
+ msg_valid: int,
+ cursor,
+ timestamp=None,
+ deterministic=False,
) -> bytes:
- self.log.debug("sendSimplexMsg")
+ self.log.debug("encryptMsg")
try:
rv = self.callrpc(
@@ -210,14 +243,40 @@ def sendSimplexMsg(
privkey_from = getPrivkeyForAddress(self, addr_from)
payload += bytes((0,))  # Include null byte to match smsg
- smsg_msg: bytes = smsgEncrypt(privkey_from, pubkey_to, payload)
+ smsg_msg: bytes = smsgEncrypt(
+ privkey_from, pubkey_to, payload, timestamp, deterministic
+ )
+ return smsg_msg
def sendSimplexMsg(
self,
network,
addr_from: str,
addr_to: str,
payload: bytes,
msg_valid: int,
cursor,
timestamp: int = None,
deterministic: bool = False,
to_user_name: str = None,
) -> bytes:
self.log.debug("sendSimplexMsg")
smsg_msg: bytes = encryptMsg(
self, addr_from, addr_to, payload, msg_valid, cursor, timestamp, deterministic
)
smsg_id = smsgGetID(smsg_msg)
ws_thread = network["ws_thread"]
- sent_id = ws_thread.send_command("#bsx " + encode_base64(smsg_msg))
+ if to_user_name is not None:
+ to = "@" + to_user_name + " "
+ else:
+ to = "#bsx "
+ sent_id = ws_thread.send_command(to + encode_base64(smsg_msg))
response = waitForResponse(ws_thread, sent_id, self.delay_event)
- if response["resp"]["type"] != "newChatItems":
+ if getResponseData(response, "type") != "newChatItems":
json_str = json.dumps(response, indent=4)
self.log.debug(f"Response {json_str}")
raise ValueError("Send failed")
@@ -243,8 +302,10 @@ def decryptSimplexMsg(self, msg_data):
# Try with all active bid/offer addresses
query: str = """SELECT DISTINCT address FROM (
- SELECT bid_addr AS address FROM bids WHERE active_ind = 1
- AND (in_progress = 1 OR (state > :bid_received AND state < :bid_completed) OR (state IN (:bid_received, :bid_sent) AND expire_at > :now))
+ SELECT b.bid_addr AS address FROM bids b
+ JOIN bidstates s ON b.state = s.state_id
+ WHERE b.active_ind = 1
+ AND (s.in_progress OR (s.swap_ended = 0 AND b.expire_at > :now))
UNION
SELECT addr_from AS address FROM offers WHERE active_ind = 1 AND expire_at > :now
)"""
@@ -253,15 +314,7 @@ def decryptSimplexMsg(self, msg_data):
try:
cursor = self.openDB()
- addr_rows = cursor.execute(
- query,
- {
- "bid_received": int(BidStates.BID_RECEIVED),
- "bid_completed": int(BidStates.SWAP_COMPLETED),
- "bid_sent": int(BidStates.BID_SENT),
- "now": now,
- },
- ).fetchall()
+ addr_rows = cursor.execute(query, {"now": now}).fetchall()
finally:
self.closeDB(cursor, commit=False)
@@ -283,46 +336,125 @@ def decryptSimplexMsg(self, msg_data):
return decrypted
def parseSimplexMsg(self, chat_item):
item_status = chat_item["chatItem"]["meta"]["itemStatus"]
dir_type = item_status["type"]
if dir_type not in ("sndRcvd", "rcvNew"):
return None
snd_progress = item_status.get("sndProgress", None)
if snd_progress and snd_progress != "complete":
item_id = chat_item["chatItem"]["meta"]["itemId"]
self.log.debug(f"simplex chat item {item_id} {snd_progress}")
return None
conn_id = None
msg_dir: str = "recv" if dir_type == "rcvNew" else "sent"
chat_type: str = chat_item["chatInfo"]["type"]
if chat_type == "group":
chat_name = chat_item["chatInfo"]["groupInfo"]["localDisplayName"]
conn_id = chat_item["chatInfo"]["groupInfo"]["groupId"]
self.num_group_simplex_messages_received += 1
elif chat_type == "direct":
chat_name = chat_item["chatInfo"]["contact"]["localDisplayName"]
conn_id = chat_item["chatInfo"]["contact"]["activeConn"]["connId"]
self.num_direct_simplex_messages_received += 1
else:
return None
msg_content = chat_item["chatItem"]["content"]["msgContent"]["text"]
try:
msg_data: bytes = decode_base64(msg_content)
decrypted_msg = decryptSimplexMsg(self, msg_data)
if decrypted_msg is None:
return None
decrypted_msg["chat_type"] = chat_type
decrypted_msg["chat_name"] = chat_name
decrypted_msg["conn_id"] = conn_id
decrypted_msg["msg_dir"] = msg_dir
return decrypted_msg
except Exception as e: # noqa: F841
# self.log.debug(f"decryptSimplexMsg error: {e}")
self.log.debug(f"decryptSimplexMsg error: {e}")
pass
return None
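For reference, a trimmed example of the chat_item shape parseSimplexMsg expects from the simplex-chat WebSocket API. The values are invented; the key layout follows the accesses above.

    example_chat_item = {
        "chatInfo": {
            "type": "direct",
            "contact": {
                "localDisplayName": "peer_1",
                "activeConn": {"connId": 42},
            },
        },
        "chatItem": {
            "meta": {"itemStatus": {"type": "rcvNew"}, "itemId": 1001},
            "content": {"msgContent": {"text": "<base64 encoded smsg>"}},
        },
    }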
def processEvent(self, ws_thread, msg_type: str, data) -> bool:
if ws_thread.ignore_events:
if msg_type not in ("contactConnected", "contactDeletedByContact"):
return False
ws_thread.delayed_events_queue.put(json.dumps(data))
return True
if msg_type == "contactConnected":
self.processContactConnected(data)
elif msg_type == "contactDeletedByContact":
self.processContactDisconnected(data)
else:
return False
return True
def readSimplexMsgs(self, network):
ws_thread = network["ws_thread"]
for i in range(100):
message = ws_thread.queue_get()
if message is None:
break
+ if self.delay_event.is_set():
+ break
data = json.loads(message)
- # self.log.debug(f"message 1: {json.dumps(data, indent=4)}")
+ # self.log.debug(f"Message: {json.dumps(data, indent=4)}")
try:
- if data["resp"]["type"] in ("chatItemsStatusesUpdated", "newChatItems"):
- for chat_item in data["resp"]["chatItems"]:
- item_status = chat_item["chatItem"]["meta"]["itemStatus"]
- if item_status["type"] in ("sndRcvd", "rcvNew"):
- snd_progress = item_status.get("sndProgress", None)
- if snd_progress:
- if snd_progress != "complete":
- item_id = chat_item["chatItem"]["meta"]["itemId"]
- self.log.debug(
- f"simplex chat item {item_id} {snd_progress}"
- )
- continue
- try:
- msg_data: bytes = decode_base64(
- chat_item["chatItem"]["content"]["msgContent"]["text"]
- )
- decrypted_msg = decryptSimplexMsg(self, msg_data)
- if decrypted_msg is None:
- continue
- self.processMsg(decrypted_msg)
- except Exception as e:  # noqa: F841
- # self.log.debug(f"decryptSimplexMsg error: {e}")
- pass
+ msg_type: str = getResponseData(data, "type")
+ if msg_type in ("chatItemsStatusesUpdated", "newChatItems"):
+ for chat_item in getResponseData(data, "chatItems"):
+ decrypted_msg = parseSimplexMsg(self, chat_item)
+ if decrypted_msg is None:
+ continue
+ self.processMsg(decrypted_msg)
+ elif msg_type == "chatError":
+ # self.log.debug(f"chatError Message: {json.dumps(data, indent=4)}")
+ pass
+ elif processEvent(self, ws_thread, msg_type, data):
+ pass
+ else:
+ self.log.debug(f"Unknown msg_type: {msg_type}")
+ # self.log.debug(f"Message: {json.dumps(data, indent=4)}")
except Exception as e:
self.log.debug(f"readSimplexMsgs error: {e}")
+ if self.debug:
+ self.log.error(traceback.format_exc())
self.delay_event.wait(0.05)
def getResponseData(data, tag=None):
if "Right" in data["resp"]:
if tag:
return data["resp"]["Right"][tag]
return data["resp"]["Right"]
if tag:
return data["resp"][tag]
return data["resp"]
def getNewSimplexLink(data):
response_data = getResponseData(data)
if "connLinkContact" in response_data:
return response_data["connLinkContact"]["connFullLink"]
return response_data["connReqContact"]
def getJoinedSimplexLink(data):
response_data = getResponseData(data)
if "connLinkInvitation" in response_data:
return response_data["connLinkInvitation"]["connFullLink"]
return response_data["connReqInvitation"]
def initialiseSimplexNetwork(self, network_config) -> None:
self.log.debug("initialiseSimplexNetwork")
@@ -337,10 +469,10 @@ def initialiseSimplexNetwork(self, network_config) -> None:
sent_id = ws_thread.send_command("/groups")
response = waitForResponse(ws_thread, sent_id, self.delay_event)
- if len(response["resp"]["groups"]) < 1:
+ if len(getResponseData(response, "groups")) < 1:
sent_id = ws_thread.send_command("/c " + network_config["group_link"])
response = waitForResponse(ws_thread, sent_id, self.delay_event)
- assert "groupLinkId" in response["resp"]["connection"]
+ assert "groupLinkId" in getResponseData(response, "connection")
network = {
"type": "simplex",
@@ -348,3 +480,44 @@ def initialiseSimplexNetwork(self, network_config) -> None:
}
self.active_networks.append(network)
def closeSimplexChat(self, net_i, connId) -> bool:
try:
cmd_id = net_i.send_command("/chats")
response = net_i.wait_for_command_response(cmd_id, num_tries=500)
remote_name = None
for chat in getResponseData(response, "chats"):
if (
"chatInfo" not in chat
or "type" not in chat["chatInfo"]
or chat["chatInfo"]["type"] != "direct"
):
continue
try:
if chat["chatInfo"]["contact"]["activeConn"]["connId"] == connId:
remote_name = chat["chatInfo"]["contact"]["localDisplayName"]
break
except Exception as e:
self.log.debug(f"Error parsing chat: {e}")
if remote_name is None:
self.log.warning(
f"Unable to find remote name for simplex direct chat, ID: {connId}"
)
return False
self.log.debug(f"Deleting simplex chat @{remote_name}, connID {connId}")
cmd_id = net_i.send_command(f"/delete @{remote_name}")
cmd_response = net_i.wait_for_command_response(cmd_id)
if getResponseData(cmd_response, "type") != "contactDeleted":
self.log.warning(f"Failed to delete simplex chat, ID: {connId}")
self.log.debug(
"cmd_response: {}".format(json.dumps(cmd_response, indent=4))
)
return False
except Exception as e:
self.log.warning(f"Error deleting simplex chat, ID: {connId} - {e}")
return False
return True


@@ -7,10 +7,11 @@
import os
import select
+ import sqlite3
import subprocess
import time
- from basicswap.bin.run import Daemon
+ from basicswap.util.daemon import Daemon
def initSimplexClient(args, logger, delay_event):
@@ -29,7 +30,7 @@ def initSimplexClient(args, logger, delay_event):
def readOutput():
buf = os.read(pipe_r, 1024).decode("utf-8")
response = None
- # logging.debug(f"simplex-chat output: {buf}")
+ # logger.debug(f"simplex-chat output: {buf}")
if "display name:" in buf:
logger.debug("Setting display name")
response = b"user\n"
@@ -45,7 +46,7 @@ def initSimplexClient(args, logger, delay_event):
max_wait_seconds: int = 60
while p.poll() is None:
if time.time() > start_time + max_wait_seconds:
- raise ValueError("Timed out")
+ raise RuntimeError("Timed out")
if os.name == "nt":
readOutput()
delay_event.wait(0.1)
@@ -70,22 +71,45 @@ def startSimplexClient(
websocket_port: int, websocket_port: int,
logger, logger,
delay_event, delay_event,
socks_proxy=None,
log_level: str = "debug",
) -> Daemon: ) -> Daemon:
logger.info("Starting Simplex client") logger.info("Starting Simplex client")
if not os.path.exists(data_path): if not os.path.exists(data_path):
os.makedirs(data_path) os.makedirs(data_path)
db_path = os.path.join(data_path, "simplex_client_data") simplex_data_prefix = os.path.join(data_path, "simplex_client_data")
simplex_db_path = simplex_data_prefix + "_chat.db"
args = [bin_path, "-d", simplex_data_prefix, "-p", str(websocket_port)]
args = [bin_path, "-d", db_path, "-s", server_address, "-p", str(websocket_port)] if socks_proxy:
args += ["--socks-proxy", socks_proxy]
if not os.path.exists(db_path): if not os.path.exists(simplex_db_path):
# Need to set initial profile through CLI # Need to set initial profile through CLI
# TODO: Must be a better way? # TODO: Must be a better way?
init_args = args + ["-e", "/help"] # Run command ro exit client init_args = args + ["-e", "/help"] # Run command to exit client
init_args += ["-s", server_address]
initSimplexClient(init_args, logger, delay_event) initSimplexClient(init_args, logger, delay_event)
else:
# Workaround to avoid error:
# SQLite3 returned ErrorConstraint while attempting to perform step: UNIQUE constraint failed: protocol_servers.user_id, protocol_servers.host, protocol_servers.port
# TODO: Remove?
with sqlite3.connect(simplex_db_path) as con:
c = con.cursor()
if ":" in server_address:
host, port = server_address.split(":")
else:
host = server_address
port = ""
query: str = (
"SELECT COUNT(*) FROM protocol_servers WHERE host = :host and port = :port"
)
q = c.execute(query, {"host": host, "port": port}).fetchone()
if q[0] < 1:
args += ["-s", server_address]
args += ["-l", "debug"] args += ["-l", log_level]
opened_files = [] opened_files = []
stdout_dest = open( stdout_dest = open(
@@ -104,4 +128,5 @@ def startSimplexClient(
cwd=data_path, cwd=data_path,
), ),
opened_files, opened_files,
"simplex-chat",
) )
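A hypothetical invocation of startSimplexClient, with keyword names inferred from the parameters and locals visible in the hunk above (bin_path, data_path and server_address may be named differently in the full signature):

import logging
import threading

from basicswap.network.simplex_chat import startSimplexClient

logger = logging.getLogger(__name__)
delay_event = threading.Event()

daemon = startSimplexClient(
    bin_path="/opt/simplex/simplex-chat",   # assumed binary location
    data_path="/tmp/bsx_simplex",           # client data directory
    server_address="smp://<fingerprint>:<password>@127.0.0.1:5223",
    websocket_port=5225,
    logger=logger,
    delay_event=delay_event,
    socks_proxy=None,                       # e.g. "127.0.0.1:9050" to route over Tor
    log_level="debug",
)
# Returns a basicswap.util.daemon.Daemon wrapping the subprocess handle,
# the opened log files and the name "simplex-chat".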

94
basicswap/ui/app.py Normal file
View File

@@ -0,0 +1,94 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import json
from basicswap.db import getOrderByStr
class UIApp:
def listMessageRoutes(self, filters={}, action=None):
cursor = self.openDB()
try:
rv = []
query_data: dict = {}
filter_query_str: str = ""
address_from: str = filters.get("address_from", None)
if address_from is not None:
filter_query_str += " AND smsg_addr_local = :address_from "
query_data["address_from"] = address_from
address_to: str = filters.get("address_to", None)
if address_to is not None:
filter_query_str += " AND smsg_addr_remote = :address_to "
query_data["address_to"] = address_to
if action is None:
pass
elif action == "clear":
self.log.info("Clearing message routes")
query_str: str = (
"SELECT record_id, network_id, route_data"
+ " FROM direct_message_routes "
)
query_str += filter_query_str
rows = cursor.execute(query_str, query_data).fetchall()
for row in rows:
record_id, network_id, route_data = row
route_data = json.loads(route_data.decode("UTF-8"))
self.closeMessageRoute(record_id, network_id, route_data, cursor)
else:
raise ValueError("Unknown action")
query_str: str = (
"SELECT record_id, network_id, active_ind, linked_type, linked_id, "
+ " smsg_addr_local, smsg_addr_remote, route_data, created_at"
+ " FROM direct_message_routes "
+ " WHERE active_ind > 0 "
)
query_str += filter_query_str
query_str += getOrderByStr(filters)
limit = filters.get("limit", None)
if limit is not None:
query_str += " LIMIT :limit"
query_data["limit"] = limit
offset = filters.get("offset", None)
if offset is not None:
query_str += " OFFSET :offset"
query_data["offset"] = offset
q = cursor.execute(query_str, query_data)
rv = []
for row in q:
(
record_id,
network_id,
active_ind,
linked_type,
linked_id,
smsg_addr_local,
smsg_addr_remote,
route_data,
created_at,
) = row
rv.append(
{
"record_id": record_id,
"network_id": network_id,
"active_ind": active_ind,
"smsg_addr_local": smsg_addr_local,
"smsg_addr_remote": smsg_addr_remote,
"route_data": json.loads(route_data.decode("UTF-8")),
}
)
return rv
finally:
self.closeDB(cursor, commit=False)
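The tests below exercise this through read_json_api(port, "messageroutes", ...); assuming the endpoint is exposed under the same /json/ prefix as the other endpoints used in this changeset, an equivalent standalone query looks roughly like:

import json
from urllib import parse
from urllib.request import urlopen

def list_message_routes(port: int = 12700, clear: bool = False) -> list:
    # Query (or clear) the direct message routes of a local client.
    url = f"http://127.0.0.1:{port}/json/messageroutes"
    data = parse.urlencode({"action": "clear"}).encode() if clear else None
    return json.loads(urlopen(url, data=data).read())

# Each entry carries record_id, network_id, active_ind, smsg_addr_local,
# smsg_addr_remote and the decoded route_data blob.
routes = list_message_routes()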

15
basicswap/util/daemon.py Normal file
View File

@@ -0,0 +1,15 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
class Daemon:
__slots__ = ("handle", "files", "name", "running")
def __init__(self, handle, files, name):
self.handle = handle
self.files = files
self.name = name
self.running = True
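Daemon is only a small record type; an illustrative use, wrapping an arbitrary subprocess handle the way startSimplexClient does above:

import subprocess

log_file = open("/tmp/example_daemon.log", "w")
d = Daemon(
    subprocess.Popen(["sleep", "60"], stdout=log_file),
    [log_file],        # files to close when the daemon is stopped
    "example-daemon",
)
print(d.name, d.running)  # example-daemon True
d.handle.terminate()
log_file.close()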

View File

@@ -83,19 +83,35 @@ def smsgGetID(smsg_message: bytes) -> bytes:
return smsg_timestamp.to_bytes(8, byteorder="big") + ripemd160(smsg_message[8:]) return smsg_timestamp.to_bytes(8, byteorder="big") + ripemd160(smsg_message[8:])
def smsgEncrypt(privkey_from: bytes, pubkey_to: bytes, payload: bytes) -> bytes: def smsgEncrypt(
privkey_from: bytes,
pubkey_to: bytes,
payload: bytes,
smsg_timestamp: int = None,
deterministic: bool = False,
) -> bytes:
# assert len(payload) < 128 # Requires lz4 if payload > 128 bytes # assert len(payload) < 128 # Requires lz4 if payload > 128 bytes
# TODO: Add lz4 to match core smsg # TODO: Add lz4 to match core smsg
smsg_timestamp = int(time.time()) if deterministic:
r = getSecretInt().to_bytes(32, byteorder="big") assert smsg_timestamp is not None
h = hashlib.sha256(b"smsg")
h.update(privkey_from)
h.update(pubkey_to)
h.update(payload)
h.update(smsg_timestamp.to_bytes(8, byteorder="big"))
r = h.digest()
smsg_iv: bytes = hashlib.sha256(b"smsg_iv" + r).digest()[:16]
else:
r = getSecretInt().to_bytes(32, byteorder="big")
smsg_iv: bytes = secrets.token_bytes(16)
if smsg_timestamp is None:
smsg_timestamp = int(time.time())
R = PublicKey.from_secret(r).format() R = PublicKey.from_secret(r).format()
p = PrivateKey(r).ecdh(pubkey_to) p = PrivateKey(r).ecdh(pubkey_to)
H = hashlib.sha512(p).digest() H = hashlib.sha512(p).digest()
key_e: bytes = H[:32] key_e: bytes = H[:32]
key_m: bytes = H[32:] key_m: bytes = H[32:]
smsg_iv: bytes = secrets.token_bytes(16)
payload_hash: bytes = sha256(sha256(payload)) payload_hash: bytes = sha256(sha256(payload))
signature: bytes = PrivateKey(privkey_from).sign_recoverable( signature: bytes = PrivateKey(privkey_from).sign_recoverable(
payload_hash, hasher=None payload_hash, hasher=None
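The deterministic branch above derives both the ephemeral key r and the AES IV from the message inputs, so re-encrypting the same payload with the same timestamp can reproduce identical SMSG bytes (and hence the same smsg ID) regardless of which network carries it. A self-contained sketch of just that derivation:

import hashlib

def deterministic_smsg_nonce(privkey_from: bytes, pubkey_to: bytes,
                             payload: bytes, smsg_timestamp: int):
    # Hash the sender key, recipient key, payload and timestamp into r,
    # then derive the IV from r, mirroring the deterministic path above.
    h = hashlib.sha256(b"smsg")
    h.update(privkey_from)
    h.update(pubkey_to)
    h.update(payload)
    h.update(smsg_timestamp.to_bytes(8, byteorder="big"))
    r = h.digest()
    smsg_iv = hashlib.sha256(b"smsg_iv" + r).digest()[:16]
    return r, smsg_iv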

View File

@@ -142,7 +142,7 @@ Observe progress with
## Start a subset of the configured coins using docker ## Start a subset of the configured coins using docker
docker compose run --service-ports swapclient basicswap-run -datadir=/coindata -withcoins=monero docker compose run --rm --service-ports swapclient basicswap-run -datadir=/coindata -withcoins=monero

View File

@@ -184,7 +184,7 @@ def wait_for_bid(
swap_client.log.debug( swap_client.log.debug(
f"TEST: wait_for_bid {bid_id.hex()}: Bid not found." f"TEST: wait_for_bid {bid_id.hex()}: Bid not found."
) )
raise ValueError("wait_for_bid timed out.") raise ValueError(f"wait_for_bid timed out {bid_id.hex()}.")
def wait_for_bid_tx_state( def wait_for_bid_tx_state(
@@ -331,7 +331,7 @@ def wait_for_balance(
delay_event.wait(delay_time) delay_event.wait(delay_time)
i += 1 i += 1
if i > iterations: if i > iterations:
raise ValueError("Expect {} {}".format(balance_key, expect_amount)) raise ValueError(f"Expect {balance_key} {expect_amount}")
def wait_for_unspent( def wait_for_unspent(
@@ -347,11 +347,11 @@ def wait_for_unspent(
delay_event.wait(delay_time) delay_event.wait(delay_time)
i += 1 i += 1
if i > iterations: if i > iterations:
raise ValueError("wait_for_unspent {}".format(expect_amount)) raise ValueError(f"wait_for_unspent {expect_amount}")
def delay_for(delay_event, delay_for=60): def delay_for(delay_event, delay_for=60):
logging.info("Delaying for {} seconds.".format(delay_for)) logging.info(f"Delaying for {delay_for} seconds.")
delay_event.wait(delay_for) delay_event.wait(delay_for)
@@ -375,9 +375,7 @@ def waitForRPC(rpc_func, delay_event, rpc_command="getwalletinfo", max_tries=7):
except Exception as ex: except Exception as ex:
if i < max_tries: if i < max_tries:
logging.warning( logging.warning(
"Can't connect to RPC: %s. Retrying in %d second/s.", f"Can't connect to RPC: {ex}. Retrying in {i + 1} second/s."
str(ex),
(i + 1),
) )
delay_event.wait(i + 1) delay_event.wait(i + 1)
raise ValueError("waitForRPC failed") raise ValueError("waitForRPC failed")

View File

@@ -89,15 +89,19 @@ DOGECOIN_RPC_PORT_BASE = int(os.getenv("DOGECOIN_RPC_PORT_BASE", DOGE_BASE_RPC_P
EXTRA_CONFIG_JSON = json.loads(os.getenv("EXTRA_CONFIG_JSON", "{}")) EXTRA_CONFIG_JSON = json.loads(os.getenv("EXTRA_CONFIG_JSON", "{}"))
def waitForBidState(delay_event, port, bid_id, state_str, wait_for=60): def waitForBidState(delay_event, port, bid_id, wait_for_state, wait_for=60):
for i in range(wait_for): for i in range(wait_for):
if delay_event.is_set(): if delay_event.is_set():
raise ValueError("Test stopped.") raise ValueError("Test stopped.")
bid = json.loads( bid = json.loads(
urlopen("http://127.0.0.1:12700/json/bids/{}".format(bid_id)).read() urlopen("http://127.0.0.1:12700/json/bids/{}".format(bid_id)).read()
) )
if bid["bid_state"] == state_str: if isinstance(wait_for_state, (list, tuple)):
return if bid["bid_state"] in wait_for_state:
return
else:
if bid["bid_state"] == wait_for_state:
return
delay_event.wait(1) delay_event.wait(1)
raise ValueError("waitForBidState failed") raise ValueError("waitForBidState failed")

View File

@@ -17,8 +17,7 @@ docker run \
Fingerprint: Q8SNxc2SRcKyXlhJM8KFUgPNW4KXPGRm4eSLtT_oh-I= Fingerprint: Q8SNxc2SRcKyXlhJM8KFUgPNW4KXPGRm4eSLtT_oh-I=
export SIMPLEX_SERVER_ADDRESS=smp://Q8SNxc2SRcKyXlhJM8KFUgPNW4KXPGRm4eSLtT_oh-I=:password@127.0.0.1:5223,443 export SIMPLEX_SERVER_ADDRESS=smp://Q8SNxc2SRcKyXlhJM8KFUgPNW4KXPGRm4eSLtT_oh-I=:password@127.0.0.1:5223
https://github.com/simplex-chat/simplex-chat/issues/4127 https://github.com/simplex-chat/simplex-chat/issues/4127
json: {"corrId":"3","cmd":"/_send #1 text test123"} json: {"corrId":"3","cmd":"/_send #1 text test123"}
@@ -43,9 +42,12 @@ from basicswap.basicswap import (
from basicswap.chainparams import Coins from basicswap.chainparams import Coins
from basicswap.network.simplex import ( from basicswap.network.simplex import (
WebSocketThread, getJoinedSimplexLink,
getNewSimplexLink,
getResponseData,
waitForConnected, waitForConnected,
waitForResponse, waitForResponse,
WebSocketThread,
) )
from basicswap.network.simplex_chat import startSimplexClient from basicswap.network.simplex_chat import startSimplexClient
from tests.basicswap.common import ( from tests.basicswap.common import (
@@ -53,10 +55,14 @@ from tests.basicswap.common import (
wait_for_bid, wait_for_bid,
wait_for_offer, wait_for_offer,
) )
from tests.basicswap.util import read_json_api
from tests.basicswap.test_xmr import BaseTest, test_delay_event, RESET_TEST from tests.basicswap.test_xmr import BaseTest, test_delay_event, RESET_TEST
SIMPLEX_SERVER_FINGERPRINT = os.getenv("SIMPLEX_SERVER_FINGERPRINT", "")
SIMPLEX_SERVER_ADDRESS = os.getenv("SIMPLEX_SERVER_ADDRESS") SIMPLEX_SERVER_ADDRESS = os.getenv(
"SIMPLEX_SERVER_ADDRESS",
f"smp://{SIMPLEX_SERVER_FINGERPRINT}:password@127.0.0.1:5223",
)
SIMPLEX_CLIENT_PATH = os.path.expanduser(os.getenv("SIMPLEX_CLIENT_PATH")) SIMPLEX_CLIENT_PATH = os.path.expanduser(os.getenv("SIMPLEX_CLIENT_PATH"))
TEST_DIR = cfg.TEST_DATADIRS TEST_DIR = cfg.TEST_DATADIRS
@@ -67,6 +73,35 @@ if not len(logger.handlers):
logger.addHandler(logging.StreamHandler(sys.stdout)) logger.addHandler(logging.StreamHandler(sys.stdout))
def parse_message(msg_data):
if getResponseData(msg_data, "type") not in (
"chatItemsStatusesUpdated",
"newChatItems",
):
return None
for chat_item in getResponseData(msg_data, "chatItems"):
chat_type: str = chat_item["chatInfo"]["type"]
if chat_type == "group":
chat_name = chat_item["chatInfo"]["groupInfo"]["localDisplayName"]
elif chat_type == "direct":
chat_name = chat_item["chatInfo"]["contact"]["localDisplayName"]
else:
return None
dir_type = chat_item["chatItem"]["meta"]["itemStatus"]["type"]
msg_dir = "recv" if dir_type == "rcvNew" else "sent"
if dir_type in ("sndRcvd", "rcvNew"):
msg_content = chat_item["chatItem"]["content"]["msgContent"]["text"]
return {
"text": msg_content,
"chat_type": chat_type,
"chat_name": chat_name,
"msg_dir": msg_dir,
}
return None
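For reference, a minimal "newChatItems" event of the shape parse_message expects. Only the keys the parser reads are shown, and the top-level "resp" envelope assumes getResponseData unwraps it the way the pre-change code did; real simplex-chat events carry many more fields.

sample_event = {
    "resp": {
        "type": "newChatItems",
        "chatItems": [
            {
                "chatInfo": {
                    "type": "group",
                    "groupInfo": {"localDisplayName": "bsx"},
                },
                "chatItem": {
                    "meta": {"itemStatus": {"type": "rcvNew"}},
                    "content": {"msgContent": {"text": "test msg 1"}},
                },
            }
        ],
    }
}
# parse_message(sample_event) would return:
# {"text": "test msg 1", "chat_type": "group", "chat_name": "bsx", "msg_dir": "recv"}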
class TestSimplex(unittest.TestCase): class TestSimplex(unittest.TestCase):
daemons = [] daemons = []
remove_testdir: bool = False remove_testdir: bool = False
@@ -79,10 +114,10 @@ class TestSimplex(unittest.TestCase):
if os.path.isdir(TEST_DIR): if os.path.isdir(TEST_DIR):
if RESET_TEST: if RESET_TEST:
logging.info("Removing " + TEST_DIR) logger.info("Removing " + TEST_DIR)
shutil.rmtree(TEST_DIR) shutil.rmtree(TEST_DIR)
else: else:
logging.info("Restoring instance from " + TEST_DIR) logger.info("Restoring instance from " + TEST_DIR)
if not os.path.exists(TEST_DIR): if not os.path.exists(TEST_DIR):
os.makedirs(TEST_DIR) os.makedirs(TEST_DIR)
@@ -126,7 +161,7 @@ class TestSimplex(unittest.TestCase):
waitForConnected(ws_thread, test_delay_event) waitForConnected(ws_thread, test_delay_event)
sent_id = ws_thread.send_command("/group bsx") sent_id = ws_thread.send_command("/group bsx")
response = waitForResponse(ws_thread, sent_id, test_delay_event) response = waitForResponse(ws_thread, sent_id, test_delay_event)
assert response["resp"]["type"] == "groupCreated" assert getResponseData(response, "type") == "groupCreated"
ws_thread.send_command("/set voice #bsx off") ws_thread.send_command("/set voice #bsx off")
ws_thread.send_command("/set files #bsx off") ws_thread.send_command("/set files #bsx off")
@@ -134,82 +169,221 @@ class TestSimplex(unittest.TestCase):
ws_thread.send_command("/set reactions #bsx off") ws_thread.send_command("/set reactions #bsx off")
ws_thread.send_command("/set reports #bsx off") ws_thread.send_command("/set reports #bsx off")
ws_thread.send_command("/set disappear #bsx on week") ws_thread.send_command("/set disappear #bsx on week")
sent_id = ws_thread.send_command("/create link #bsx")
connReqContact = None sent_id = ws_thread.send_command("/create link #bsx")
connReqMsgData = waitForResponse(ws_thread, sent_id, test_delay_event) connReqMsgData = waitForResponse(ws_thread, sent_id, test_delay_event)
connReqContact = connReqMsgData["resp"]["connReqContact"] connReqContact = getNewSimplexLink(connReqMsgData)
group_link = "https://simplex.chat" + connReqContact[8:] group_link = "https://simplex.chat" + connReqContact[8:]
logger.info(f"group_link: {group_link}") logger.info(f"group_link: {group_link}")
sent_id = ws_thread2.send_command("/c " + group_link) sent_id = ws_thread2.send_command("/c " + group_link)
response = waitForResponse(ws_thread2, sent_id, test_delay_event) response = waitForResponse(ws_thread2, sent_id, test_delay_event)
assert "groupLinkId" in response["resp"]["connection"] assert "groupLinkId" in getResponseData(response, "connection")
sent_id = ws_thread2.send_command("/groups") sent_id = ws_thread2.send_command("/groups")
response = waitForResponse(ws_thread2, sent_id, test_delay_event) response = waitForResponse(ws_thread2, sent_id, test_delay_event)
assert len(response["resp"]["groups"]) == 1 assert len(getResponseData(response, "groups")) == 1
ws_thread.send_command("#bsx test msg 1") sent_id = ws_thread2.send_command("/connect")
response = waitForResponse(ws_thread2, sent_id, test_delay_event)
with open(os.path.join(client2_dir, "chat_inv.txt"), "w") as fp:
fp.write(json.dumps(response, indent=4))
connReqInvitation = getJoinedSimplexLink(response)
logger.info(f"direct_link: {connReqInvitation}")
pccConnId_2_sent = getResponseData(response, "connection")["pccConnId"]
print(f"pccConnId_2_sent: {pccConnId_2_sent}")
sent_id = ws_thread.send_command(f"/connect {connReqInvitation}")
response = waitForResponse(ws_thread, sent_id, test_delay_event)
with open(os.path.join(client1_dir, "chat_inv_accept.txt"), "w") as fp:
fp.write(json.dumps(response, indent=4))
pccConnId_1_accepted = getResponseData(response, "connection")["pccConnId"]
print(f"pccConnId_1_accepted: {pccConnId_1_accepted}")
sent_id = ws_thread.send_command("/chats")
response = waitForResponse(ws_thread, sent_id, test_delay_event)
with open(os.path.join(client1_dir, "chats.txt"), "w") as fp:
fp.write(json.dumps(response, indent=4))
direct_local_name_1 = None
for chat in getResponseData(response, "chats"):
print(f"chat: {chat}")
if (
chat["chatInfo"]["contact"]["activeConn"]["connId"]
== pccConnId_1_accepted
):
direct_local_name_1 = chat["chatInfo"]["contact"][
"localDisplayName"
]
break
print(f"direct_local_name_1: {direct_local_name_1}")
sent_id = ws_thread2.send_command("/chats")
response = waitForResponse(ws_thread2, sent_id, test_delay_event)
with open(os.path.join(client2_dir, "chats.txt"), "w") as fp:
fp.write(json.dumps(response, indent=4))
direct_local_name_2 = None
for chat in getResponseData(response, "chats"):
print(f"chat: {chat}")
if (
chat["chatInfo"]["contact"]["activeConn"]["connId"]
== pccConnId_2_sent
):
direct_local_name_2 = chat["chatInfo"]["contact"][
"localDisplayName"
]
break
print(f"direct_local_name_2: {direct_local_name_2}")
# localDisplayName in chats doesn't match the contactConnected message.
assert direct_local_name_1 == "user_1"
assert direct_local_name_2 == "user_1"
sent_id = ws_thread.send_command("#bsx test msg 1")
response = waitForResponse(ws_thread, sent_id, test_delay_event)
assert getResponseData(response, "type") == "newChatItems"
sent_id = ws_thread.send_command("@user_1 test msg 2")
response = waitForResponse(ws_thread, sent_id, test_delay_event)
assert getResponseData(response, "type") == "newChatItems"
msg_counter1: int = 0
msg_counter2: int = 0
found = [dict(), dict()]
found_connected = [dict(), dict()]
found_1 = False
found_2 = False
for i in range(100): for i in range(100):
message = ws_thread.queue_get() if test_delay_event.is_set():
if message is not None: break
for k in range(100):
message = ws_thread.queue_get()
if message is None or test_delay_event.is_set():
break
msg_counter1 += 1
data = json.loads(message) data = json.loads(message)
# print(f"message 1: {json.dumps(data, indent=4)}")
try: try:
if data["resp"]["type"] in ( msg_type = getResponseData(data, "type")
"chatItemsStatusesUpdated", except Exception as e:
"newChatItems", print(f"msg_type error: {e}")
): msg_type = "None"
for chat_item in data["resp"]["chatItems"]: with open(
# print(f"chat_item 1: {json.dumps(chat_item, indent=4)}") os.path.join(
if chat_item["chatItem"]["meta"]["itemStatus"][ client1_dir, f"recv_{msg_counter1}_{msg_type}.txt"
"type" ),
] in ("sndRcvd", "rcvNew"): "w",
if ( ) as fp:
chat_item["chatItem"]["content"]["msgContent"][ fp.write(json.dumps(data, indent=4))
"text" if msg_type == "contactConnected":
] found_connected[0][msg_counter1] = data
== "test msg 1" continue
): try:
found_1 = True simplex_msg = parse_message(data)
if simplex_msg:
simplex_msg["msg_id"] = msg_counter1
found[0][msg_counter1] = simplex_msg
except Exception as e: except Exception as e:
print(f"error 1: {e}") print(f"error 1: {e}")
message = ws_thread2.queue_get() for k in range(100):
if message is not None: message = ws_thread2.queue_get()
if message is None or test_delay_event.is_set():
break
msg_counter2 += 1
data = json.loads(message) data = json.loads(message)
# print(f"message 2: {json.dumps(data, indent=4)}")
try: try:
if data["resp"]["type"] in ( msg_type = getResponseData(data, "type")
"chatItemsStatusesUpdated", except Exception as e:
"newChatItems", print(f"msg_type error: {e}")
): msg_type = "None"
for chat_item in data["resp"]["chatItems"]: with open(
# print(f"chat_item 1: {json.dumps(chat_item, indent=4)}") os.path.join(
if chat_item["chatItem"]["meta"]["itemStatus"][ client2_dir, f"recv_{msg_counter2}_{msg_type}.txt"
"type" ),
] in ("sndRcvd", "rcvNew"): "w",
if ( ) as fp:
chat_item["chatItem"]["content"]["msgContent"][ fp.write(json.dumps(data, indent=4))
"text" if msg_type == "contactConnected":
] found_connected[1][msg_counter2] = data
== "test msg 1" continue
): try:
found_2 = True simplex_msg = parse_message(data)
if simplex_msg:
simplex_msg["msg_id"] = msg_counter2
found[1][msg_counter2] = simplex_msg
except Exception as e: except Exception as e:
print(f"error 2: {e}") print(f"error 2: {e}")
if (
if found_1 and found_2: len(found[0]) >= 2
and len(found[1]) >= 2
and len(found_connected[0]) >= 1
and len(found_connected[1]) >= 1
):
break break
test_delay_event.wait(0.5) test_delay_event.wait(0.5)
assert found_1 is True assert len(found_connected[0]) == 1
assert found_2 is True node1_connect = list(found_connected[0].values())[0]
assert (
getResponseData(node1_connect, "contact")["activeConn"]["connId"]
== pccConnId_1_accepted
)
assert (
getResponseData(node1_connect, "contact")["localDisplayName"]
== "user_2"
)
assert len(found_connected[1]) == 1
node2_connect = list(found_connected[1].values())[0]
assert (
getResponseData(node2_connect, "contact")["activeConn"]["connId"]
== pccConnId_2_sent
)
assert (
getResponseData(node2_connect, "contact")["localDisplayName"]
== "user_2"
)
node1_msg1 = [m for m in found[0].values() if m["text"] == "test msg 1"]
assert len(node1_msg1) == 1
node1_msg1 = node1_msg1[0]
assert node1_msg1["chat_type"] == "group"
assert node1_msg1["chat_name"] == "bsx"
assert node1_msg1["msg_dir"] == "sent"
node1_msg2 = [m for m in found[0].values() if m["text"] == "test msg 2"]
assert len(node1_msg2) == 1
node1_msg2 = node1_msg2[0]
assert node1_msg2["chat_type"] == "direct"
assert node1_msg2["chat_name"] == "user_1"
assert node1_msg2["msg_dir"] == "sent"
node2_msg1 = [m for m in found[1].values() if m["text"] == "test msg 1"]
assert len(node2_msg1) == 1
node2_msg1 = node2_msg1[0]
assert node2_msg1["chat_type"] == "group"
assert node2_msg1["chat_name"] == "bsx"
assert node2_msg1["msg_dir"] == "recv"
node2_msg2 = [m for m in found[1].values() if m["text"] == "test msg 2"]
assert len(node2_msg2) == 1
node2_msg2 = node2_msg2[0]
assert node2_msg2["chat_type"] == "direct"
assert node2_msg2["chat_name"] == "user_1"
assert node2_msg2["msg_dir"] == "recv"
sent_id = ws_thread.send_command("/delete @user_1")
response = waitForResponse(ws_thread, sent_id, test_delay_event)
assert getResponseData(response, "type") == "contactDeleted"
sent_id = ws_thread2.send_command("/delete @user_1")
response = waitForResponse(ws_thread2, sent_id, test_delay_event)
assert getResponseData(response, "type") == "contactDeleted"
sent_id = ws_thread2.send_command("/chats")
response = waitForResponse(ws_thread2, sent_id, test_delay_event)
with open(os.path.join(client2_dir, "chats_after_delete.txt"), "w") as fp:
fp.write(json.dumps(response, indent=4))
assert len(getResponseData(response, "chats")) == 4
finally: finally:
for t in threads: for t in threads:
@@ -254,7 +428,7 @@ class Test(BaseTest):
waitForConnected(ws_thread, test_delay_event) waitForConnected(ws_thread, test_delay_event)
sent_id = ws_thread.send_command("/group bsx") sent_id = ws_thread.send_command("/group bsx")
response = waitForResponse(ws_thread, sent_id, test_delay_event) response = waitForResponse(ws_thread, sent_id, test_delay_event)
assert response["resp"]["type"] == "groupCreated" assert getResponseData(response, "type") == "groupCreated"
ws_thread.send_command("/set voice #bsx off") ws_thread.send_command("/set voice #bsx off")
ws_thread.send_command("/set files #bsx off") ws_thread.send_command("/set files #bsx off")
@@ -266,7 +440,7 @@ class Test(BaseTest):
connReqContact = None connReqContact = None
connReqMsgData = waitForResponse(ws_thread, sent_id, test_delay_event) connReqMsgData = waitForResponse(ws_thread, sent_id, test_delay_event)
connReqContact = connReqMsgData["resp"]["connReqContact"] connReqContact = getNewSimplexLink(connReqMsgData)
cls.group_link = "https://simplex.chat" + connReqContact[8:] cls.group_link = "https://simplex.chat" + connReqContact[8:]
logger.info(f"BSX group_link: {cls.group_link}") logger.info(f"BSX group_link: {cls.group_link}")
@@ -277,13 +451,12 @@ class Test(BaseTest):
@classmethod @classmethod
def tearDownClass(cls): def tearDownClass(cls):
logging.info("Finalising Test") logger.info("Finalising Test")
super(Test, cls).tearDownClass() super(Test, cls).tearDownClass()
stopDaemons(cls.daemons) stopDaemons(cls.daemons)
@classmethod @classmethod
def addCoinSettings(cls, settings, datadir, node_id): def addCoinSettings(cls, settings, datadir, node_id):
settings["networks"] = [ settings["networks"] = [
{ {
"type": "simplex", "type": "simplex",
@@ -295,17 +468,22 @@ class Test(BaseTest):
] ]
def test_01_swap(self): def test_01_swap(self):
logging.info("---------- Test xmr swap") logger.info("---------- Test adaptor sig swap")
swap_clients = self.swap_clients swap_clients = self.swap_clients
for sc in swap_clients: for sc in swap_clients:
sc.dleag_split_size_init = 9000 sc._use_direct_message_routes = False
sc.dleag_split_size = 11000
assert len(swap_clients[0].active_networks) == 1 assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex" assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
coin_from = Coins.BTC coin_from = Coins.BTC
coin_to = self.coin_to coin_to = self.coin_to
@@ -340,3 +518,506 @@ class Test(BaseTest):
sent=True, sent=True,
wait_for=320, wait_for=320,
) )
for i in range(3):
assert (
num_direct_messages_received_before[i]
== swap_clients[i].num_direct_simplex_messages_received
)
def test_01_swap_reverse(self):
logger.info("---------- Test adaptor sig swap reverse")
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = False
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
coin_from = self.coin_to
coin_to = Coins.BTC
ci_from = swap_clients[1].ci(coin_from)
ci_to = swap_clients[0].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[1].postOffer(
coin_from, coin_to, swap_value, rate_swap, swap_value, SwapTypes.XMR_SWAP
)
wait_for_offer(test_delay_event, swap_clients[0], offer_id)
offer = swap_clients[0].getOffer(offer_id)
bid_id = swap_clients[0].postBid(offer_id, offer.amount_from)
wait_for_bid(test_delay_event, swap_clients[1], bid_id, BidStates.BID_RECEIVED)
swap_clients[1].acceptBid(bid_id)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
for i in range(3):
assert (
num_direct_messages_received_before[i]
== swap_clients[i].num_direct_simplex_messages_received
)
def test_02_direct(self):
logger.info("---------- Test adaptor sig swap with direct messages")
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = True
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
num_group_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
num_group_messages_received_before[i] = swap_clients[
i
].num_group_simplex_messages_received
coin_from = Coins.BTC
coin_to = self.coin_to
ci_from = swap_clients[0].ci(coin_from)
ci_to = swap_clients[1].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[0].postOffer(
coin_from, coin_to, swap_value, rate_swap, swap_value, SwapTypes.XMR_SWAP
)
wait_for_offer(test_delay_event, swap_clients[1], offer_id)
offer = swap_clients[1].getOffer(offer_id)
bid_id = swap_clients[1].postBid(offer_id, offer.amount_from)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.BID_RECEIVED,
wait_for=60,
)
swap_clients[0].acceptBid(bid_id)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
for i in range(3):
assert swap_clients[i].num_group_simplex_messages_received == num_group_messages_received_before[i] + 2
assert swap_clients[2].num_direct_simplex_messages_received == num_direct_messages_received_before[2]
def test_02_direct_reverse(self):
logger.info(
"---------- Test test_02_direct_reverse adaptor sig swap with direct messages"
)
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = True
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
num_group_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
num_group_messages_received_before[i] = swap_clients[
i
].num_group_simplex_messages_received
coin_from = self.coin_to
coin_to = Coins.BTC
ci_from = swap_clients[1].ci(coin_from)
ci_to = swap_clients[0].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[1].postOffer(
coin_from, coin_to, swap_value, rate_swap, swap_value, SwapTypes.XMR_SWAP
)
wait_for_offer(test_delay_event, swap_clients[0], offer_id)
offer = swap_clients[0].getOffer(offer_id)
bid_id = swap_clients[0].postBid(offer_id, offer.amount_from)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.BID_RECEIVED,
wait_for=60,
)
swap_clients[1].acceptBid(bid_id)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
for i in range(3):
assert swap_clients[i].num_group_simplex_messages_received == num_group_messages_received_before[i] + 2
assert swap_clients[2].num_direct_simplex_messages_received == num_direct_messages_received_before[2]
def test_03_hltc(self):
logger.info("---------- Test secret hash swap")
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = False
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
num_group_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
num_group_messages_received_before[i] = swap_clients[
i
].num_group_simplex_messages_received
coin_from = Coins.PART
coin_to = Coins.BTC
self.prepare_balance(coin_to, 200.0, 1801, 1800)
ci_from = swap_clients[0].ci(coin_from)
ci_to = swap_clients[1].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[0].postOffer(
coin_from,
coin_to,
swap_value,
rate_swap,
swap_value,
SwapTypes.SELLER_FIRST,
)
wait_for_offer(test_delay_event, swap_clients[1], offer_id)
offer = swap_clients[1].getOffer(offer_id)
bid_id = swap_clients[1].postBid(offer_id, offer.amount_from)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.BID_RECEIVED,
wait_for=90,
)
swap_clients[0].acceptBid(bid_id)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
for i in range(3):
assert (
num_direct_messages_received_before[i]
== swap_clients[i].num_direct_simplex_messages_received
)
def test_03_direct_hltc(self):
logger.info("---------- Test secret hash swap with direct messages")
for i in range(3):
message_routes = read_json_api(
1800 + i, "messageroutes", {"action": "clear"}
)
assert len(message_routes) == 0
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = True
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
num_group_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
num_group_messages_received_before[i] = swap_clients[
i
].num_group_simplex_messages_received
coin_from = Coins.PART
coin_to = Coins.BTC
self.prepare_balance(coin_to, 200.0, 1801, 1800)
ci_from = swap_clients[0].ci(coin_from)
ci_to = swap_clients[1].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[0].postOffer(
coin_from,
coin_to,
swap_value,
rate_swap,
swap_value,
SwapTypes.SELLER_FIRST,
)
wait_for_offer(test_delay_event, swap_clients[1], offer_id)
offer = swap_clients[1].getOffer(offer_id)
bid_id = swap_clients[1].postBid(offer_id, offer.amount_from)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.BID_RECEIVED,
wait_for=90,
)
swap_clients[0].acceptBid(bid_id)
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_id,
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_id,
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
message_routes = read_json_api(1800, "messageroutes")
assert len(message_routes) == 1
for i in range(3):
assert swap_clients[i].num_group_simplex_messages_received == num_group_messages_received_before[i] + 2
assert swap_clients[2].num_direct_simplex_messages_received == num_direct_messages_received_before[2]
def test_04_multiple(self):
logger.info("---------- Test multiple swaps with direct messages")
for i in range(3):
message_routes = read_json_api(
1800 + i, "messageroutes", {"action": "clear"}
)
assert len(message_routes) == 0
swap_clients = self.swap_clients
for sc in swap_clients:
sc._use_direct_message_routes = True
assert len(swap_clients[0].active_networks) == 1
assert swap_clients[0].active_networks[0]["type"] == "simplex"
num_direct_messages_received_before = [0] * 3
num_group_messages_received_before = [0] * 3
for i in range(3):
num_direct_messages_received_before[i] = swap_clients[
i
].num_direct_simplex_messages_received
num_group_messages_received_before[i] = swap_clients[
i
].num_group_simplex_messages_received
coin_from = Coins.BTC
coin_to = self.coin_to
ci_from = swap_clients[0].ci(coin_from)
ci_to = swap_clients[1].ci(coin_to)
swap_value = ci_from.make_int(random.uniform(0.2, 20.0), r=1)
rate_swap = ci_to.make_int(random.uniform(0.2, 20.0), r=1)
offer_id = swap_clients[0].postOffer(
coin_from, coin_to, swap_value, rate_swap, swap_value, SwapTypes.XMR_SWAP
)
swap_clients[1].active_networks[0]["ws_thread"].ignore_events = True
wait_for_offer(test_delay_event, swap_clients[1], offer_id)
offer = swap_clients[1].getOffer(offer_id)
addr1_bids = swap_clients[1].getReceiveAddressForCoin(Coins.PART)
bid_ids = []
for i in range(2):
bid_ids.append(
swap_clients[1].postBid(
offer_id, offer.amount_from, addr_send_from=addr1_bids
)
)
swap_clients[1].active_networks[0]["ws_thread"].disable_debug_mode()
bid_ids.append(swap_clients[1].postBid(offer_id, offer.amount_from))
for i in range(len(bid_ids)):
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_ids[i],
BidStates.BID_RECEIVED,
wait_for=60,
)
swap_clients[0].acceptBid(bid_ids[i])
logger.info("Message routes with active bids shouldn't expire")
swap_clients[0].mock_time_offset = (
swap_clients[0]._expire_message_routes_after + 1
)
swap_clients[0].expireMessageRoutes()
swap_clients[0].mock_time_offset = 0
message_routes_0 = read_json_api(1800, "messageroutes")
assert len(message_routes_0) == 2
for i in range(len(bid_ids)):
wait_for_bid(
test_delay_event,
swap_clients[0],
bid_ids[i],
BidStates.SWAP_COMPLETED,
wait_for=320,
)
wait_for_bid(
test_delay_event,
swap_clients[1],
bid_ids[i],
BidStates.SWAP_COMPLETED,
sent=True,
wait_for=320,
)
for i in range(3):
assert swap_clients[i].num_group_simplex_messages_received == num_group_messages_received_before[i] + 2
assert swap_clients[2].num_direct_simplex_messages_received == num_direct_messages_received_before[2]
message_routes_0 = read_json_api(1800, "messageroutes")
assert len(message_routes_0) == 2
message_routes_1 = read_json_api(1801, "messageroutes")
assert len(message_routes_1) == 2
logger.info("Test closing routes")
read_json_api(1800, "messageroutes", {"action": "clear"})
def waitForNumMessageRoutes(
port: int = 1800, num_routes: int = 0, num_tries: int = 40
):
logger.info(
f"Waiting for {num_routes} message route{'s' if num_routes != 1 else ''}, port: {port}."
)
for i in range(num_tries):
test_delay_event.wait(1)
if test_delay_event.is_set():
raise ValueError("Test stopped.")
message_routes = read_json_api(port, "messageroutes")
if len(message_routes) == num_routes:
return True
raise ValueError("waitForNumMessageRoutes timed out.")
waitForNumMessageRoutes(1800, 0)
waitForNumMessageRoutes(1801, 0)

View File

@@ -35,6 +35,9 @@ import sys
import threading import threading
import time import time
import unittest import unittest
import basicswap.config as cfg
from unittest.mock import patch from unittest.mock import patch
from basicswap.rpc_xmr import ( from basicswap.rpc_xmr import (
@@ -87,6 +90,19 @@ TEST_COINS_LIST = os.getenv("TEST_COINS_LIST", "bitcoin,monero")
NUM_NODES = int(os.getenv("NUM_NODES", 3)) NUM_NODES = int(os.getenv("NUM_NODES", 3))
EXTRA_CONFIG_JSON = json.loads(os.getenv("EXTRA_CONFIG_JSON", "{}")) EXTRA_CONFIG_JSON = json.loads(os.getenv("EXTRA_CONFIG_JSON", "{}"))
SIMPLEX_SERVER_FINGERPRINT = os.getenv("SIMPLEX_SERVER_FINGERPRINT", "")
SIMPLEX_SERVER_PASSWORD = os.getenv("SIMPLEX_SERVER_PASSWORD", "password")
SIMPLEX_SERVER_HOST = os.getenv("SIMPLEX_SERVER_HOST", "127.0.0.1")
SIMPLEX_SERVER_PORT = os.getenv("SIMPLEX_SERVER_PORT", "5223")
SIMPLEX_SERVER_ADDRESS = os.getenv(
"SIMPLEX_SERVER_ADDRESS",
f"smp://{SIMPLEX_SERVER_FINGERPRINT}:{SIMPLEX_SERVER_PASSWORD}@{SIMPLEX_SERVER_HOST}:{SIMPLEX_SERVER_PORT}",
)
SIMPLEX_WS_PORT = int(os.getenv("SIMPLEX_WS_PORT", "5225"))
SIMPLEX_GROUP_LINK = os.getenv("SIMPLEX_GROUP_LINK", "")
SIMPLEX_CLIENT_PATH = os.path.expanduser(os.getenv("SIMPLEX_CLIENT_PATH", ""))
SIMPLEX_SERVER_SOCKS_PROXY = os.getenv("SIMPLEX_SERVER_SOCKS_PROXY", "")
logger = logging.getLogger() logger = logging.getLogger()
logger.level = logging.DEBUG logger.level = logging.DEBUG
if not len(logger.handlers): if not len(logger.handlers):
@@ -318,7 +334,7 @@ def start_processes(self):
self.btc_addr = callbtcrpc(0, "getnewaddress", ["mining_addr", "bech32"]) self.btc_addr = callbtcrpc(0, "getnewaddress", ["mining_addr", "bech32"])
num_blocks: int = 500 # Mine enough to activate segwit num_blocks: int = 500 # Mine enough to activate segwit
if callbtcrpc(0, "getblockcount") < num_blocks: if callbtcrpc(0, "getblockcount") < num_blocks:
logging.info("Mining %d Bitcoin blocks to %s", num_blocks, self.btc_addr) logging.info(f"Mining {num_blocks} Bitcoin blocks to {self.btc_addr}")
callbtcrpc(0, "generatetoaddress", [num_blocks, self.btc_addr]) callbtcrpc(0, "generatetoaddress", [num_blocks, self.btc_addr])
logging.info("BTC blocks: %d", callbtcrpc(0, "getblockcount")) logging.info("BTC blocks: %d", callbtcrpc(0, "getblockcount"))
@@ -476,6 +492,28 @@ def start_processes(self):
assert particl_blocks >= num_blocks assert particl_blocks >= num_blocks
def modifyConfig(test_path, i):
config_path = os.path.join(test_path, f"client{i}", cfg.CONFIG_FILENAME)
with open(config_path) as fp:
settings = json.load(fp)
if SIMPLEX_CLIENT_PATH != "":
simplex_options = {
"type": "simplex",
"server_address": SIMPLEX_SERVER_ADDRESS,
"client_path": SIMPLEX_CLIENT_PATH,
"ws_port": SIMPLEX_WS_PORT + i,
"group_link": SIMPLEX_GROUP_LINK,
}
if SIMPLEX_SERVER_SOCKS_PROXY != "":
simplex_options["socks_proxy_override"] = SIMPLEX_SERVER_SOCKS_PROXY
settings["networks"] = [simplex_options]
with open(config_path, "w") as fp:
json.dump(settings, fp, indent=4)
class BaseTestWithPrepare(unittest.TestCase): class BaseTestWithPrepare(unittest.TestCase):
__test__ = False __test__ = False
@@ -522,6 +560,9 @@ class BaseTestWithPrepare(unittest.TestCase):
PORT_OFS, PORT_OFS,
) )
for i in range(NUM_NODES):
modifyConfig(test_path, i)
signal.signal( signal.signal(
signal.SIGINT, lambda signal, frame: signal_handler(cls, signal, frame) signal.SIGINT, lambda signal, frame: signal_handler(cls, signal, frame)
) )

View File

@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright (c) 2021-2022 tecnovert # Copyright (c) 2021-2022 tecnovert
# Copyright (c) 2024 The Basicswap developers # Copyright (c) 2024-2025 The Basicswap developers
# Distributed under the MIT software license, see the accompanying # Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php. # file LICENSE or http://www.opensource.org/licenses/mit-license.php.
@@ -17,12 +17,9 @@ python tests/basicswap/test_xmr_bids_offline.py
""" """
import sys import sys
import json
import logging import logging
import unittest import unittest
import multiprocessing import multiprocessing
from urllib import parse
from urllib.request import urlopen
from tests.basicswap.util import ( from tests.basicswap.util import (
read_json_api, read_json_api,
@@ -64,21 +61,11 @@ class Test(XmrTestBase):
"lockhrs": 24, "lockhrs": 24,
"automation_strat_id": 1, "automation_strat_id": 1,
} }
rv = json.loads( rv = read_json_api(12700, "offers/new", offer_data)
urlopen(
"http://127.0.0.1:12700/json/offers/new",
data=parse.urlencode(offer_data).encode(),
).read()
)
offer0_id = rv["offer_id"] offer0_id = rv["offer_id"]
offer_data["amt_from"] = "2" offer_data["amt_from"] = "2"
rv = json.loads( rv = read_json_api(12700, "offers/new", offer_data)
urlopen(
"http://127.0.0.1:12700/json/offers/new",
data=parse.urlencode(offer_data).encode(),
).read()
)
offer1_id = rv["offer_id"] offer1_id = rv["offer_id"]
summary = read_json_api(12700) summary = read_json_api(12700)
@@ -92,52 +79,26 @@ class Test(XmrTestBase):
c0.terminate() c0.terminate()
c0.join() c0.join()
offers = json.loads( offers = read_json_api(12701, f"offers/{offer0_id}")
urlopen("http://127.0.0.1:12701/json/offers/{}".format(offer0_id)).read()
)
assert len(offers) == 1 assert len(offers) == 1
offer0 = offers[0] offer0 = offers[0]
post_data = {"coin_from": "PART"} post_data = {"coin_from": "PART"}
test_post_offers = json.loads( test_post_offers = read_json_api(12701, "offers", post_data)
urlopen(
"http://127.0.0.1:12701/json/offers",
data=parse.urlencode(post_data).encode(),
).read()
)
assert len(test_post_offers) == 2 assert len(test_post_offers) == 2
post_data["coin_from"] = "2" post_data["coin_from"] = "2"
test_post_offers = json.loads( test_post_offers = read_json_api(12701, "offers", post_data)
urlopen(
"http://127.0.0.1:12701/json/offers",
data=parse.urlencode(post_data).encode(),
).read()
)
assert len(test_post_offers) == 0 assert len(test_post_offers) == 0
bid_data = {"offer_id": offer0_id, "amount_from": offer0["amount_from"]} bid_data = {"offer_id": offer0_id, "amount_from": offer0["amount_from"]}
bid0_id = read_json_api(12701, "bids/new", bid_data)["bid_id"]
bid0_id = json.loads( offers = read_json_api(12701, f"offers/{offer1_id}")
urlopen(
"http://127.0.0.1:12701/json/bids/new",
data=parse.urlencode(bid_data).encode(),
).read()
)["bid_id"]
offers = json.loads(
urlopen("http://127.0.0.1:12701/json/offers/{}".format(offer1_id)).read()
)
assert len(offers) == 1 assert len(offers) == 1
offer1 = offers[0] offer1 = offers[0]
bid_data = {"offer_id": offer1_id, "amount_from": offer1["amount_from"]} bid_data = {"offer_id": offer1_id, "amount_from": offer1["amount_from"]}
bid1_id = read_json_api(12701, "bids/new", bid_data)["bid_id"]
bid1_id = json.loads(
urlopen(
"http://127.0.0.1:12701/json/bids/new",
data=parse.urlencode(bid_data).encode(),
).read()
)["bid_id"]
logger.info("Delaying for 5 seconds.") logger.info("Delaying for 5 seconds.")
self.delay_event.wait(5) self.delay_event.wait(5)
@@ -149,26 +110,17 @@ class Test(XmrTestBase):
waitForServer(self.delay_event, 12700) waitForServer(self.delay_event, 12700)
waitForNumBids(self.delay_event, 12700, 2) waitForNumBids(self.delay_event, 12700, 2)
waitForBidState(self.delay_event, 12700, bid0_id, "Received") waitForBidState(self.delay_event, 12700, bid0_id, ("Received", "Delaying"))
waitForBidState(self.delay_event, 12700, bid1_id, "Received") waitForBidState(self.delay_event, 12700, bid1_id, ("Received", "Delaying"))
# Manually accept on top of auto-accept for extra chaos # Manually accept on top of auto-accept for extra chaos
data = parse.urlencode({"accept": True}).encode()
try: try:
rv = json.loads( rv = read_json_api(12700, f"bids/{bid0_id}", {"accept": True})
urlopen(
"http://127.0.0.1:12700/json/bids/{}".format(bid0_id), data=data
).read()
)
assert rv["bid_state"] == "Accepted" assert rv["bid_state"] == "Accepted"
except Exception as e: except Exception as e:
print("Accept bid failed", str(e), rv) print("Accept bid failed", str(e), rv)
try: try:
rv = json.loads( rv = read_json_api(12700, f"bids/{bid1_id}", {"accept": True})
urlopen(
"http://127.0.0.1:12700/json/bids/{}".format(bid1_id), data=data
).read()
)
assert rv["bid_state"] == "Accepted" assert rv["bid_state"] == "Accepted"
except Exception as e: except Exception as e:
print("Accept bid failed", str(e), rv) print("Accept bid failed", str(e), rv)
@@ -179,8 +131,8 @@ class Test(XmrTestBase):
raise ValueError("Test stopped.") raise ValueError("Test stopped.")
self.delay_event.wait(4) self.delay_event.wait(4)
rv0 = read_json_api(12700, "bids/{}".format(bid0_id)) rv0 = read_json_api(12700, f"bids/{bid0_id}")
rv1 = read_json_api(12700, "bids/{}".format(bid1_id)) rv1 = read_json_api(12700, f"bids/{bid1_id}")
if rv0["bid_state"] == "Completed" and rv1["bid_state"] == "Completed": if rv0["bid_state"] == "Completed" and rv1["bid_state"] == "Completed":
break break
assert rv0["bid_state"] == "Completed" assert rv0["bid_state"] == "Completed"