Add check for minimum sqlite version.
@@ -1023,6 +1023,12 @@ class BasicSwap(BaseApp):
         self.log.info(f"SQLite version: {sqlite3.sqlite_version}")
         self.log.debug(f"Timezone offset: {time.timezone} ({time.tzname[0]})")

+        MIN_SQLITE_VERSION = (3, 35, 0)  # Upsert
+        if sqlite3.sqlite_version_info < MIN_SQLITE_VERSION:
+            raise RuntimeError(
+                "SQLite {} or higher required.".format(".".join(str(v) for v in MIN_SQLITE_VERSION))
+            )
+
         upgradeDatabase(self, self.db_version)
         upgradeDatabaseData(self, self.db_data_version)
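For context, the inline comment ties the new version floor to upsert support. Below is a minimal sketch of the kind of INSERT ... ON CONFLICT ... DO UPDATE statement such a guard protects, using only the standard sqlite3 module; the table and column names are illustrative and are not BasicSwap's actual schema:

    import sqlite3

    con = sqlite3.connect(":memory:")
    # Illustrative table loosely mirroring KnownIdentity; not the project's real schema.
    con.execute(
        "CREATE TABLE knownidentities "
        "(record_id INTEGER PRIMARY KEY ASC, address TEXT UNIQUE, label TEXT)"
    )
    # The first insert creates the row; the second targets the same primary key,
    # so ON CONFLICT turns it into an update instead of an error.
    con.execute("INSERT INTO knownidentities (address, label) VALUES (?, ?)", ("test", "test"))
    con.execute(
        "INSERT INTO knownidentities (record_id, address, label) VALUES (?, ?, ?) "
        "ON CONFLICT (record_id) DO UPDATE SET address = excluded.address, label = excluded.label",
        (1, "test1", "test1"),
    )
    con.commit()
    print(con.execute("SELECT record_id, address, label FROM knownidentities").fetchall())
    # -> [(1, 'test1', 'test1')]

Without the ON CONFLICT clause the second insert would fail with a constraint error, which is what the new test further down asserts for the non-upsert path.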
basicswap/db.py

@@ -658,82 +658,85 @@ class CoinRates(Table):
     last_updated = Column("integer")


+def create_db_(con, log) -> None:
+    c = con.cursor()
+
+    g = globals().copy()
+    for name, obj in g.items():
+        if not inspect.isclass(obj):
+            continue
+        if not hasattr(obj, "__sqlite3_table__"):
+            continue
+        if not hasattr(obj, "__tablename__"):
+            continue
+
+        table_name: str = obj.__tablename__
+        query: str = f"CREATE TABLE {table_name} ("
+
+        primary_key = None
+        constraints = []
+        indices = []
+        num_columns: int = 0
+        for m in inspect.getmembers(obj):
+            m_name, m_obj = m
+
+            if hasattr(m_obj, "__sqlite3_primary_key__"):
+                primary_key = m_obj
+                continue
+            if hasattr(m_obj, "__sqlite3_unique__"):
+                constraints.append(m_obj)
+                continue
+            if hasattr(m_obj, "__sqlite3_index__"):
+                indices.append(m_obj)
+                continue
+            if hasattr(m_obj, "__sqlite3_column__"):
+                if num_columns > 0:
+                    query += ","
+
+                col_type: str = m_obj.column_type.upper()
+                if col_type == "BOOL":
+                    col_type = "INTEGER"
+                query += f" {m_name} {col_type} "
+
+                if m_obj.primary_key:
+                    query += "PRIMARY KEY ASC "
+                if m_obj.unique:
+                    query += "UNIQUE "
+                num_columns += 1
+
+        if primary_key is not None:
+            query += f", PRIMARY KEY ({primary_key.column_1}"
+            if primary_key.column_2:
+                query += f", {primary_key.column_2}"
+            if primary_key.column_3:
+                query += f", {primary_key.column_3}"
+            query += ") "
+
+        for constraint in constraints:
+            query += f", UNIQUE ({constraint.column_1}"
+            if constraint.column_2:
+                query += f", {constraint.column_2}"
+            if constraint.column_3:
+                query += f", {constraint.column_3}"
+            query += ") "
+
+        query += ")"
+        c.execute(query)
+        for i in indices:
+            query: str = f"CREATE INDEX {i.name} ON {table_name} ({i.column_1}"
+            if i.column_2 is not None:
+                query += f", {i.column_2}"
+            if i.column_3 is not None:
+                query += f", {i.column_3}"
+            query += ")"
+            c.execute(query)
+
+
 def create_db(db_path: str, log) -> None:
     con = None
     try:
         con = sqlite3.connect(db_path)
-        c = con.cursor()
-
-        g = globals().copy()
-        for name, obj in g.items():
-            if not inspect.isclass(obj):
-                continue
-            if not hasattr(obj, "__sqlite3_table__"):
-                continue
-            if not hasattr(obj, "__tablename__"):
-                continue
-
-            table_name: str = obj.__tablename__
-            query: str = f"CREATE TABLE {table_name} ("
-
-            primary_key = None
-            constraints = []
-            indices = []
-            num_columns: int = 0
-            for m in inspect.getmembers(obj):
-                m_name, m_obj = m
-
-                if hasattr(m_obj, "__sqlite3_primary_key__"):
-                    primary_key = m_obj
-                    continue
-                if hasattr(m_obj, "__sqlite3_unique__"):
-                    constraints.append(m_obj)
-                    continue
-                if hasattr(m_obj, "__sqlite3_index__"):
-                    indices.append(m_obj)
-                    continue
-                if hasattr(m_obj, "__sqlite3_column__"):
-                    if num_columns > 0:
-                        query += ","
-
-                    col_type: str = m_obj.column_type.upper()
-                    if col_type == "BOOL":
-                        col_type = "INTEGER"
-                    query += f" {m_name} {col_type} "
-
-                    if m_obj.primary_key:
-                        query += "PRIMARY KEY ASC "
-                    if m_obj.unique:
-                        query += "UNIQUE "
-                    num_columns += 1
-
-            if primary_key is not None:
-                query += f", PRIMARY KEY ({primary_key.column_1}"
-                if primary_key.column_2:
-                    query += f", {primary_key.column_2}"
-                if primary_key.column_3:
-                    query += f", {primary_key.column_3}"
-                query += ") "
-
-            for constraint in constraints:
-                query += f", UNIQUE ({constraint.column_1}"
-                if constraint.column_2:
-                    query += f", {constraint.column_2}"
-                if constraint.column_3:
-                    query += f", {constraint.column_3}"
-                query += ") "
-
-            query += ")"
-            c.execute(query)
-            for i in indices:
-                query: str = f"CREATE INDEX {i.name} ON {table_name} ({i.column_1}"
-                if i.column_2 is not None:
-                    query += f", {i.column_2}"
-                if i.column_3 is not None:
-                    query += f", {i.column_3}"
-                query += ")"
-                c.execute(query)
-
+        create_db_(con, log)
         con.commit()
     finally:
         if con:
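For reference, the new helper can also be exercised directly against an in-memory database, which is how the test added below drives it. A minimal sketch, assuming basicswap is importable in the active environment:

    import logging
    import sqlite3

    from basicswap.db import create_db_

    logger = logging.getLogger(__name__)

    con = sqlite3.connect(":memory:")
    try:
        # Builds CREATE TABLE / CREATE INDEX statements from every class tagged
        # with __sqlite3_table__ and executes them on the given connection.
        create_db_(con, logger)
        print(con.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall())
    finally:
        con.close()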
@@ -175,10 +175,12 @@ Close the terminal and open a new one to update the python symlinks.

 ### Basicswap

+If installing on an older distro such as Debian 11, you may need to use a newer Python/SQLite version; see ["Use a Different Python Version" in notes.md](notes.md#Use-a-Different-Python-Version).
+
 export SWAP_DATADIR=$HOME/coinswaps
 python3 -m venv "$SWAP_DATADIR/venv"
 . $SWAP_DATADIR/venv/bin/activate && python -V
 cd $SWAP_DATADIR
 git clone https://github.com/basicswap/basicswap.git
 cd $SWAP_DATADIR/basicswap
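Before switching to a different Python as described in notes.md, it may help to confirm whether the distribution's interpreter already bundles a new enough SQLite. A minimal, illustrative check that mirrors the startup guard added in this commit:

    import sqlite3

    # The daemon now refuses to start when the bundled SQLite is older than 3.35.0.
    print("SQLite:", sqlite3.sqlite_version)
    if sqlite3.sqlite_version_info < (3, 35, 0):
        print("Too old for BasicSwap; use the uv route described in notes.md")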
doc/notes.md

@@ -103,6 +103,21 @@ Test:
 basicswap-prepare.exe --help


+## Use a Different Python Version
+
+[uv](https://github.com/astral-sh/uv) can create a virtual environment for a different version of Python (and consequently SQLite).
+
+Run:
+
+curl -LsSf https://astral.sh/uv/install.sh | sh
+source $HOME/.local/bin/env
+uv venv -p 3.10 "$SWAP_DATADIR/venv" --seed
+
+Instead of:
+
+python3 -m venv "$SWAP_DATADIR/venv"
+
+
 ## Private Offers

 To send a private offer:
@@ -2,13 +2,15 @@
 # -*- coding: utf-8 -*-

 # Copyright (c) 2019-2024 tecnovert
-# Copyright (c) 2024 The Basicswap developers
+# Copyright (c) 2024-2025 The Basicswap developers
 # Distributed under the MIT software license, see the accompanying
 # file LICENSE or http://www.opensource.org/licenses/mit-license.php.

 import hashlib
+import logging
 import random
 import secrets
+import threading
 import unittest

 import basicswap.contrib.ed25519_fast as edf

@@ -24,6 +26,7 @@ from coincurve.ecdsaotves import (
 from coincurve.keys import PrivateKey

 from basicswap.contrib.mnemonic import Mnemonic
+from basicswap.db import create_db_, DBMethods, KnownIdentity
 from basicswap.util import i2b, h2b
 from basicswap.util.address import decodeAddress
 from basicswap.util.crypto import ripemd160, hash160, blake256

@@ -51,6 +54,9 @@ from basicswap.messages_npb import (
 from basicswap.contrib.test_framework.script import hash160 as hash160_btc


+logger = logging.getLogger()
+
+
 class Test(unittest.TestCase):

     def test_serialise_num(self):

@@ -556,6 +562,31 @@ class Test(unittest.TestCase):
         mnemonic_recovered: str = Mnemonic("english").to_mnemonic(entropy0)
         assert mnemonic_recovered == mnemonics[0]

+    def test_db(self):
+        db_test = DBMethods()
+        db_test.sqlite_file = ":memory:"
+        db_test.mxDB = threading.Lock()
+        cursor = db_test.openDB()
+        try:
+            create_db_(db_test._db_con, logger)
+            # Test upsert
+            ki = KnownIdentity()
+            ki.address = "test"
+            ki.label = "test"
+            db_test.add(ki, cursor)
+            ki.record_id = 1
+            ki.address = "test1"
+            ki.label = "test1"
+            try:
+                db_test.add(ki, cursor, upsert=False)
+            except Exception as e:
+                assert "UNIQUE constraint failed" in str(e)
+            else:
+                raise ValueError("Should have errored.")
+            db_test.add(ki, cursor, upsert=True)
+        finally:
+            db_test.closeDB(cursor)
+

 if __name__ == "__main__":
     unittest.main()