Add CLN regtest to integration tests

Reckless_Satoshi 2023-11-13 14:40:47 +00:00 committed by Reckless_Satoshi
parent ebd0a287c3
commit 605a37bb87
16 changed files with 272 additions and 167 deletions

View File

@@ -15,68 +15,64 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  build:
+  test:
     runs-on: ubuntu-latest
     strategy:
-      max-parallel: 4
+      max-parallel: 2
       matrix:
-        python-version: ["3.11.6", "3.12"]
-        lnd-version: ["v0.17.0-beta","v0.17.1-beta.rc1"]
+        python-tag: ['3.11.6-slim-bookworm', '3.12-slim-bookworm']
+        lnd-version: ["v0.17.0-beta"] # , "v0.17.0-beta.rc1"]
+        cln-version: ["v23.08.1"]
+        ln-vendor: ["LND", "CLN"]
 
     steps:
       - name: 'Checkout'
         uses: actions/checkout@v4
 
-      - name: 'Compose Regtest Orchestration'
+      - name: Update Python version in Dockerfile
+        run: |
+          sed -i "1s/FROM python:.*/FROM python:${{ matrix.python-tag }}/" Dockerfile
+          sed -i '/RUN pip install --no-cache-dir -r requirements.txt/a COPY requirements_dev.txt .\nRUN pip install --no-cache-dir -r requirements_dev.txt' Dockerfile
+
+      - uses: satackey/action-docker-layer-caching@v0.0.11
+        continue-on-error: true
+        with:
+          key: coordinator-docker-cache-${{ hashFiles('./Dockerfile') }}
+          restore-keys: |
+            coordinator-docker-cache-
+
+      - name: 'Compose Regtest Orchestration'
         uses: isbang/compose-action@v1.5.1
         with:
-          compose-file: "docker-test.yml"
-          env: "tests/compose.env"
+          compose-file: "./docker-tests.yml"
+          down-flags: "--volumes"
+          # Ideally we run only coordinator-${{ matrix.ln-vendor }}, at the moment some tests fail if LND is not around.
+          services: |
+            bitcoind
+            postgres
+            redis
+            coordinator-CLN
+            coordinator-LND
+            robot-LND
+            coordinator
+        env:
+          LND_VERSION: ${{ matrix.lnd-version }}
+          CLN_VERSION: ${{ matrix.cln-version }}
+          BITCOIND_VERSION: ${{ matrix.bitcoind-version }}
+          ROBOSATS_ENVS_FILE: ".env-sample"
 
-      # - name: 'Set up Python ${{ matrix.python-version }}'
-      #   uses: actions/setup-python@v4
-      #   with:
-      #     python-version: ${{ matrix.python-version }}
-
-      # - name: 'Cache pip dependencies'
-      #   uses: actions/cache@v3
-      #   with:
-      #     path: ~/.cache/pip
-      #     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-      #     restore-keys: |
-      #       ${{ runner.os }}-pip-
-
-      # - name: 'Install Python Dependencies'
-      #   run: |
-      #     python -m pip install --upgrade pip
-      #     pip install -r requirements.txt
-      #     pip install -r requirements_dev.txt
-
-      # - name: 'Install LND/CLN gRPC Dependencies'
-      #   run: bash ./scripts/generate_grpc.sh
-
-      # - name: 'Create .env File'
-      #   run: |
-      #     mv .env-sample .env
-      #     sed -i "s/USE_TOR=True/USE_TOR=False/" .env
-
-      # - name: 'Wait for PostgreSQL to become ready'
-      #   run: |
-      #     sudo apt-get install -y postgresql-client
-      #     until pg_isready -h localhost -p 5432 -U postgres; do sleep 2; done
+      - name: Wait for coordinator (django server)
+        run: |
+          while [ "$(docker inspect --format "{{.State.Health.Status}}" coordinator)" != "healthy" ]; do
+            echo "Waiting for coordinator to be healthy..."
+            sleep 5
+          done
 
       - name: 'Run tests with coverage'
         run: |
           docker exec coordinator coverage run manage.py test
           docker exec coordinator coverage report
+        env:
+          LNVENDOR: ${{ matrix.ln-vendor }}
+          DEVELOPMENT: True
+          USE_TOR: False
 
-# jobs:
-#   test:
-#     runs-on: ubuntu-latest
-#     steps:
-#       - uses: actions/checkout@v2
-#       - name: Run Docker Compose
-#         run: |
-#           docker-compose up -d
-#           docker-compose run web python manage.py test
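
The "Wait for coordinator" step above polls Docker's healthcheck from the shell. For local debugging, the same loop can be expressed in Python; a minimal sketch, assuming the Docker CLI is on PATH and the container is named coordinator as in docker-tests.yml:

import subprocess
import time


def wait_for_healthy(container: str = "coordinator", interval: float = 5.0) -> None:
    """Poll `docker inspect` until the container's healthcheck reports healthy."""
    while True:
        status = subprocess.run(
            ["docker", "inspect", "--format", "{{.State.Health.Status}}", container],
            capture_output=True,
            text=True,
            check=True,
        ).stdout.strip()
        if status == "healthy":
            return
        print(f"Waiting for {container} to be healthy... (status: {status})")
        time.sleep(interval)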

View File

@@ -26,7 +26,7 @@ jobs:
         with:
           python-version: '3.11.6'
           cache: pip
-      - run: pip install black==22.8.0 flake8==5.0.4 isort==5.10.1
+      - run: pip install requirements_dev.txt
       - name: Run linters
         uses: wearerequired/lint-action@v2
         with:

View File

@@ -38,8 +38,8 @@ jobs:
       fi
 
-  django-test:
-    uses: RoboSats/robosats/.github/workflows/django-test.yml@main
+  integration-tests:
+    uses: RoboSats/robosats/.github/workflows/integration-tests.yml@main
     needs: check-versions
 
   frontend-build:

View File

@@ -69,6 +69,16 @@ class CLNNode:
             print(f"Cannot get CLN version: {e}")
             return None
 
+    @classmethod
+    def get_info(cls):
+        try:
+            nodestub = node_pb2_grpc.NodeStub(cls.node_channel)
+            request = node_pb2.GetinfoRequest()
+            response = nodestub.Getinfo(request)
+            return response
+        except Exception as e:
+            print(f"Cannot get CLN node id: {e}")
+
     @classmethod
     def decode_payreq(cls, invoice):
         """Decodes a lightning payment request (invoice)"""

View File

@@ -28,8 +28,8 @@ class InfoSerializer(serializers.Serializer):
     lifetime_volume = serializers.FloatField(
         help_text="Total volume in BTC since exchange's inception"
     )
-    lnd_version = serializers.CharField(required=False)
-    cln_version = serializers.CharField(required=False)
+    lnd_version = serializers.CharField()
+    cln_version = serializers.CharField()
     robosats_running_commit_hash = serializers.CharField()
     alternative_site = serializers.CharField()
     alternative_name = serializers.CharField()

View File

@@ -21,8 +21,6 @@ from api.utils import (
     verify_signed_message,
     weighted_median,
 )
-from tests.mocks.cln import MockNodeStub
-from tests.mocks.lnd import MockVersionerStub
 
 
 class TestUtils(TestCase):
@@ -96,15 +94,13 @@ class TestUtils(TestCase):
         mock_response_blockchain.json.assert_called_once()
         mock_response_yadio.json.assert_called_once()
 
-    @patch("api.lightning.lnd.verrpc_pb2_grpc.VersionerStub", MockVersionerStub)
     def test_get_lnd_version(self):
         version = get_lnd_version()
-        self.assertEqual(version, "v0.17.0-beta")
+        self.assertTrue(isinstance(version, str))
 
-    @patch("api.lightning.cln.node_pb2_grpc.NodeStub", MockNodeStub)
     def test_get_cln_version(self):
         version = get_cln_version()
-        self.assertEqual(version, "v23.08")
+        self.assertTrue(isinstance(version, str))
 
     @patch(
         "builtins.open", new_callable=mock_open, read_data="00000000000000000000 dev"

View File

@@ -181,7 +181,7 @@ def get_lnd_version():
         return LNDNode.get_version()
     except Exception:
-        return None
+        return "No LND"
 
 
 cln_version_cache = {}
@@ -194,7 +194,7 @@ def get_cln_version():
         return CLNNode.get_version()
     except Exception:
-        return None
+        return "No CLN"
 
 
 robosats_commit_cache = {}

View File

@@ -2,20 +2,29 @@
 # docker-compose -f docker-tests.yml --env-file tests/compose.env up -d
 # Some useful handy commands that hopefully are never needed
 # docker exec -it btc bitcoin-cli -chain=regtest -rpcpassword=test -rpcuser=test createwallet default
 # docker exec -it btc bitcoin-cli -chain=regtest -rpcpassword=test -rpcuser=test -generate 101
-# docker exec -it coordinator-lnd lncli --network=regtest getinfo
-# docker exec -it robot-lnd lncli --network=regtest --rpcserver localhost:10010 getinfo
+# docker exec -it coordinator-LND lncli --network=regtest getinfo
+# docker exec -it robot-LND lncli --network=regtest --rpcserver localhost:10010 getinfo
 
 version: '3.9'
 services:
   bitcoind:
-    image: ruimarinho/bitcoin-core:${BITCOIND_TAG}
+    image: ruimarinho/bitcoin-core:${BITCOIND_VERSION:-24.0.1}-alpine
     container_name: btc
     restart: always
     ports:
       - "8000:8000"
+      - "8080:8080"
+      - "8081:8081"
+      - "10009:10009"
+      - "10010:10010"
+      - "9999:9999"
+      - "9998:9998"
+      - "5432:5432"
+      - "6379:6379"
     volumes:
       - bitcoin:/bitcoin/.bitcoin/
     command:
@@ -35,9 +44,9 @@ services:
       --zmqpubrawtx=tcp://0.0.0.0:28333
       --listenonion=0
 
-  coordinator-lnd:
-    image: lightninglabs/lnd:${LND_TAG}
-    container_name: coordinator-lnd
+  coordinator-LND:
+    image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
+    container_name: coordinator-LND
     restart: always
     volumes:
       - bitcoin:/root/.bitcoin/
@@ -47,11 +56,11 @@ services:
       --noseedbackup
       --nobootstrap
       --restlisten=localhost:8081
+      --no-rest-tls
       --debuglevel=debug
       --maxpendingchannels=10
       --rpclisten=0.0.0.0:10009
       --listen=0.0.0.0:9735
-      --no-rest-tls
       --color=#4126a7
       --alias=RoboSats
       --bitcoin.active
@@ -67,9 +76,24 @@ services:
       - bitcoind
     network_mode: service:bitcoind
 
-  robot-lnd:
-    image: lightninglabs/lnd:${LND_TAG}
-    container_name: robot-lnd
+  coordinator-CLN:
+    image: elementsproject/lightningd:${CLN_VERSION:-v23.08.1}
+    restart: always
+    container_name: coordinator-CLN
+    environment:
+      LIGHTNINGD_NETWORK: 'regtest'
+    volumes:
+      - cln:/root/.lightning
+      - ./docker/cln/plugins/cln-grpc-hold:/root/.lightning/plugins/cln-grpc-hold
+      - bitcoin:/root/.bitcoin
+    command: --regtest --wumbo --bitcoin-rpcuser=test --bitcoin-rpcpassword=test --rest-host=0.0.0.0 --bind-addr=127.0.0.1:9737 --grpc-port=9999 --grpc-hold-port=9998 --important-plugin=/root/.lightning/plugins/cln-grpc-hold --database-upgrade=true
+    depends_on:
+      - bitcoind
+    network_mode: service:bitcoind
+
+  robot-LND:
+    image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
+    container_name: robot-LND
     restart: always
     volumes:
       - bitcoin:/root/.bitcoin/
@@ -78,6 +102,7 @@ services:
     command:
       --noseedbackup
      --nobootstrap
+      --restlisten=localhost:8080
       --no-rest-tls
       --debuglevel=debug
       --maxpendingchannels=10
@@ -99,7 +124,7 @@ services:
     network_mode: service:bitcoind
 
   redis:
-    image: redis:${REDIS_TAG}
+    image: redis:${REDIS_VERSION:-7.2.1}-alpine
     container_name: redis
     restart: always
     volumes:
@@ -116,11 +141,11 @@ services:
       TESTING: True
       USE_TOR: False
       MACAROON_PATH: 'data/chain/bitcoin/regtest/admin.macaroon'
+      CLN_DIR: '/cln/regtest/'
     env_file:
       - ${ROBOSATS_ENVS_FILE}
     depends_on:
       - redis
-      - coordinator-lnd
       - postgres
     network_mode: service:bitcoind
     volumes:
@@ -128,76 +153,22 @@ services:
       - lnd:/lnd
       - lndrobot:/lndrobot
       - cln:/cln
+    healthcheck:
+      test: ["CMD", "curl", "localhost:8000"]
+      interval: 5s
+      timeout: 5s
+      retries: 3
 
   postgres:
-    image: postgres:${POSTGRES_TAG:-14.2-alpine}
+    image: postgres:${POSTGRES_VERSION:-14.2}-alpine
     container_name: sql
     restart: always
     environment:
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_DB: ${POSTGRES_DB}
+      POSTGRES_PASSWORD: 'example'
+      POSTGRES_USER: 'postgres'
+      POSTGRES_DB: 'postgres'
     network_mode: service:bitcoind
 
-  # clean-orders:
-  #   image: robosats-image
-  #   restart: always
-  #   container_name: clord
-  #   command: python3 manage.py clean_orders
-  #   environment:
-  #     SKIP_COLLECT_STATIC: "true"
-  #     POSTGRES_HOST: 'postgres'
-  #   env_file:
-  #     - ${ROBOSATS_ENVS_FILE}
-
-  # follow-invoices:
-  #   image: robosats-image
-  #   container_name: invo
-  #   restart: always
-  #   env_file:
-  #     - ${ROBOSATS_ENVS_FILE}
-  #   environment:
-  #     SKIP_COLLECT_STATIC: "true"
-  #     POSTGRES_HOST: 'postgres'
-  #   command: python3 manage.py follow_invoices
-
-  # telegram-watcher:
-  #   image: robosats-image
-  #   container_name: tg
-  #   restart: always
-  #   environment:
-  #     SKIP_COLLECT_STATIC: "true"
-  #     POSTGRES_HOST: 'postgres'
-  #   env_file:
-  #     - ${ROBOSATS_ENVS_FILE}
-  #   command: python3 manage.py telegram_watcher
-
-  # celery:
-  #   image: robosats-image
-  #   container_name: cele
-  #   restart: always
-  #   env_file:
-  #     - ${ROBOSATS_ENVS_FILE}
-  #   environment:
-  #     SKIP_COLLECT_STATIC: "true"
-  #     POSTGRES_HOST: 'postgres'
-  #   command: celery -A robosats worker --loglevel=WARNING
-  #   depends_on:
-  #     - redis
-
-  # celery-beat:
-  #   image: robosats-image
-  #   container_name: beat
-  #   restart: always
-  #   env_file:
-  #     - ${ROBOSATS_ENVS_FILE}
-  #   environment:
-  #     SKIP_COLLECT_STATIC: "true"
-  #     POSTGRES_HOST: 'postgres'
-  #   command: celery -A robosats beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
-  #   depends_on:
-  #     - redis
-
 volumes:
   redisdata:
   bitcoin:
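
Because every service runs with network_mode: service:bitcoind, the ports published on the btc container above are how the tests reach each component on localhost. A rough summary of that mapping, assumed from the published ports and the --restlisten/--rpclisten/--grpc-port flags (the dict and its names are illustrative, not part of the repo):

# Assumed endpoint map for the regtest stack defined above.
REGTEST_ENDPOINTS = {
    "coordinator_django": "http://localhost:8000",    # healthcheck target
    "robot_lnd_rest": "http://localhost:8080",        # robot-LND --restlisten
    "coordinator_lnd_rest": "http://localhost:8081",  # coordinator-LND --restlisten
    "coordinator_lnd_grpc": "localhost:10009",
    "robot_lnd_grpc": "localhost:10010",
    "coordinator_cln_grpc": "localhost:9999",
    "coordinator_cln_grpc_hold": "localhost:9998",
    "postgres": "localhost:5432",
    "redis": "localhost:6379",
}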

BIN  docker/cln/plugins/cln-grpc-hold  (new executable file; binary not shown)

View File

@@ -8,4 +8,5 @@ omit = [
     # omit test and mocks from coverage reports
     "tests/*",
     "*mocks*",
+    "manage.py",
 ]

View File

@@ -1,7 +1,7 @@
+coverage==7.3.2
 black==23.3.0
 isort==5.12.0
 flake8==6.1.0
 pyflakes==3.1.0
-coverage==7.3.2
 drf-openapi-tester==2.3.3
 pre-commit==3.5.0

View File

@@ -1161,10 +1161,12 @@ components:
         - alternative_site
         - bond_size
         - book_liquidity
+        - cln_version
         - current_swap_fee_rate
         - last_day_nonkyc_btc_premium
         - last_day_volume
         - lifetime_volume
+        - lnd_version
         - maker_fee
         - network
         - node_alias

View File

@@ -1,10 +1,7 @@
 ROBOSATS_ENVS_FILE=".env-sample"
-BITCOIND_TAG='24.0.1-alpine'
-LND_TAG='v0.17.0-beta'
-REDIS_TAG='7.2.1-alpine@sha256:7f5a0dfbf379db69dc78434091dce3220e251022e71dcdf36207928cbf9010de'
-POSTGRES_TAG='14.2-alpine'
-POSTGRES_DB='postgres'
-POSTGRES_USER='postgres'
-POSTGRES_PASSWORD='example'
+BITCOIND_VERSION='24.0.1'
+LND_VERSION='v0.17.0-beta'
+CLN_VERSION='v23.08.1'
+REDIS_VERSION='7.2.1'
+POSTGRES_VERSION='14.2'

View File

@@ -1,24 +1,35 @@
+import codecs
+import sys
+import time
+
 import requests
 from requests.auth import HTTPBasicAuth
 from requests.exceptions import ReadTimeout
 
+wait_step = 0.2
+
 
 def get_node(name="robot"):
     """
     We have two regtest LND nodes: "coordinator" (the robosats backend) and "robot" (the robosats user)
     """
     if name == "robot":
-        with open("/lndrobot/data/chain/bitcoin/regtest/admin.macaroon", "rb") as f:
-            macaroon = f.read()
-        return {"port": 8080, "headers": {"Grpc-Metadata-macaroon": macaroon.hex()}}
+        macaroon = codecs.encode(
+            open("/lndrobot/data/chain/bitcoin/regtest/admin.macaroon", "rb").read(),
+            "hex",
+        )
+        port = 8080
     elif name == "coordinator":
-        with open("/lnd/data/chain/bitcoin/regtest/admin.macaroon", "rb") as f:
-            macaroon = f.read()
-        return {"port": 8081, "headers": {"Grpc-Metadata-macaroon": macaroon.hex()}}
+        macaroon = codecs.encode(
+            open("/lnd/data/chain/bitcoin/regtest/admin.macaroon", "rb").read(), "hex"
+        )
+        port = 8081
+
+    return {"port": port, "headers": {"Grpc-Metadata-macaroon": macaroon}}
 
 
-def get_node_id(node_name):
+def get_lnd_node_id(node_name):
     node = get_node(node_name)
     response = requests.get(
         f'http://localhost:{node["port"]}/v1/getinfo', headers=node["headers"]
@@ -27,13 +38,99 @@ def get_node_id(node_name):
     return data["identity_pubkey"]
 
 
+def get_cln_node_id():
+    from api.lightning.cln import CLNNode
+
+    response = CLNNode.get_info()
+    return response.id.hex()
+
+
+def wait_for_lnd_node_sync(node_name):
+    node = get_node(node_name)
+    waited = 0
+    while True:
+        response = requests.get(
+            f'http://localhost:{node["port"]}/v1/getinfo', headers=node["headers"]
+        )
+        if response.json()["synced_to_chain"]:
+            return
+        else:
+            sys.stdout.write(
+                f"\rWaiting for {node_name} node chain sync {round(waited,1)}s"
+            )
+            sys.stdout.flush()
+            waited += wait_step
+            time.sleep(wait_step)
+
+
+def wait_for_lnd_active_channels(node_name):
+    node = get_node(node_name)
+    waited = 0
+    while True:
+        response = requests.get(
+            f'http://localhost:{node["port"]}/v1/getinfo', headers=node["headers"]
+        )
+        if response.json()["num_active_channels"] > 0:
+            return
+        else:
+            sys.stdout.write(
+                f"\rWaiting for {node_name} node channels to be active {round(waited,1)}s"
+            )
+            sys.stdout.flush()
+            waited += wait_step
+            time.sleep(wait_step)
+
+
+def wait_for_cln_node_sync():
+    from api.lightning.cln import CLNNode
+
+    waited = 0
+    while True:
+        response = CLNNode.get_info()
+        if response.warning_bitcoind_sync or response.warning_lightningd_sync:
+            sys.stdout.write(
+                f"\rWaiting for coordinator CLN node sync {round(waited,1)}s"
+            )
+            sys.stdout.flush()
+            waited += wait_step
+            time.sleep(wait_step)
+        else:
+            return
+
+
+def wait_for_cln_active_channels():
+    from api.lightning.cln import CLNNode
+
+    waited = 0
+    while True:
+        response = CLNNode.get_info()
+        if response.num_active_channels > 0:
+            return
+        else:
+            sys.stdout.write(
+                f"\rWaiting for coordinator CLN node channels to be active {round(waited,1)}s"
+            )
+            sys.stdout.flush()
+            waited += wait_step
+            time.sleep(wait_step)
+
+
 def connect_to_node(node_name, node_id, ip_port):
     node = get_node(node_name)
     data = {"addr": {"pubkey": node_id, "host": ip_port}}
-    response = requests.post(
-        f'http://localhost:{node["port"]}/v1/peers', json=data, headers=node["headers"]
-    )
-    return response.json()
+    while True:
+        response = requests.post(
+            f'http://localhost:{node["port"]}/v1/peers',
+            json=data,
+            headers=node["headers"],
+        )
+        if response.json() == {}:
+            return response.json()
+        else:
+            if "already connected to peer" in response.json()["message"]:
+                return response.json()
+            print(f"Could not connect to coordinator node: {response.json()}")
+            time.sleep(wait_step)
 
 
 def open_channel(node_name, node_id, local_funding_amount, push_sat):
@@ -60,6 +157,7 @@ def create_address(node_name):
 
 
 def generate_blocks(address, num_blocks):
+    print(f"Mining {num_blocks} blocks")
     data = {
         "jsonrpc": "1.0",
         "id": "curltest",

View File

@@ -44,8 +44,8 @@ class CoordinatorInfoTest(BaseAPITestCase):
         self.assertEqual(data["last_day_nonkyc_btc_premium"], 0)
         self.assertEqual(data["last_day_volume"], 0)
         self.assertEqual(data["lifetime_volume"], 0)
-        self.assertEqual(data["lnd_version"], "v0.17.0-beta")
-        self.assertEqual(data["cln_version"], "v23.08")
+        self.assertTrue(isinstance(data["lnd_version"], str))
+        self.assertTrue(isinstance(data["cln_version"], str))
         self.assertEqual(
             data["robosats_running_commit_hash"], "00000000000000000000 dev"
         )

View File

@@ -14,12 +14,19 @@ from tests.node_utils import (
     connect_to_node,
     create_address,
     generate_blocks,
-    get_node_id,
+    get_cln_node_id,
+    get_lnd_node_id,
     open_channel,
     pay_invoice,
+    wait_for_cln_active_channels,
+    wait_for_cln_node_sync,
+    wait_for_lnd_active_channels,
+    wait_for_lnd_node_sync,
 )
 from tests.test_api import BaseAPITestCase
 
+LNVENDOR = config("LNVENDOR", cast=str, default="LND")
+
 
 def read_file(file_path):
     """
@@ -49,6 +56,20 @@ class TradeTest(BaseAPITestCase):
         "longitude": 135.503,
     }
 
+    def wait_nodes_sync():
+        wait_for_lnd_node_sync("robot")
+        if LNVENDOR == "LND":
+            wait_for_lnd_node_sync("coordinator")
+        elif LNVENDOR == "CLN":
+            wait_for_cln_node_sync()
+
+    def wait_active_channels():
+        wait_for_lnd_active_channels("robot")
+        if LNVENDOR == "LND":
+            wait_for_lnd_active_channels("coordinator")
+        elif LNVENDOR == "CLN":
+            wait_for_cln_active_channels()
+
     @classmethod
     def setUpTestData(cls):
         """
@@ -61,19 +82,32 @@ class TradeTest(BaseAPITestCase):
         cache_market()
 
         # Fund two LN nodes in regtest and open channels
-        coordinator_node_id = get_node_id("coordinator")
-        connect_to_node("robot", coordinator_node_id, "localhost:9735")
+        # Coordinator is either LND or CLN. Robot user is always LND.
+        if LNVENDOR == "LND":
+            coordinator_node_id = get_lnd_node_id("coordinator")
+            coordinator_port = 9735
+        elif LNVENDOR == "CLN":
+            coordinator_node_id = get_cln_node_id()
+            coordinator_port = 9737
+        print("Coordinator Node ID: ", coordinator_node_id)
 
         funding_address = create_address("robot")
         generate_blocks(funding_address, 101)
+        cls.wait_nodes_sync()
 
-        time.sleep(
-            2
-        )  # channels cannot be created until the node is fully sync. We just created 101 blocks.
+        # Open channel between Robot user and coordinator
+        print(f"\nOpening channel from Robot user node to coordinator {LNVENDOR} node")
+        connect_to_node("robot", coordinator_node_id, f"localhost:{coordinator_port}")
         open_channel("robot", coordinator_node_id, 100_000_000, 50_000_000)
 
-        # Generate 6 blocks so the channel becomes active
-        generate_blocks(funding_address, 6)
+        # Generate 10 blocks so the channel becomes active and wait for sync
+        generate_blocks(funding_address, 10)
+
+        # Wait a tiny bit so payments can be done in the new channel
+        cls.wait_nodes_sync()
+        cls.wait_active_channels()
+        time.sleep(1)
 
     def test_login_superuser(self):
""" """