2022-07-31 14:35:12 +00:00
|
|
|
import json
|
2022-10-20 20:53:51 +00:00
|
|
|
import logging
|
2022-07-31 14:35:12 +00:00
|
|
|
import os
|
|
|
|
|
2022-01-16 15:18:23 +00:00
|
|
|
import numpy as np
|
2022-10-20 20:53:51 +00:00
|
|
|
import requests
|
|
|
|
import ring
|
2022-07-31 14:35:12 +00:00
|
|
|
from decouple import config
|
|
|
|
|
2022-01-18 18:24:45 +00:00
|
|
|
from api.models import Order
|
2022-01-10 01:12:58 +00:00
|
|
|
|
2022-10-20 09:56:10 +00:00
|
|
|
# Module-level logger for utility helpers.
logger = logging.getLogger("api.utils")

# SOCKS5 proxy host:port used to route outbound requests through Tor.
TOR_PROXY = config("TOR_PROXY", default="127.0.0.1:9050")
# Whether outbound HTTP requests are proxied through Tor (enabled by default).
USE_TOR = config("USE_TOR", cast=bool, default=True)
|
2022-07-31 14:35:12 +00:00
|
|
|
|
2022-08-03 21:21:02 +00:00
|
|
|
|
2022-08-08 15:58:06 +00:00
|
|
|
def get_session():
    """
    Build a requests session, routed through the Tor SOCKS proxy
    when USE_TOR is enabled.
    """
    new_session = requests.session()

    # Tor uses the 9050 port as the default socks port
    if USE_TOR:
        proxy_url = "socks5://" + TOR_PROXY
        new_session.proxies = {scheme: proxy_url for scheme in ("http", "https")}

    return new_session
|
2022-02-17 19:50:10 +00:00
|
|
|
|
2022-07-31 14:35:12 +00:00
|
|
|
|
|
|
|
def bitcoind_rpc(method, params=None):
    """
    Makes a RPC call to bitcoin core daemon

    :param method: RPC method to call
    :param params: list of params required by the calling RPC method
    :return:
    """
    # Credentials are read at call time so environment changes are picked up.
    rpc_url = config("BITCOIND_RPCURL")
    rpc_user = config("BITCOIND_RPCUSER")
    rpc_password = config("BITCOIND_RPCPASSWORD")

    payload = json.dumps(
        {
            "jsonrpc": "2.0",
            "id": "robosats",
            "method": method,
            "params": [] if params is None else params,
        }
    )

    response = requests.post(rpc_url, auth=(rpc_user, rpc_password), data=payload)
    return response.json()["result"]
|
2022-07-31 14:35:12 +00:00
|
|
|
|
|
|
|
|
2022-06-06 17:57:04 +00:00
|
|
|
def validate_onchain_address(address):
    """
    Validates an onchain address
    """
    # The "isvalid" lookup stays inside the try so a malformed RPC response
    # is reported as a backend problem rather than crashing the caller.
    try:
        validation = bitcoind_rpc("validateaddress", [address])
        address_is_valid = bool(validation["isvalid"])
    except Exception as e:
        logger.error(e)
        return False, {
            "bad_address": "Unable to validate address, check bitcoind backend"
        }

    if not address_is_valid:
        return False, {"bad_address": "Invalid address"}

    return True, None
|
|
|
|
|
2022-06-06 17:57:04 +00:00
|
|
|
|
2022-02-22 00:50:25 +00:00
|
|
|
# Short-lived cache shared with the ring decorator below so concurrent
# requests do not hammer the public price APIs.
market_cache = {}


@ring.dict(market_cache, expire=3) # keeps in cache for 3 seconds
def get_exchange_rates(currencies):
    """
    Params: list of currency codes.
    Checks for exchange rates in several public APIs.
    Returns the median price list.

    :param currencies: list of currency codes (e.g. ["USD", "EUR"]).
    :return: list of floats (median BTC price per currency across APIs),
        or None when every API was unreachable. Entries may be NaN when
        no API quoted that currency.
    """
    session = get_session()

    # Comma-separated endpoints; only blockchain.info and yadio.io
    # style URLs are understood below.
    APIS = config("MARKET_PRICE_APIS", cast=lambda v: [s.strip() for s in v.split(",")])

    # One row per API; each row holds one rate per requested currency.
    api_rates = []
    for api_url in APIS:
        try: # If one API is unavailable pass
            if "blockchain.info" in api_url:
                blockchain_prices = session.get(api_url).json()
                blockchain_rates = []
                for currency in currencies:
                    try: # If a currency is missing place a None
                        blockchain_rates.append(
                            float(blockchain_prices[currency]["last"])
                        )
                    except Exception:
                        # NaN placeholder keeps row lengths aligned for nanmedian.
                        blockchain_rates.append(np.nan)
                api_rates.append(blockchain_rates)

            elif "yadio.io" in api_url:
                yadio_prices = session.get(api_url).json()
                yadio_rates = []
                for currency in currencies:
                    try:
                        yadio_rates.append(float(yadio_prices["BTC"][currency]))
                    except Exception:
                        yadio_rates.append(np.nan)
                api_rates.append(yadio_rates)
        except Exception:
            pass

    if len(api_rates) == 0:
        return None # Wops there is not API available!

    # Median across APIs, ignoring NaN entries for currencies an API lacked.
    exchange_rates = np.array(api_rates)
    median_rates = np.nanmedian(exchange_rates, axis=0)

    return median_rates.tolist()
|
2022-01-11 14:36:43 +00:00
|
|
|
|
2022-02-17 19:50:10 +00:00
|
|
|
|
2023-04-22 18:54:03 +00:00
|
|
|
# Cache for the LN node version; refreshed at most once per hour.
lnd_version_cache = {}


@ring.dict(lnd_version_cache, expire=3600)
def get_lnd_version():
    """
    Return the lightning node's version string as reported by the backend.

    :return: version string from LNNode.get_version().
    """
    # Imported lazily to avoid a circular import between api.utils and
    # api.lightning.node at module load time.
    from api.lightning.node import LNNode

    # Fix: the previous code printed the version (debug leftover) and then
    # queried the node a second time; query once and return the result.
    return LNNode.get_version()
|
2022-02-17 19:50:10 +00:00
|
|
|
|
2022-01-11 14:36:43 +00:00
|
|
|
|
|
|
|
# Cache for the running code's git commit hash; refreshed hourly.
robosats_commit_cache = {}


@ring.dict(robosats_commit_cache, expire=3600)
def get_robosats_commit():
    """
    Return the git commit hash of the running code.

    :return: hash string from `git log`, or the contents of commit_sha.txt
        when the .git folder is not present in the deployment.
    """
    # Fix: close the pipe via a context manager; the previous bare
    # os.popen() leaked the file handle on every cache refresh.
    with os.popen('git log -n 1 --pretty=format:"%H"') as commit:
        commit_hash = commit.read()

    # .git folder is included in .dockerignore. But automatic build will drop in a commit_sha.txt file on root
    if commit_hash is None or commit_hash == "":
        with open("commit_sha.txt") as f:
            commit_hash = f.read()

    return commit_hash
|
2022-01-18 18:24:45 +00:00
|
|
|
|
2022-10-20 09:56:10 +00:00
|
|
|
|
2022-09-20 17:39:49 +00:00
|
|
|
# Long-lived cache for the parsed contents of version.json.
robosats_version_cache = {}


# Fix: cache into robosats_version_cache; the original decorator mistakenly
# reused robosats_commit_cache, leaving robosats_version_cache unused.
@ring.dict(robosats_version_cache, expire=99999)
def get_robosats_version():
    """
    Return the version dict parsed from version.json in the project root.

    :return: dict loaded from version.json.
    """
    with open("version.json") as f:
        version_dict = json.load(f)

    return version_dict
|
|
|
|
|
2022-10-20 09:56:10 +00:00
|
|
|
|
2022-01-18 18:24:45 +00:00
|
|
|
# Five-minute cache for premium percentile computations.
premium_percentile = {}


@ring.dict(premium_percentile, expire=300)
def compute_premium_percentile(order):
    """
    Rank `order`'s sats-per-unit rate against the other public orders of the
    same currency and type. Returns the fraction (0..1, two decimals) of
    peers priced strictly below it, or 0.5 when there is no meaningful peer set.
    """

    def rate_of(o):
        # Range orders quote their rate against the maximum amount.
        reference_amount = o.max_amount if o.has_range else o.amount
        return float(o.last_satoshis) / float(reference_amount)

    peers = Order.objects.filter(
        currency=order.currency, status=Order.Status.PUB, type=order.type
    ).exclude(id=order.id)

    # With at most one other order a percentile is not informative.
    if len(peers) <= 1:
        return 0.5

    own_rate = rate_of(order)
    peer_rates = np.array([rate_of(peer) for peer in peers])

    return round(np.sum(peer_rates < own_rate) / len(peer_rates), 2)
|
|
|
|
|
|
|
|
|
2022-10-20 09:56:10 +00:00
|
|
|
def weighted_median(values, sample_weight=None, quantiles=0.5, values_sorted=False):
    """Very close to numpy.percentile, but it supports weights.

    NOTE: quantiles should be in [0, 1]!
    :param values: numpy.array with data
    :param quantiles: array-like with many quantiles needed. For weighted median 0.5
    :param sample_weight: array-like of the same length as `array`
    :param values_sorted: bool, if True, then will avoid sorting of
    initial array assuming array is already sorted
    :return: numpy.array with computed quantiles.
    """
    data = np.array(values)
    wanted_quantiles = np.array(quantiles)
    weights = np.ones(len(data)) if sample_weight is None else np.array(sample_weight)

    assert np.all(wanted_quantiles >= 0) and np.all(
        wanted_quantiles <= 1
    ), "quantiles should be in [0, 1]"

    if not values_sorted:
        order = np.argsort(data)
        data = data[order]
        weights = weights[order]

    # Midpoint cumulative weights, rescaled to span [0, 1].
    positions = np.cumsum(weights) - 0.5 * weights
    positions = (positions - positions[0]) / (positions[-1] - positions[0])

    return np.interp(wanted_quantiles, positions, data)
|
|
|
|
|
2022-10-20 09:56:10 +00:00
|
|
|
|
2022-03-12 11:24:11 +00:00
|
|
|
def compute_avg_premium(queryset):
    """
    Aggregate a ticks queryset into a (volume-weighted median premium,
    total volume) pair.
    """
    # We exclude BTC, as LN <-> BTC swap premiums should not be mixed with FIAT.
    fiat_ticks = queryset.exclude(currency=1000)

    premiums = [float(tick.premium) for tick in fiat_ticks]
    volumes = [float(tick.volume) for tick in fiat_ticks]

    total_volume = sum(volumes)

    # weighted_median_premium is the weighted median of the premiums by volume
    if premiums and volumes:
        weighted_median_premium = weighted_median(
            values=premiums, sample_weight=volumes, quantiles=0.5, values_sorted=False
        )
    else:
        weighted_median_premium = 0.0

    return weighted_median_premium, total_volume
|