Mirror of https://github.com/RoboSats/robosats.git (synced 2025-02-21 12:49:02 +00:00)

Commit b034ab1b19: Merge branch 'main' into add-new-translation-portuguese

.env-sample (13 lines changed)
@@ -45,7 +45,7 @@ REDIS_URL='redis://localhost:6379/1'
# List of market price public APIs. If the currency is available in more than 1 API, will use median price.
MARKET_PRICE_APIS = https://blockchain.info/ticker, https://api.yadio.io/exrates/BTC, https://bitpay.com/rates/BTC, https://criptoya.com/api/btc

# Host e.g. robosats.com
# Host e.g. robosats.org
HOST_NAME = ''
HOST_NAME2 = ''
I2P_ALIAS = ''
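The comment above says the coordinator takes the median when more than one of the configured APIs quotes a currency. A minimal illustrative sketch of that aggregation (the real fetching and per-API parsing code is not part of this diff; the quotes below are made up):

```python
from statistics import median

def median_market_price(prices_by_api: dict[str, float | None]) -> float | None:
    """Median of the quotes from the APIs that answered; None if none did."""
    quotes = [p for p in prices_by_api.values() if p is not None]
    return median(quotes) if quotes else None

# Example with made-up quotes: three of the four configured APIs answered.
print(median_market_price({
    "blockchain.info": 61_250.0,
    "yadio.io": 61_180.0,
    "bitpay.com": 61_400.0,
    "criptoya.com": None,  # an unreachable API is simply skipped
}))  # -> 61250.0
```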
@@ -58,8 +58,14 @@ SECRET_KEY = 'django-insecure-6^&6uw$b5^en%(cu2kc7_o)(mgpazx#j_znwlym0vxfamn2uo-
# e.g. robotestagw3dcxmd66r4rgksb4nmmr43fh77bzn2ia2eucduyeafnyd.onion
ONION_LOCATION = ''

# Geoblocked countries (will reject F2F trades).
# List of A3 country codes (see fhttps://en.wikipedia.org/wiki/ISO_3166-1_alpha-3)
# Leave empty '' to allow all countries.
# Example 'NOR,USA,CZE'.
GEOBLOCKED_COUNTRIES = 'ABW,AFG,AGO'

# Link to robosats alternative site (shown in frontend in statsfornerds so users can switch mainnet/testnet)
ALTERNATIVE_SITE = 'RoboSats6tkf3eva7x2voqso3a5wcorsnw34jveyxfqi2fu7oyheasid.onion'
ALTERNATIVE_SITE = 'RoboSatsy56bwqn56qyadmcxkx767hnabg4mihxlmgyt6if5gnuxvzad.onion/offers/'
ALTERNATIVE_NAME = 'RoboSats Mainnet'

# Telegram bot token

@@ -169,3 +175,6 @@ SLASHED_BOND_REWARD_SPLIT = 0.5
# Username for HTLCs escrows
ESCROW_USERNAME = 'admin'

#Social
NOSTR_NSEC = 'nsec1vxhs2zc4kqe0dhz4z2gfrdyjsrwf8pg3neeqx6w4nl8djfzdp0dqwd6rxh'
@@ -40,7 +40,7 @@ body:
"Rule #2": "You DO NOT talk about RoboSats Club",
},
"mainnet": {
"onion": "http://robosats6tkf3eva7x2voqso3a5wcorsnw34jveyxfqi2fu7oyheasid.onion",
"onion": "http://robosatsy56bwqn56qyadmcxkx767hnabg4mihxlmgyt6if5gnuxvzad.onion",
"clearnet": "https://coordinator-address.com"
"i2p": "http:///.........b32.i2p"
},
@@ -104,8 +104,8 @@ body:
id: devfund
attributes:
label: DevFund Donations
description: |
What percentage of trade revenue will you donate to the RoboSats Development Fund?
description: |
What percentage of trade revenue will you donate to the RoboSats Development Fund?
placeholder: e.g. 20%
validations:
required: true
@@ -113,9 +113,9 @@ body:
id: pgp
attributes:
label: "PGP Public Key and Fingerprint"
description: |
description: |
Your permanent identity. If needed, you may have to verify your identity to robots or other coordinators by signing a message.
placeholder: |
placeholder: |
e.g. Pubkey repo: https://keys.openpgp.org//vks/v1/by-fingerprint/...
Fingerprint: 1234 5678 90AB CDEF 1234 5678 90AB CDEF 1234
validations:
@@ -170,7 +170,7 @@ body:
attributes:
label: Onion Mainnet Coordinator Service Address
description: The Onion hidden service where your MAINNET RoboSats coordinator API can be found.
placeholder: e.g. http://robosats6tkf3eva7x2voqso3a5wcorsnw34jveyxfqi2fu7oyheasid.onion
placeholder: e.g. http://robosatsy56bwqn56qyadmcxkx767hnabg4mihxlmgyt6if5gnuxvzad.onion
validations:
required: true
- type: input
@@ -242,7 +242,7 @@ body:
attributes:
value: |
## Contact Methods
Only one contact method is required, but multiple are preferred. Find a balance between accessibility and operational security (too many contact methods increase the surface area and might harm OpSec).
Only one contact method is required, but multiple are preferred. Find a balance between accessibility and operational security (too many contact methods increase the surface area and might harm OpSec).
Contact methods below are sorted by importance.
- type: input
id: email
.github/workflows/android-build.yml (59 lines changed, vendored)

@@ -6,6 +6,15 @@ on:
semver:
required: true
type: string
secrets:
KEYSTORE:
required: true
KEY_ALIAS:
required: true
KEY_PASS:
required: true
KEY_STORE_PASS:
required: true
push:
branches: [ "main" ]
paths: [ "mobile", "frontend" ]
@@ -23,7 +32,7 @@ jobs:
- name: 'Download Android Web.bundle Artifact (built frontend)'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
uses: dawidd6/action-download-artifact@v7
with:
workflow: frontend-build.yml
workflow_conclusion: success
@@ -54,10 +63,37 @@ jobs:
- name: Setup Gradle
uses: gradle/gradle-build-action@v3

- name: Decode Keystore
id: decode_keystore
uses: timheuer/base64-to-file@v1.2
with:
fileName: 'keystore.jks'
fileDir: './'
encodedString: ${{ secrets.KEYSTORE }}

- name: 'Build Android Release'
run: |
cd mobile/android
./gradlew assembleRelease
env:
KEY_ALIAS: ${{ secrets.KEY_ALIAS }}
KEY_PASS: ${{ secrets.KEY_PASS }}
KEY_STORE_PASS: ${{ secrets.KEY_STORE_PASS }}

- name: 'Check for non-FOSS libraries'
run: |
wget https://github.com/iBotPeaches/Apktool/releases/download/v2.7.0/apktool_2.7.0.jar
wget https://github.com/iBotPeaches/Apktool/raw/master/scripts/linux/apktool
# clone the repo
git clone https://gitlab.com/IzzyOnDroid/repo.git
# create a directory for Apktool and move the apktool* files there
mkdir -p repo/lib/radar/tool
mv apktool* repo/lib/radar/tool
# create an alias for ease of use
chmod u+x repo/lib/radar/tool/apktool
mv repo/lib/radar/tool/apktool_2.7.0.jar repo/lib/radar/tool/apktool.jar
repo/bin/scanapk.php mobile/android/app/build/outputs/apk/release/app-universal-release.apk

- name: 'Get Commit Hash'
id: commit
@@ -96,14 +132,6 @@ jobs:
name: robosats-${{ inputs.semver }}-x86_64.apk
path: mobile/android/app/build/outputs/apk/release/app-x86_64-release.apk

# Create app-x86-release APK artifact asset for Release
- name: 'Upload x86 .apk Release Artifact (for Release)'
uses: actions/upload-artifact@v4
if: inputs.semver != '' # If this workflow is called from release.yml
with:
name: robosats-${{ inputs.semver }}-x86.apk
path: mobile/android/app/build/outputs/apk/release/app-x86-release.apk

- name: 'Create Pre-release'
id: create_release
if: inputs.semver == '' # only if this workflow is not called from a push to tag (a Release)
@@ -164,16 +192,3 @@ jobs:
asset_path: ./mobile/android/app/build/outputs/apk/release/app-x86_64-release.apk
asset_name: robosats-${{ steps.commit.outputs.short }}-x86_64.apk
asset_content_type: application/apk

# Upload x86 APK to pre-release
- name: 'Upload x86 Pre-release APK Asset'
id: upload-release-x86-apk-asset
if: inputs.semver == '' # only if this workflow is not called from a push to tag (a Release)
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./mobile/android/app/build/outputs/apk/release/app-x86-release.apk
asset_name: robosats-${{ steps.commit.outputs.short }}-x86.apk
asset_content_type: application/apk
.github/workflows/coordinator-image.yml (28 lines changed, vendored)

@@ -17,35 +17,19 @@ jobs:
- name: 'Download Basic main.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
uses: dawidd6/action-download-artifact@v7
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-main-js
path: frontend/static/frontend/
name: django-main-static
path: frontend

- name: 'Download Basic main.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-main-js
path: frontend/static/frontend/

- name: 'Download pro.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-pro-js
path: frontend/static/frontend/

- name: 'Download pro.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-pro-js
path: frontend/static/frontend/
name: django-main-static
path: frontend

- name: 'Log in to Docker Hub'
uses: docker/login-action@v3
@@ -75,7 +59,7 @@ jobs:
echo ${{ steps.commit.outputs.long }}>"commit_sha"

- name: 'Build and push Docker image'
uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with:
context: .
push: true
.github/workflows/desktop-build.yml (new file, 128 lines, vendored)
@ -0,0 +1,128 @@
|
||||
name: "Build: Desktop"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
workflow_call:
|
||||
inputs:
|
||||
semver:
|
||||
required: true
|
||||
type: string
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
paths: [ "desktopApp", "frontend" ]
|
||||
pull_request:
|
||||
branches: [ "main" ]
|
||||
paths: [ "desktopApp", "frontend" ]
|
||||
|
||||
jobs:
|
||||
build-desktop:
|
||||
permissions: write-all
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '16'
|
||||
|
||||
- name: 'Download Basic main.js Artifact'
|
||||
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
|
||||
uses: dawidd6/action-download-artifact@v7
|
||||
with:
|
||||
workflow: frontend-build.yml
|
||||
workflow_conclusion: success
|
||||
name: desktop-main-static
|
||||
path: desktopApp
|
||||
|
||||
- name: 'Download Basic main.js Artifact for a release'
|
||||
if: inputs.semver != '' # Only if fired as job in release.yml
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: desktop-main-static
|
||||
path: desktopApp
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd desktopApp
|
||||
npm install
|
||||
|
||||
- name: Build for macOS
|
||||
run: |
|
||||
cd desktopApp
|
||||
npm run package-mac
|
||||
|
||||
- name: Build for Windows
|
||||
run: |
|
||||
cd desktopApp
|
||||
npm run package-win
|
||||
|
||||
- name: Build for Linux
|
||||
run: |
|
||||
cd desktopApp
|
||||
npm run package-linux
|
||||
|
||||
- name: 'Get Commit Hash'
|
||||
id: commit
|
||||
uses: pr-mpt/actions-commit-hash@v3
|
||||
|
||||
- name: Print semver
|
||||
run: echo The semver is ${{ github.event.inputs.semver }}
|
||||
|
||||
- name: Upload macOS Build Artifact
|
||||
if: inputs.semver != ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-darwin-x64
|
||||
name: robosats-desktop-${{ inputs.semver }}-mac-darwin-x64.zip
|
||||
|
||||
- name: Upload Windows Build Artifact
|
||||
if: inputs.semver != ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-win32-ia32
|
||||
name: robosats-desktop-${{ inputs.semver }}-win32-ia32.zip
|
||||
|
||||
- name: Upload Linux Build Artifact
|
||||
if: inputs.semver != ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-linux-x64
|
||||
name: robosats-desktop-${{ inputs.semver }}-linux-x64.zip
|
||||
|
||||
- name: Upload macOS Build Artifact
|
||||
id: upload-release-mac-zip-asset
|
||||
if: inputs.semver == ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-darwin-x64
|
||||
name: robosats-desktop-${{ steps.commit.outputs.short }}-mac-darwin-x64.zip
|
||||
|
||||
- name: Upload Windows Build Artifact
|
||||
id: upload-release-win-zip-asset
|
||||
if: inputs.semver == ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-win32-ia32
|
||||
name: robosats-desktop-${{ steps.commit.outputs.short }}-win32-ia32.zip
|
||||
|
||||
- name: Upload Linux Build Artifact
|
||||
id: upload-release-linux-zip-asset
|
||||
if: inputs.semver == ''
|
||||
uses: actions/upload-artifact@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
path: desktopApp/release-builds/Robosats-linux-x64
|
||||
name: robosats-desktop-${{ steps.commit.outputs.short }}-linux-x64.zip
|
.github/workflows/frontend-build.yml (32 lines changed, vendored)

@@ -50,34 +50,34 @@ jobs:
export NODE_OPTIONS="--max-old-space-size=4096"
cd frontend
npm run build
- name: 'Archive Web Basic Build Results'
- name: 'Archive Django Static Build Results'
uses: actions/upload-artifact@v4
with:
name: web-main-js
name: django-main-static
path: |
frontend/static/frontend/*main.js
frontend/static/frontend/*.wasm
- name: 'Archive Web Basic Selfhosted Build Results'
frontend/static
frontend/templates/frontend/*.html
- name: 'Archive Node App Static Build Results'
uses: actions/upload-artifact@v4
with:
name: web-basic-selfhosted-js
name: nodeapp-main-static
path: |
frontend/static/frontend/*basic.selfhosted.js
frontend/static/frontend/*.wasm
- name: 'Archive Web PRO Build Results'
nodeapp/static
nodeapp/*.html
- name: 'Archive Desktop App Static Build Results'
uses: actions/upload-artifact@v4
with:
name: web-pro-js
name: desktop-main-static
path: |
frontend/static/frontend/*pro.js
frontend/static/frontend/*.wasm
- name: 'Archive Web PRO SelhostedBuild Results'
desktopApp/static
desktopApp/*.html
- name: 'Archive Django Static Build Results'
uses: actions/upload-artifact@v4
with:
name: web-pro-selfhosted-js
name: web-main-static
path: |
frontend/static/frontend/*pro.selfhosted.js
frontend/static/frontend/*.wasm
web/static
web/*.html
- name: 'Archive Mobile Build Results'
uses: actions/upload-artifact@v4
with:
.github/workflows/integration-tests.yml (26 lines changed, vendored)

@@ -17,18 +17,27 @@ concurrency:
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 30
strategy:
max-parallel: 2
matrix:
python-tag: ['3.11.6-slim-bookworm', '3.12.1-slim-bookworm']
lnd-version: ['v0.17.3-beta']
cln-version: ['v23.11.2']
ln-vendor: ['LND'] #, 'CLN']
python-tag: ['3.12.3-slim-bookworm']
lnd-version: ['v0.18.2-beta']
cln-version: ['v24.08']
ln-vendor: ['LND', 'CLN']

steps:
- name: 'Checkout'
uses: actions/checkout@v4

- name: 'Download static files Artifact'
uses: dawidd6/action-download-artifact@v7
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: django-main-static
path: frontend

- name: Patch Dockerfile and .env-sample
run: |
sed -i "1s/FROM python:.*/FROM python:${{ matrix.python-tag }}/" Dockerfile
@@ -56,21 +65,20 @@ jobs:
env:
LND_VERSION: ${{ matrix.lnd-version }}
CLN_VERSION: ${{ matrix.cln-version }}
BITCOIND_VERSION: ${{ matrix.bitcoind-version }}
ROBOSATS_ENVS_FILE: ".env-sample"

- name: Wait for coordinator (django server)
run: |
while [ "$(docker inspect --format "{{.State.Health.Status}}" coordinator)" != "healthy" ]; do
while [ "$(docker inspect --format "{{.State.Health.Status}}" test-coordinator)" != "healthy" ]; do
echo "Waiting for coordinator to be healthy..."
sleep 5
done

- name: 'Run tests with coverage'
run: |
docker exec coordinator coverage run manage.py test
docker exec coordinator coverage report
docker exec coordinator coverage html
docker exec test-coordinator coverage run manage.py test
docker exec test-coordinator coverage report
docker exec test-coordinator coverage html
env:
LNVENDOR: ${{ matrix.ln-vendor }}
DEVELOPMENT: True
.github/workflows/release.yml (60 lines changed, vendored)

@@ -71,11 +71,19 @@ jobs:
android-build:
uses: RoboSats/robosats/.github/workflows/android-build.yml@main
needs: [frontend-build, check-versions]
secrets: inherit
with:
semver: ${{ needs.check-versions.outputs.semver }}

desktop-build:
uses: RoboSats/robosats/.github/workflows/desktop-build.yml@main
needs: [frontend-build, check-versions]
secrets: inherit
with:
semver: ${{ needs.check-versions.outputs.semver }}

release:
needs: [check-versions, integration-tests, coordinator-image, selfhosted-client-image, web-client-image, android-build]
needs: [check-versions, integration-tests, coordinator-image, selfhosted-client-image, web-client-image, android-build, desktop-build]
runs-on: ubuntu-latest
steps:
- name: Checkout
@@ -155,19 +163,53 @@ jobs:
asset_name: robosats-${{ needs.check-versions.outputs.semver }}-x86_64.apk
asset_content_type: application/apk

# Upload app-x86-release APK artifact asset
- name: 'Download x86 APK Artifact'
- name: 'Download macOS Build Artifact'
uses: actions/download-artifact@v4
with:
name: robosats-${{ needs.check-versions.outputs.semver }}-x86.apk
name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-mac-darwin-x64.zip
path: .
- name: 'Upload x86 APK Asset'
id: upload-x86-release-asset

- name: 'Upload macOS Build Artifact'
id: upload-release-mac-zip-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: app-x86-release.apk
asset_name: robosats-${{ needs.check-versions.outputs.semver }}-x86.apk
asset_content_type: application/apk
asset_path: robosats-desktop-${{ needs.check-versions.outputs.semver }}-mac-darwin-x64.zip
asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-mac-darwin-x64.zip
asset_content_type: application/zip

- name: 'Download linux Build Artifact'
uses: actions/download-artifact@v4
with:
name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
path: .

- name: 'Upload linux Build Artifact'
id: upload-release-linux-zip-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-linux-x64.zip
asset_content_type: application/zip

- name: 'Download Windows Build Artifact'
uses: actions/download-artifact@v4
with:
name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
path: .

- name: 'Upload Windows Build Artifact'
id: upload-release-win-zip-asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create-release.outputs.upload_url }}
asset_path: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
asset_name: robosats-desktop-${{ needs.check-versions.outputs.semver }}-win32-ia32.zip
asset_content_type: application/zip
.github/workflows/selfhosted-client-image.yml (35 lines changed, vendored)

@@ -21,40 +21,21 @@ jobs:
steps:
- uses: actions/checkout@v4

- name: 'Copy Static' # Needed since Github actions does not support symlinks
run: cp -r frontend/static nodeapp/static

- name: 'Download basic.selfhosted.js Artifact'
- name: 'Download Basic main.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
uses: dawidd6/action-download-artifact@v7
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-basic-selfhosted-js
path: nodeapp/static/frontend/
name: nodeapp-main-static
path: nodeapp

- name: 'Download main.js Artifact for a release'
- name: 'Download Basic main.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-basic-selfhosted-js
path: nodeapp/static/frontend/

- name: 'Download pro.selfhosted.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-pro-selfhosted-js
path: nodeapp/static/frontend/

- name: 'Download pro.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-pro-selfhosted-js
path: nodeapp/static/frontend/
name: nodeapp-main-static
path: nodeapp

- name: 'Log in to Docker Hub'
uses: docker/login-action@v3
@@ -85,7 +66,7 @@ jobs:
uses: docker/setup-buildx-action@v3

- name: 'Build and push Docker image'
uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with:
context: ./nodeapp
platforms: linux/amd64,linux/arm64
.github/workflows/web-client-image.yml (35 lines changed, vendored)

@@ -21,40 +21,21 @@ jobs:
steps:
- uses: actions/checkout@v4

- name: 'Copy Static' # Needed since Github actions does not support symlinks
run: cp -r frontend/static web/static

- name: 'Download main.js Artifact'
- name: 'Download Basic main.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
uses: dawidd6/action-download-artifact@v7
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-main-js
path: web/static/frontend/
name: web-main-static
path: web

- name: 'Download main.js Artifact for a release'
- name: 'Download Basic main.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-main-js
path: web/static/frontend/

- name: 'Download pro.js Artifact'
if: inputs.semver == '' # Only if workflow fired from frontend-build.yml
uses: dawidd6/action-download-artifact@v3
with:
workflow: frontend-build.yml
workflow_conclusion: success
name: web-pro-js
path: web/static/frontend/

- name: 'Download pro.js Artifact for a release'
if: inputs.semver != '' # Only if fired as job in release.yml
uses: actions/download-artifact@v4
with:
name: web-pro-js
path: web/static/frontend/
name: web-main-static
path: web

- name: 'Log in to Docker Hub'
uses: docker/login-action@v3
@@ -85,7 +66,7 @@ jobs:
uses: docker/setup-buildx-action@v3

- name: 'Build and push Docker image'
uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with:
context: ./web
platforms: linux/amd64,linux/arm64
.gitignore (23 lines changed, vendored)

@@ -1,9 +1,6 @@
*.py[cod]
__pycache__

# C extensions
*.so

# Packages
*.egg
*.egg-info
@@ -637,6 +634,7 @@ frontend/static/assets/avatars*
api/lightning/*_grpc.py
api/lightning/*_pb2.py
api/lightning/pymp*
api/lightning/pip*
frontend/static/locales/collected_phrases.json
frontend/static/admin*
frontend/static/rest_framework*
@@ -644,12 +642,21 @@ frontend/static/import_export*
frontend/static/drf_spectacular_sidecar/
frontend/src/components/PaymentMethods/Icons/code*
frontend/src/components/PaymentMethods/Icons/webp*
frontend/static/frontend/**
frontend/static/frontend
docs/.jekyll-cache*
docs/_site*
node
desktopApp/release-builds

# mobile frontend statics
mobile/html/Web.bundle/js*
mobile/html/Web.bundle/css*
mobile/html/Web.bundle/assets*
# frontend statics
frontend/templates/frontend/*.html
mobile/html/Web.bundle
desktopApp/static
desktopApp/*.html
web/static
web/*.html
nodeapp/static
nodeapp/*.html

# Protocol Buffers
api/lightning/*.proto
@@ -11,7 +11,7 @@ repos:
- '--fix=lf'
- id: trailing-whitespace
- id: pretty-format-json
exclude: ^frontend/
exclude: ^frontend/|^mobile/
args:
- '--autofix'
- '--no-sort-keys'
@@ -38,6 +38,15 @@ repos:
files: ^frontend/
types_or: [javascript, jsx, ts, tsx, css, markdown, json] # uses https://github.com/pre-commit/identify
entry: bash -c 'cd frontend && npm run format'
- id: lintern-frontend
name: lintern-frontend
stages:
- commit
- merge-commit
language: system
files: ^frontend/
types_or: [javascript, jsx, ts, tsx, css, markdown, json] # uses https://github.com/pre-commit/identify
entry: bash -c 'cd frontend && npm run lint'
- id: prettier-mobile
name: prettier-mobile
stages:
@@ -47,6 +56,15 @@ repos:
files: ^mobile/
types_or: [javascript, jsx, ts, tsx, css, markdown, json] # uses https://github.com/pre-commit/identify
entry: bash -c 'cd mobile && npm run format'
- id: lintern-mobile
name: lintern-mobile
stages:
- commit
- merge-commit
language: system
files: ^mobile/
types_or: [javascript, jsx, ts, tsx, css, markdown, json] # uses https://github.com/pre-commit/identify
entry: bash -c 'cd mobile && npm run lint'
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.13
hooks:
@@ -7,7 +7,7 @@ This contributing guide is based on the [Bisq contributing guide](https://github

## Communication Channels

Most communication about development takes place on our [Matrix Development group](https://matrix.to/#/#robosats:matrix.org).
Most communication about development takes place on our [SimpleX Development Group](https://simplex.chat/contact#/?v=2-7&smp=smp%3A%2F%2F6iIcWT_dF2zN_w5xzZEY7HI2Prbh3ldP07YTyDexPjE%3D%40smp10.simplex.im%2FKEkNLMlgM8vrrU3xjBt5emS7EsP0c4s1%23%2F%3Fv%3D1-3%26dh%3DMCowBQYDK2VuAyEABehx7Tgefl_vvOGOe2SThJCGACKRgSU2wiUdIJ5bQHw%253D%26srv%3Drb2pbttocvnbrngnwziclp2f4ckjq65kebafws6g4hy22cdaiv5dwjqd.onion&data=%7B%22type%22%3A%22group%22%2C%22groupLinkId%22%3A%22gFi-9hvL3XgXXTgnlZPyJw%3D%3D%22%7D).

Discussion about code changes happens in GitHub issues and pull requests.
@@ -1,4 +1,4 @@
FROM python:3.11.8-slim-bookworm
FROM python:3.12.3-slim-bookworm
ARG DEBIAN_FRONTEND=noninteractive
ARG DEVELOPMENT=False
README.md (33 lines changed)

@@ -13,8 +13,8 @@ RoboSats is a simple and private way to exchange bitcoin for national currencies
</div>

## Try it out!
- **TOR URL:** [**RoboDex**arjwtfryec556cjdz3dfa7u47saek6lkftnkgshvgg2kcumqd.onion](http://robodexarjwtfryec556cjdz3dfa7u47saek6lkftnkgshvgg2kcumqd.onion) ( Open with [Tor Browser](https://www.torproject.org/download/))
- Clearnet URL: [dex.robosats.com](https://unsafe.robosats.com) (not recommended!)
- **TOR URL:** [**RoboSats**y56bwqn56qyadmcxkx767hnabg4mihxlmgyt6if5gnuxvzad.onion](http://RoboSatsy56bwqn56qyadmcxkx767hnabg4mihxlmgyt6if5gnuxvzad.onion) ( Open with [Tor Browser](https://www.torproject.org/download/))
- Clearnet URL: [unsafe.robosats.org](https://unsafe.robosats.org) (not recommended!)

*Always use [Tor Browser](https://www.torproject.org/download/) and .onion for best privacy. The Clearnet URL redirects to a third party Tor2web service. Your privacy cannot be guaranteed to be respected. Use clearnet only to check around the app, never use for trading!*

@@ -24,16 +24,16 @@ RoboSats is a simple and private way to exchange bitcoin for national currencies
https://user-images.githubusercontent.com/90936742/167310017-dc211a05-dd5e-4ef4-b93f-250f80bc5bca.mp4

### Written guides
- **[English](https://learn.robosats.com/read/en)**
- **[Español](https://learn.robosats.com/read/es)**
- **[Deutsch](https://learn.robosats.com/read/de)**
- **[English](https://learn.robosats.org/read/en)**
- **[Español](https://learn.robosats.org/read/es)**
- **[Deutsch](https://learn.robosats.org/read/de)**

### Video guides
- **[English](https://learn.robosats.com/watch/en/)**
- **[Español](https://learn.robosats.com/watch/es/)**
- **[Deutsch](https://learn.robosats.com/watch/de)**
- **[Português](https://learn.robosats.com/watch/pt)**
- **[Polski](https://learn.robosats.com/watch/pl)**
- **[English](https://learn.robosats.org/watch/en/)**
- **[Español](https://learn.robosats.org/watch/es/)**
- **[Deutsch](https://learn.robosats.org/watch/de)**
- **[Português](https://learn.robosats.org/watch/pt)**
- **[Polski](https://learn.robosats.org/watch/pl)**

## How it works

@@ -51,8 +51,19 @@ Alice wants to buy satoshis privately:
11. The bonds would be charged (lost) in case of unilateral cancellation or cheating (lost dispute).

## Contribute to the Robotic Satoshis Open Source Project
Check out our [Contribution Guide](https://learn.robosats.com/contribute/) to find how you can make RoboSats great.
Check out our [Contribution Guide](https://learn.robosats.org/contribute/) to find how you can make RoboSats great.

RoboSats is a monorepo, arguably a messy one at the moment.
- The top level is a Django application (the coordinator backend) with apps `/api`, `/control`, and `/chat`. Django settings are in `/robosats` and `/tests` has integration tests for the RoboSats backend.
- The `/frontend` directory contains the ReactJS client.
- The `/nodeapp` directory contains the docker orchestration and utilities for the self-hosted application (Umbrel, StartOS, etc)
- The `/mobile` directory contains our React Native app (a wrapper around our ReactJS app in `/frontend`)
- The `/docs` directory has the learn.robosats.org static Jekyll site markdown docs.
- The `/web` directory is a light wrapper around our client app `/frontend` intended to host a RoboSats dex client to be used for the public. We use this one in unsafe.robosats.org

You can run the whole stack for local development following the instructions in [setup.md](/setup.md)

Officially mantained docker orchestration for coordinators can be found in the repo [robosats-deploy](https://github.com/RoboSats/robosats-deploy)
### ⚡Developer Rewards ⚡
Check out the [Developer Rewards Panel](https://github.com/users/Reckless-Satoshi/projects/2/views/5) for tasks paid in Sats.
api/admin.py (81 lines changed)
@ -21,17 +21,13 @@ admin.site.unregister(TokenProxy)
|
||||
class RobotInline(admin.StackedInline):
|
||||
model = Robot
|
||||
can_delete = False
|
||||
fields = ("avatar_tag",)
|
||||
readonly_fields = ["avatar_tag"]
|
||||
show_change_link = True
|
||||
|
||||
|
||||
# extended users with avatars
|
||||
@admin.register(User)
|
||||
class EUserAdmin(AdminChangeLinksMixin, UserAdmin):
|
||||
inlines = [RobotInline]
|
||||
list_display = (
|
||||
"avatar_tag",
|
||||
"id",
|
||||
"robot_link",
|
||||
"username",
|
||||
@ -43,28 +39,32 @@ class EUserAdmin(AdminChangeLinksMixin, UserAdmin):
|
||||
change_links = ("robot",)
|
||||
ordering = ("-id",)
|
||||
|
||||
def avatar_tag(self, obj):
|
||||
return obj.robot.avatar_tag()
|
||||
|
||||
|
||||
# extended tokens with raw id fields and avatars
|
||||
# extended tokens with raw id fields
|
||||
@admin.register(TokenProxy)
|
||||
class ETokenAdmin(AdminChangeLinksMixin, TokenAdmin):
|
||||
raw_id_fields = ["user"]
|
||||
list_display = (
|
||||
"avatar_tag",
|
||||
"key",
|
||||
"user_link",
|
||||
)
|
||||
list_display_links = ("key",)
|
||||
change_links = ("user",)
|
||||
|
||||
def avatar_tag(self, obj):
|
||||
return obj.user.robot.avatar_tag()
|
||||
|
||||
class LNPaymentInline(admin.StackedInline):
|
||||
model = LNPayment
|
||||
can_delete = True
|
||||
fields = ("num_satoshis", "status", "routing_budget_sats", "description")
|
||||
readonly_fields = ("num_satoshis", "status", "routing_budget_sats", "description")
|
||||
show_change_link = True
|
||||
show_full_result_count = True
|
||||
extra = 0
|
||||
|
||||
|
||||
@admin.register(Order)
|
||||
class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
inlines = [LNPaymentInline]
|
||||
list_display = (
|
||||
"id",
|
||||
"type",
|
||||
@ -210,6 +210,7 @@ class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
f"Dispute of order {order.id} solved successfully on favor of the maker",
|
||||
messages.SUCCESS,
|
||||
)
|
||||
send_notification.delay(order_id=order.id, message="dispute_closed")
|
||||
|
||||
else:
|
||||
self.message_user(
|
||||
@ -248,6 +249,7 @@ class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
f"Dispute of order {order.id} solved successfully on favor of the taker",
|
||||
messages.SUCCESS,
|
||||
)
|
||||
send_notification.delay(order_id=order.id, message="dispute_closed")
|
||||
|
||||
else:
|
||||
self.message_user(
|
||||
@ -369,8 +371,61 @@ class OrderAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
return float(obj.amount)
|
||||
|
||||
|
||||
class OrderInline(admin.StackedInline):
|
||||
model = Order
|
||||
can_delete = False
|
||||
show_change_link = True
|
||||
extra = 0
|
||||
fields = (
|
||||
"id",
|
||||
"type",
|
||||
"maker",
|
||||
"taker",
|
||||
"status",
|
||||
"amount",
|
||||
"currency",
|
||||
"last_satoshis",
|
||||
"is_disputed",
|
||||
"is_fiat_sent",
|
||||
"created_at",
|
||||
"expires_at",
|
||||
"payout_tx",
|
||||
"payout",
|
||||
"maker_bond",
|
||||
"taker_bond",
|
||||
"trade_escrow",
|
||||
)
|
||||
readonly_fields = fields
|
||||
|
||||
|
||||
class PayoutOrderInline(OrderInline):
|
||||
verbose_name = "Order Paid"
|
||||
fk_name = "payout"
|
||||
|
||||
|
||||
class MakerBondOrderInline(OrderInline):
|
||||
verbose_name = "Order Made"
|
||||
fk_name = "maker_bond"
|
||||
|
||||
|
||||
class TakerBondOrderInline(OrderInline):
|
||||
verbose_name = "Order Taken"
|
||||
fk_name = "taker_bond"
|
||||
|
||||
|
||||
class EscrowOrderInline(OrderInline):
|
||||
verbose_name = "Order Escrow"
|
||||
fk_name = "trade_escrow"
|
||||
|
||||
|
||||
@admin.register(LNPayment)
|
||||
class LNPaymentAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
inlines = [
|
||||
PayoutOrderInline,
|
||||
MakerBondOrderInline,
|
||||
TakerBondOrderInline,
|
||||
EscrowOrderInline,
|
||||
]
|
||||
list_display = (
|
||||
"hash",
|
||||
"concept",
|
||||
@ -446,7 +501,6 @@ class OnchainPaymentAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
@admin.register(Robot)
|
||||
class UserRobotAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
list_display = (
|
||||
"avatar_tag",
|
||||
"id",
|
||||
"user_link",
|
||||
"telegram_enabled",
|
||||
@ -459,9 +513,8 @@ class UserRobotAdmin(AdminChangeLinksMixin, admin.ModelAdmin):
|
||||
)
|
||||
raw_id_fields = ("user",)
|
||||
list_editable = ["earned_rewards"]
|
||||
list_display_links = ("avatar_tag", "id")
|
||||
list_display_links = ["id"]
|
||||
change_links = ["user"]
|
||||
readonly_fields = ["avatar_tag"]
|
||||
search_fields = ["user__username", "id"]
|
||||
readonly_fields = ("hash_id", "public_key", "encrypted_private_key")
|
||||
|
||||
|
@ -90,9 +90,9 @@ class CLNNode:
|
||||
@classmethod
|
||||
def decode_payreq(cls, invoice):
|
||||
"""Decodes a lightning payment request (invoice)"""
|
||||
request = hold_pb2.DecodeBolt11Request(bolt11=invoice)
|
||||
holdstub = hold_pb2_grpc.HoldStub(cls.hold_channel)
|
||||
response = holdstub.DecodeBolt11(request)
|
||||
nodestub = node_pb2_grpc.NodeStub(cls.node_channel)
|
||||
request = node_pb2.DecodeRequest(string=invoice)
|
||||
response = nodestub.Decode(request)
|
||||
return response
|
||||
|
||||
@classmethod
|
||||
@ -236,7 +236,7 @@ class CLNNode:
|
||||
holdstub = hold_pb2_grpc.HoldStub(cls.hold_channel)
|
||||
response = holdstub.HoldInvoiceCancel(request)
|
||||
|
||||
return response.state == hold_pb2.HoldInvoiceCancelResponse.Holdstate.CANCELED
|
||||
return response.state == hold_pb2.Holdstate.CANCELED
|
||||
|
||||
@classmethod
|
||||
def settle_hold_invoice(cls, preimage):
|
||||
@ -247,7 +247,7 @@ class CLNNode:
|
||||
holdstub = hold_pb2_grpc.HoldStub(cls.hold_channel)
|
||||
response = holdstub.HoldInvoiceSettle(request)
|
||||
|
||||
return response.state == hold_pb2.HoldInvoiceSettleResponse.Holdstate.SETTLED
|
||||
return response.state == hold_pb2.Holdstate.SETTLED
|
||||
|
||||
@classmethod
|
||||
def gen_hold_invoice(
|
||||
@ -272,7 +272,7 @@ class CLNNode:
|
||||
|
||||
request = hold_pb2.HoldInvoiceRequest(
|
||||
description=description,
|
||||
amount_msat=primitives__pb2.Amount(msat=num_satoshis * 1_000),
|
||||
amount_msat=hold_pb2.Amount(msat=num_satoshis * 1_000),
|
||||
label=f"Order:{order_id}-{lnpayment_concept}-{time}",
|
||||
expiry=invoice_expiry,
|
||||
cltv=cltv_expiry_blocks,
|
||||
@ -286,7 +286,7 @@ class CLNNode:
|
||||
hold_payment["preimage"] = preimage.hex()
|
||||
hold_payment["payment_hash"] = response.payment_hash.hex()
|
||||
hold_payment["created_at"] = timezone.make_aware(
|
||||
datetime.fromtimestamp(payreq_decoded.timestamp)
|
||||
datetime.fromtimestamp(payreq_decoded.created_at)
|
||||
)
|
||||
hold_payment["expires_at"] = timezone.make_aware(
|
||||
datetime.fromtimestamp(response.expires_at)
|
||||
@ -309,13 +309,13 @@ class CLNNode:
|
||||
# Will fail if 'unable to locate invoice'. Happens if invoice expiry
|
||||
# time has passed (but these are 15% padded at the moment). Should catch it
|
||||
# and report back that the invoice has expired (better robustness)
|
||||
if response.state == hold_pb2.HoldInvoiceLookupResponse.Holdstate.OPEN:
|
||||
if response.state == hold_pb2.Holdstate.OPEN:
|
||||
pass
|
||||
if response.state == hold_pb2.HoldInvoiceLookupResponse.Holdstate.SETTLED:
|
||||
if response.state == hold_pb2.Holdstate.SETTLED:
|
||||
pass
|
||||
if response.state == hold_pb2.HoldInvoiceLookupResponse.Holdstate.CANCELED:
|
||||
if response.state == hold_pb2.Holdstate.CANCELED:
|
||||
pass
|
||||
if response.state == hold_pb2.HoldInvoiceLookupResponse.Holdstate.ACCEPTED:
|
||||
if response.state == hold_pb2.Holdstate.ACCEPTED:
|
||||
lnpayment.expiry_height = response.htlc_expiry
|
||||
lnpayment.status = LNPayment.Status.LOCKED
|
||||
lnpayment.save(update_fields=["expiry_height", "status"])
|
||||
@ -359,7 +359,7 @@ class CLNNode:
|
||||
except Exception as e:
|
||||
# If it fails at finding the invoice: it has been expired for more than an hour (and could be paid or just expired).
|
||||
# In RoboSats DB we make a distinction between cancelled and returned
|
||||
# (cln-grpc-hodl has separate state for hodl-invoices, which it forgets after an invoice expired more than an hour ago)
|
||||
# (holdinvoice plugin has separate state for hodl-invoices, which it forgets after an invoice expired more than an hour ago)
|
||||
if "empty result for listdatastore_state" in str(e):
|
||||
print(str(e))
|
||||
request2 = node_pb2.ListinvoicesRequest(
|
||||
@ -418,7 +418,7 @@ class CLNNode:
|
||||
|
||||
# Some wallet providers (e.g. Muun) force routing through a private channel with high fees >1500ppm
|
||||
# These payments will fail. So it is best to let the user know in advance this invoice is not valid.
|
||||
route_hints = payreq_decoded.route_hints.hints
|
||||
route_hints = payreq_decoded.routes.hints
|
||||
|
||||
# Max amount RoboSats will pay for routing
|
||||
if routing_budget_ppm == 0:
|
||||
@ -438,8 +438,10 @@ class CLNNode:
|
||||
route_cost = 0
|
||||
# ...add up the cost of every hinted hop...
|
||||
for hop_hint in hinted_route.hops:
|
||||
route_cost += hop_hint.feebase.msat / 1_000
|
||||
route_cost += hop_hint.feeprop * num_satoshis / 1_000_000
|
||||
route_cost += hop_hint.fee_base_msat.msat / 1_000
|
||||
route_cost += (
|
||||
hop_hint.fee_proportional_millionths * num_satoshis / 1_000_000
|
||||
)
|
||||
|
||||
# ...and store the cost of the route to the array
|
||||
routes_cost.append(route_cost)
|
||||
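A worked example of the hinted-hop cost that the loop above accumulates, using made-up numbers (the formula is exactly the one in the new lines: base fee plus the proportional fee scaled by the payment size):

```python
# Illustrative numbers only: one hinted hop for a 100_000-sat payout.
num_satoshis = 100_000
fee_base_msat = 1_000                  # 1 sat base fee
fee_proportional_millionths = 1_500    # 1500 ppm

route_cost = fee_base_msat / 1_000                                     # 1.0 sat
route_cost += fee_proportional_millionths * num_satoshis / 1_000_000   # +150.0 sats
print(route_cost)  # 151.0 sats; the payout is rejected below if even the
                   # cheapest hinted route costs >= max_routing_fee_sats
```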
@@ -447,7 +449,7 @@
# If the cheapest possible private route is more expensive than what RoboSats is willing to pay
if min(routes_cost) >= max_routing_fee_sats:
payout["context"] = {
"bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget."
"bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget. This can be adjusted with Advanced Options enabled."
}
return payout

@@ -466,7 +468,7 @@
return payout

payout["created_at"] = timezone.make_aware(
datetime.fromtimestamp(payreq_decoded.timestamp)
datetime.fromtimestamp(payreq_decoded.created_at)
)
payout["expires_at"] = payout["created_at"] + timedelta(
seconds=payreq_decoded.expiry
@@ -869,4 +871,4 @@
else:
raise e

return response.state == hold_pb2.HoldInvoiceLookupResponse.Holdstate.SETTLED
return response.state == hold_pb2.Holdstate.SETTLED
@@ -424,7 +424,7 @@ class LNDNode:
# If the cheapest possible private route is more expensive than what RoboSats is willing to pay
if min(routes_cost) >= max_routing_fee_sats:
payout["context"] = {
"bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget."
"bad_invoice": "The invoice hinted private routes are not payable within the submitted routing budget. This can be adjusted with Advanced Options enabled."
}
return payout

@@ -478,6 +478,7 @@ class LNDNode:
payment_request=lnpayment.invoice,
fee_limit_sat=fee_limit_sat,
timeout_seconds=timeout_seconds,
amp=True,
)

routerstub = router_pb2_grpc.RouterStub(cls.channel)
@@ -536,6 +537,7 @@ class LNDNode:
fee_limit_sat=fee_limit_sat,
timeout_seconds=timeout_seconds,
allow_self_payment=True,
amp=True,
)

order = lnpayment.order_paid_LN
api/logics.py (112 lines changed)

@@ -1,15 +1,15 @@
import math
from datetime import timedelta

from decouple import config
from decouple import config, Csv
from django.contrib.auth.models import User
from django.db.models import Q, Sum
from django.utils import timezone

from api.lightning.node import LNNode
from api.models import Currency, LNPayment, MarketTick, OnchainPayment, Order
from api.tasks import send_devfund_donation, send_notification
from api.utils import get_minning_fee, validate_onchain_address
from api.tasks import send_devfund_donation, send_notification, nostr_send_order_event
from api.utils import get_minning_fee, validate_onchain_address, location_country
from chat.models import Message

FEE = float(config("FEE"))
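The new import above pulls in a Celery task, `nostr_send_order_event`, which the rest of this diff enqueues with `.delay(order_id=...)` on every relevant order transition. Its implementation lives in api/tasks.py and is not part of this diff; a minimal sketch of the assumed shape, labeled as such:

```python
# Sketch under assumptions; the real task is defined in api/tasks.py (not shown in this diff).
from celery import shared_task

@shared_task
def nostr_send_order_event(order_id=None):
    # Expected to load the order and publish its current state as a Nostr event,
    # presumably signed with the coordinator's NOSTR_NSEC configured in .env-sample.
    ...
```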
@@ -29,6 +29,8 @@ MAX_MINING_NETWORK_SPEEDUP_EXPECTED = float(
config("MAX_MINING_NETWORK_SPEEDUP_EXPECTED")
)

GEOBLOCKED_COUNTRIES = config("GEOBLOCKED_COUNTRIES", cast=Csv(), default="")


class Logics:
@classmethod
@@ -137,6 +139,19 @@
return True, None

@classmethod
def validate_location(cls, order) -> bool:
if not (order.latitude or order.longitude):
return True, None

country = location_country(order.longitude, order.latitude)
if country in GEOBLOCKED_COUNTRIES:
return False, {
"bad_request": f"The coordinator does not support orders in {country}"
}
else:
return True, None

def validate_amount_within_range(order, amount):
if amount > float(order.max_amount) or amount < float(order.min_amount):
return False, {
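For reference, `Csv()` from python-decouple turns the comma-separated value in .env-sample into a Python list, which is what makes the membership test in `validate_location()` above work:

```python
from decouple import Csv

# 'ABW,AFG,AGO' (the .env-sample example) becomes a list of ISO 3166-1 alpha-3 codes.
print(Csv()("ABW,AFG,AGO"))  # -> ['ABW', 'AFG', 'AGO']
# validate_location() then rejects an F2F order whose coordinates resolve
# (via location_country()) to any of these codes.
```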
@ -170,6 +185,9 @@ class Logics:
|
||||
seconds=order.t_to_expire(Order.Status.TAK)
|
||||
)
|
||||
order.save(update_fields=["amount", "taker", "expires_at"])
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
order.log(
|
||||
f"Taken by Robot({user.robot.id},{user.username}) for {order.amount} fiat units"
|
||||
)
|
||||
@ -278,6 +296,8 @@ class Logics:
|
||||
cls.cancel_bond(order.taker_bond)
|
||||
cls.kick_taker(order)
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
order.log("Order expired while waiting for taker bond")
|
||||
order.log("Taker bond was cancelled")
|
||||
|
||||
@ -689,9 +709,9 @@ class Logics:
|
||||
|
||||
if context["invoice_amount"] < MIN_SWAP_AMOUNT:
|
||||
context["swap_allowed"] = False
|
||||
context[
|
||||
"swap_failure_reason"
|
||||
] = f"Order amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats"
|
||||
context["swap_failure_reason"] = (
|
||||
f"Order amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats"
|
||||
)
|
||||
order.log(
|
||||
f"Onchain payment option was not offered: amount is smaller than the minimum swap available of {MIN_SWAP_AMOUNT} Sats",
|
||||
level="WARN",
|
||||
@ -699,9 +719,9 @@ class Logics:
|
||||
return True, context
|
||||
elif context["invoice_amount"] > MAX_SWAP_AMOUNT:
|
||||
context["swap_allowed"] = False
|
||||
context[
|
||||
"swap_failure_reason"
|
||||
] = f"Order amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats"
|
||||
context["swap_failure_reason"] = (
|
||||
f"Order amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats"
|
||||
)
|
||||
order.log(
|
||||
f"Onchain payment option was not offered: amount is bigger than the maximum swap available of {MAX_SWAP_AMOUNT} Sats",
|
||||
level="WARN",
|
||||
@ -726,9 +746,9 @@ class Logics:
|
||||
)
|
||||
if not valid:
|
||||
context["swap_allowed"] = False
|
||||
context[
|
||||
"swap_failure_reason"
|
||||
] = "Not enough onchain liquidity available to offer a swap"
|
||||
context["swap_failure_reason"] = (
|
||||
"Not enough onchain liquidity available to offer a swap"
|
||||
)
|
||||
order.log(
|
||||
"Onchain payment option was not offered: onchain liquidity available to offer a swap",
|
||||
level="WARN",
|
||||
@ -878,7 +898,7 @@ class Logics:
|
||||
if order.status == Order.Status.FAI:
|
||||
if order.payout.status != LNPayment.Status.EXPIRE:
|
||||
return False, {
|
||||
"bad_request": "You can only submit an invoice after expiration or 3 failed attempts"
|
||||
"bad_invoice": "You can only submit an invoice after expiration or 3 failed attempts"
|
||||
}
|
||||
|
||||
# cancel onchain_payout if existing
|
||||
@ -894,25 +914,24 @@ class Logics:
|
||||
if not payout["valid"]:
|
||||
return False, payout["context"]
|
||||
|
||||
order.payout, _ = LNPayment.objects.update_or_create(
|
||||
if order.payout:
|
||||
if order.payout.payment_hash == payout["payment_hash"]:
|
||||
return False, {"bad_invoice": "You must submit a NEW invoice"}
|
||||
|
||||
order.payout = LNPayment.objects.create(
|
||||
concept=LNPayment.Concepts.PAYBUYER,
|
||||
type=LNPayment.Types.NORM,
|
||||
sender=User.objects.get(username=ESCROW_USERNAME),
|
||||
# In case this user has other payouts, update the one related to this order.
|
||||
order_paid_LN=order,
|
||||
receiver=user,
|
||||
routing_budget_ppm=routing_budget_ppm,
|
||||
routing_budget_sats=routing_budget_sats,
|
||||
# if there is a LNPayment matching these above, it updates that one with defaults below.
|
||||
defaults={
|
||||
"invoice": invoice,
|
||||
"status": LNPayment.Status.VALIDI,
|
||||
"num_satoshis": num_satoshis,
|
||||
"description": payout["description"],
|
||||
"payment_hash": payout["payment_hash"],
|
||||
"created_at": payout["created_at"],
|
||||
"expires_at": payout["expires_at"],
|
||||
},
|
||||
invoice=invoice,
|
||||
status=LNPayment.Status.VALIDI,
|
||||
num_satoshis=num_satoshis,
|
||||
description=payout["description"],
|
||||
payment_hash=payout["payment_hash"],
|
||||
created_at=payout["created_at"],
|
||||
expires_at=payout["expires_at"],
|
||||
)
|
||||
|
||||
order.is_swap = False
|
||||
@ -1005,6 +1024,8 @@ class Logics:
|
||||
order.log("Order expired while waiting for maker bond")
|
||||
order.log("Maker bond was cancelled")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 2.a) When maker cancels after bond
|
||||
@ -1025,6 +1046,8 @@ class Logics:
|
||||
order.log("Order cancelled by maker while public or paused")
|
||||
order.log("Maker bond was <b>unlocked</b>")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 2.b) When maker cancels after bond and before taker bond is locked
|
||||
@ -1044,6 +1067,8 @@ class Logics:
|
||||
order.log("Maker bond was <b>unlocked</b>")
|
||||
order.log("Taker bond was <b>cancelled</b>")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 3) When taker cancels before bond
|
||||
@ -1056,6 +1081,8 @@ class Logics:
|
||||
|
||||
order.log("Taker cancelled before locking the bond")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 4) When taker or maker cancel after bond (before escrow)
|
||||
@ -1085,6 +1112,8 @@ class Logics:
|
||||
order.log("Maker bond was <b>settled</b>")
|
||||
order.log("Taker bond was <b>unlocked</b>")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 4.b) When taker cancel after bond (before escrow)
|
||||
@ -1107,6 +1136,8 @@ class Logics:
|
||||
order.log("Taker bond was <b>settled</b>")
|
||||
order.log("Maker bond was <b>unlocked</b>")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# 5) When trade collateral has been posted (after escrow)
|
||||
@ -1122,6 +1153,9 @@ class Logics:
|
||||
order.log(
|
||||
f"Taker Robot({user.robot.id},{user.username}) accepted the collaborative cancellation"
|
||||
)
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# if the taker had asked, and now the maker does: cancel order, return everything
|
||||
@ -1130,6 +1164,9 @@ class Logics:
|
||||
order.log(
|
||||
f"Maker Robot({user.robot.id},{user.username}) accepted the collaborative cancellation"
|
||||
)
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
return True, None
|
||||
|
||||
# Otherwise just make true the asked for cancel flags
|
||||
@ -1167,6 +1204,8 @@ class Logics:
|
||||
order.update_status(Order.Status.CCA)
|
||||
send_notification.delay(order_id=order.id, message="collaborative_cancelled")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
order.log("Order was collaboratively cancelled")
|
||||
order.log("Maker bond was <b>unlocked</b>")
|
||||
order.log("Taker bond was <b>unlocked</b>")
|
||||
@ -1194,6 +1233,8 @@ class Logics:
|
||||
|
||||
order.save() # update all fields
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
order.log(f"Order({order.id},{str(order)}) is public in the order book")
|
||||
return
|
||||
|
||||
@ -1241,9 +1282,9 @@ class Logics:
|
||||
bond_satoshis = int(order.last_satoshis * order.bond_size / 100)
|
||||
|
||||
if user.robot.wants_stealth:
|
||||
description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
|
||||
description = f"{config("NODE_ALIAS")} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
|
||||
else:
|
||||
description = f"RoboSats - Publishing '{str(order)}' - Maker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
|
||||
description = f"{config("NODE_ALIAS")} - Publishing '{str(order)}' - Maker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
|
||||
|
||||
# Gen hold Invoice
|
||||
try:
|
||||
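A side note on the new description strings above: they interpolate `config("NODE_ALIAS")` while reusing double quotes inside a double-quoted f-string. That syntax is only valid on Python 3.12 or newer (PEP 701), which lines up with the Dockerfile bump to python:3.12.3 elsewhere in this commit. A tiny self-contained illustration (NODE_ALIAS stand-in via an environment variable):

```python
import os

# Requires Python >= 3.12 (PEP 701): the expression inside the f-string reuses
# the enclosing double quote, just like config("NODE_ALIAS") in the lines above.
print(f"{os.environ.get("NODE_ALIAS", "RoboSats")} - Payment reference: 1234abcd")
```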
@ -1336,6 +1377,9 @@ class Logics:
|
||||
except Exception:
|
||||
pass
|
||||
send_notification.delay(order_id=order.id, message="order_taken_confirmed")
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
|
||||
order.log(
|
||||
f"<b>Contract formalized.</b> Maker: Robot({order.maker.robot.id},{order.maker}). Taker: Robot({order.taker.robot.id},{order.taker}). API median price {order.currency.exchange_rate} {dict(Currency.currency_choices)[order.currency.currency]}/BTC. Premium is {order.premium}%. Contract size {order.last_satoshis} Sats"
|
||||
)
|
||||
@ -1363,10 +1407,10 @@ class Logics:
|
||||
bond_satoshis = int(order.last_satoshis * order.bond_size / 100)
|
||||
pos_text = "Buying" if cls.is_buyer(order, user) else "Selling"
|
||||
if user.robot.wants_stealth:
description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
description = f"{config('NODE_ALIAS')} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
else:
description = (
f"RoboSats - Taking 'Order {order.id}' {pos_text} BTC for {str(float(order.amount)) + Currency.currency_dict[str(order.currency.currency)]}"
f"{config('NODE_ALIAS')} - Taking 'Order {order.id}' {pos_text} BTC for {str(float(order.amount)) + Currency.currency_dict[str(order.currency.currency)]}"
+ " - Taker bond - This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
)
|
||||
|
||||
@ -1462,9 +1506,9 @@ class Logics:
|
||||
order.log(f"Escrow invoice amount is calculated as {escrow_satoshis} Sats")
|
||||
|
||||
if user.robot.wants_stealth:
description = f"Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
description = f"{config('NODE_ALIAS')} - Payment reference: {order.reference}. This payment WILL FREEZE IN YOUR WALLET, check on RoboSats if the lock was successful. It will be unlocked (fail) unless you cheat or cancel unilaterally."
else:
description = f"RoboSats - Escrow amount for '{str(order)}' - It WILL FREEZE IN YOUR WALLET. It will be released to the buyer once you confirm you received the fiat. It will automatically return if buyer does not confirm the payment."
description = f"{config('NODE_ALIAS')} - Escrow amount for '{str(order)}' - It WILL FREEZE IN YOUR WALLET. It will be released to the buyer once you confirm you received the fiat. It will automatically return if buyer does not confirm the payment."
|
||||
|
||||
# Gen hold Invoice
|
||||
try:
|
||||
@ -1727,11 +1771,15 @@ class Logics:
|
||||
order.log(
|
||||
f"Robot({user.robot.id},{user.username}) paused the public order"
|
||||
)
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
elif order.status == Order.Status.PAU:
|
||||
order.update_status(Order.Status.PUB)
|
||||
order.log(
|
||||
f"Robot({user.robot.id},{user.username}) made public the paused order"
|
||||
)
|
||||
|
||||
nostr_send_order_event.delay(order_id=order.id)
|
||||
else:
|
||||
order.log(
|
||||
f"Robot({user.robot.id},{user.username}) tried to pause/unpause an order that was not public or paused",
|
||||
|
@ -6,7 +6,7 @@ from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
from api.models import Robot
|
||||
from api.notifications import Telegram
|
||||
from api.notifications import Notifications
|
||||
from api.utils import get_session
|
||||
|
||||
|
||||
@ -17,7 +17,7 @@ class Command(BaseCommand):
|
||||
bot_token = config("TELEGRAM_TOKEN")
|
||||
updates_url = f"https://api.telegram.org/bot{bot_token}/getUpdates"
|
||||
session = get_session()
|
||||
telegram = Telegram()
|
||||
notifications = Notifications()
|
||||
|
||||
def handle(self, *args, **options):
|
||||
offset = 0
|
||||
@ -49,17 +49,17 @@ class Command(BaseCommand):
|
||||
continue
|
||||
parts = message.split(" ")
|
||||
if len(parts) < 2:
|
||||
self.telegram.send_message(
|
||||
chat_id=result["message"]["from"]["id"],
|
||||
text='You must enable the notifications bot using the RoboSats client. Click on your "Robot robot" -> "Enable Telegram" and follow the link or scan the QR code.',
|
||||
self.notifications.send_telegram_message(
|
||||
result["message"]["from"]["id"],
|
||||
'You must enable the notifications bot using the RoboSats client. Click on your "Robot robot" -> "Enable Telegram" and follow the link or scan the QR code.',
|
||||
)
|
||||
continue
|
||||
token = parts[-1]
|
||||
robot = Robot.objects.filter(telegram_token=token).first()
|
||||
if not robot:
|
||||
self.telegram.send_message(
|
||||
chat_id=result["message"]["from"]["id"],
|
||||
text=f'Wops, invalid token! There is no Robot with telegram chat token "{token}"',
|
||||
self.notifications.send_telegram_message(
|
||||
result["message"]["from"]["id"],
|
||||
f'Wops, invalid token! There is no Robot with telegram chat token "{token}"',
|
||||
)
|
||||
continue
|
||||
|
||||
@ -71,7 +71,7 @@ class Command(BaseCommand):
|
||||
robot.telegram_lang_code = result["message"]["from"][
|
||||
"language_code"
|
||||
]
|
||||
self.telegram.welcome(robot.user)
|
||||
self.notifications.welcome(robot.user)
|
||||
robot.telegram_enabled = True
|
||||
robot.save(
|
||||
update_fields=[
|
||||
|
26
api/migrations/0047_notification.py
Normal file
26
api/migrations/0047_notification.py
Normal file
@ -0,0 +1,26 @@
|
||||
# Generated by Django 5.0.6 on 2024-06-14 18:31
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0046_alter_currency_currency'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='Notification',
|
||||
fields=[
|
||||
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
|
||||
('title', models.CharField(default=None, max_length=240)),
|
||||
('description', models.CharField(blank=True, default=None, max_length=240)),
|
||||
('order', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='api.order')),
|
||||
('robot', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='api.robot')),
|
||||
],
|
||||
),
|
||||
]
|
19
api/migrations/0048_alter_order_reference.py
Normal file
19
api/migrations/0048_alter_order_reference.py
Normal file
@ -0,0 +1,19 @@
|
||||
# Generated by Django 5.0.6 on 2024-06-29 14:07
|
||||
|
||||
import api.models.order
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0047_notification'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='order',
|
||||
name='reference',
|
||||
field=models.UUIDField(default=api.models.order.custom_uuid, editable=False),
|
||||
),
|
||||
]
|
18
api/migrations/0049_alter_currency_currency.py
Normal file
18
api/migrations/0049_alter_currency_currency.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.0.8 on 2024-08-15 18:06
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0048_alter_order_reference'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='currency',
|
||||
name='currency',
|
||||
field=models.PositiveSmallIntegerField(choices=[(1, 'USD'), (2, 'EUR'), (3, 'JPY'), (4, 'GBP'), (5, 'AUD'), (6, 'CAD'), (7, 'CHF'), (8, 'CNY'), (9, 'HKD'), (10, 'NZD'), (11, 'SEK'), (12, 'KRW'), (13, 'SGD'), (14, 'NOK'), (15, 'MXN'), (16, 'BYN'), (17, 'RUB'), (18, 'ZAR'), (19, 'TRY'), (20, 'BRL'), (21, 'CLP'), (22, 'CZK'), (23, 'DKK'), (24, 'HRK'), (25, 'HUF'), (26, 'INR'), (27, 'ISK'), (28, 'PLN'), (29, 'RON'), (30, 'ARS'), (31, 'VES'), (32, 'COP'), (33, 'PEN'), (34, 'UYU'), (35, 'PYG'), (36, 'BOB'), (37, 'IDR'), (38, 'ANG'), (39, 'CRC'), (40, 'CUP'), (41, 'DOP'), (42, 'GHS'), (43, 'GTQ'), (44, 'ILS'), (45, 'JMD'), (46, 'KES'), (47, 'KZT'), (48, 'MYR'), (49, 'NAD'), (50, 'NGN'), (51, 'AZN'), (52, 'PAB'), (53, 'PHP'), (54, 'PKR'), (55, 'QAR'), (56, 'SAR'), (57, 'THB'), (58, 'TTD'), (59, 'VND'), (60, 'XOF'), (61, 'TWD'), (62, 'TZS'), (63, 'XAF'), (64, 'UAH'), (65, 'EGP'), (66, 'LKR'), (67, 'MAD'), (68, 'AED'), (69, 'TND'), (70, 'ETB'), (71, 'GEL'), (72, 'UGX'), (73, 'RSD'), (74, 'IRT'), (75, 'BDT'), (76, 'ALL'), (77, 'DZD'), (300, 'XAU'), (1000, 'BTC')], unique=True),
|
||||
),
|
||||
]
|
18
api/migrations/0050_alter_order_status.py
Normal file
18
api/migrations/0050_alter_order_status.py
Normal file
@ -0,0 +1,18 @@
|
||||
# Generated by Django 5.0.8 on 2024-08-22 08:30
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('api', '0049_alter_currency_currency'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='order',
|
||||
name='status',
|
||||
field=models.PositiveSmallIntegerField(choices=[(0, 'Waiting for maker bond'), (1, 'Public'), (2, 'Paused'), (3, 'Waiting for taker bond'), (4, 'Cancelled'), (5, 'Expired'), (6, 'Waiting for trade collateral and buyer invoice'), (7, 'Waiting only for seller trade collateral'), (8, 'Waiting only for buyer invoice'), (9, 'Sending fiat - In chatroom'), (10, 'Fiat sent - In chatroom'), (11, 'In dispute'), (12, 'Collaboratively cancelled'), (13, 'Sending satoshis to buyer'), (14, 'Successful trade'), (15, 'Failed lightning network routing'), (16, 'Wait for dispute resolution'), (17, 'Maker lost dispute'), (18, 'Taker lost dispute')], default=0),
|
||||
),
|
||||
]
|
@ -4,5 +4,14 @@ from .market_tick import MarketTick
|
||||
from .onchain_payment import OnchainPayment
|
||||
from .order import Order
|
||||
from .robot import Robot
|
||||
from .notification import Notification
|
||||
|
||||
__all__ = ["Currency", "LNPayment", "MarketTick", "OnchainPayment", "Order", "Robot"]
|
||||
__all__ = [
|
||||
"Currency",
|
||||
"LNPayment",
|
||||
"MarketTick",
|
||||
"OnchainPayment",
|
||||
"Order",
|
||||
"Robot",
|
||||
"Notification",
|
||||
]
|
||||
|
@ -1,5 +1,6 @@
|
||||
import json
|
||||
|
||||
from decimal import Decimal
|
||||
from django.core.validators import MinValueValidator
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
@ -18,7 +19,7 @@ class Currency(models.Model):
|
||||
decimal_places=4,
|
||||
default=None,
|
||||
null=True,
|
||||
validators=[MinValueValidator(0)],
|
||||
validators=[MinValueValidator(Decimal(0))],
|
||||
)
|
||||
timestamp = models.DateTimeField(default=timezone.now)
|
||||
|
||||
|
@ -1,5 +1,6 @@
|
||||
import uuid
|
||||
|
||||
from decimal import Decimal
|
||||
from decouple import config
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
@ -27,21 +28,24 @@ class MarketTick(models.Model):
|
||||
decimal_places=2,
|
||||
default=None,
|
||||
null=True,
|
||||
validators=[MinValueValidator(0)],
|
||||
validators=[MinValueValidator(Decimal(0))],
|
||||
)
|
||||
volume = models.DecimalField(
|
||||
max_digits=8,
|
||||
decimal_places=8,
|
||||
default=None,
|
||||
null=True,
|
||||
validators=[MinValueValidator(0)],
|
||||
validators=[MinValueValidator(Decimal(0))],
|
||||
)
|
||||
premium = models.DecimalField(
|
||||
max_digits=5,
|
||||
decimal_places=2,
|
||||
default=None,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-100), MaxValueValidator(999)],
|
||||
validators=[
|
||||
MinValueValidator(Decimal(-100)),
|
||||
MaxValueValidator(Decimal(999))
|
||||
],
|
||||
blank=True,
|
||||
)
|
||||
currency = models.ForeignKey("api.Currency", null=True, on_delete=models.SET_NULL)
|
||||
@ -52,7 +56,10 @@ class MarketTick(models.Model):
|
||||
max_digits=4,
|
||||
decimal_places=4,
|
||||
default=0,
|
||||
validators=[MinValueValidator(0), MaxValueValidator(1)],
|
||||
validators=[
|
||||
MinValueValidator(Decimal(0)),
|
||||
MaxValueValidator(Decimal(1))
|
||||
],
|
||||
)
|
||||
|
||||
def log_a_tick(order):
|
||||
|
35
api/models/notification.py
Normal file
35
api/models/notification.py
Normal file
@ -0,0 +1,35 @@
|
||||
# We use custom seeded UUID generation during testing
|
||||
import uuid
|
||||
|
||||
from decouple import config
|
||||
from api.models import Order, Robot
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
if config("TESTING", cast=bool, default=False):
|
||||
import random
|
||||
import string
|
||||
|
||||
random.seed(1)
|
||||
chars = string.ascii_lowercase + string.digits
|
||||
|
||||
def custom_uuid():
|
||||
return uuid.uuid5(uuid.NAMESPACE_DNS, "".join(random.choices(chars, k=20)))
|
||||
|
||||
else:
|
||||
custom_uuid = uuid.uuid4
|
||||
|
||||
|
||||
class Notification(models.Model):
|
||||
# notification info
|
||||
created_at = models.DateTimeField(default=timezone.now)
|
||||
|
||||
robot = models.ForeignKey(Robot, on_delete=models.CASCADE, default=None)
|
||||
order = models.ForeignKey(Order, on_delete=models.CASCADE, default=None)
|
||||
|
||||
# notification details
|
||||
title = models.CharField(max_length=240, null=False, default=None)
|
||||
description = models.CharField(max_length=240, default=None, blank=True)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.title} {self.description}"
|
@ -1,3 +1,4 @@
|
||||
from decimal import Decimal
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
@ -58,7 +59,10 @@ class OnchainPayment(models.Model):
|
||||
default=2.05,
|
||||
null=False,
|
||||
blank=False,
|
||||
validators=[MinValueValidator(1), MaxValueValidator(999)],
|
||||
validators=[
|
||||
MinValueValidator(Decimal(1)),
|
||||
MaxValueValidator(Decimal(999))
|
||||
],
|
||||
)
|
||||
mining_fee_rate = models.DecimalField(
|
||||
max_digits=6,
|
||||
@ -66,7 +70,10 @@ class OnchainPayment(models.Model):
|
||||
default=2.05,
|
||||
null=False,
|
||||
blank=False,
|
||||
validators=[MinValueValidator(1), MaxValueValidator(999)],
|
||||
validators=[
|
||||
MinValueValidator(Decimal(1)),
|
||||
MaxValueValidator(Decimal(999))
|
||||
],
|
||||
)
|
||||
mining_fee_sats = models.PositiveBigIntegerField(default=0, null=False, blank=False)
|
||||
|
||||
|
@ -1,6 +1,7 @@
|
||||
# We use custom seeded UUID generation during testing
|
||||
import uuid
|
||||
|
||||
from decimal import Decimal
|
||||
from decouple import config
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
@ -9,6 +10,7 @@ from django.db import models
|
||||
from django.db.models.signals import pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django.utils import timezone
|
||||
from api.tasks import send_notification
|
||||
|
||||
if config("TESTING", cast=bool, default=False):
|
||||
import random
|
||||
@ -44,7 +46,7 @@ class Order(models.Model):
|
||||
DIS = 11, "In dispute"
|
||||
CCA = 12, "Collaboratively cancelled"
|
||||
PAY = 13, "Sending satoshis to buyer"
|
||||
SUC = 14, "Sucessful trade"
|
||||
SUC = 14, "Successful trade"
|
||||
FAI = 15, "Failed lightning network routing"
|
||||
WFR = 16, "Wait for dispute resolution"
|
||||
MLD = 17, "Maker lost dispute"
|
||||
@ -90,7 +92,7 @@ class Order(models.Model):
|
||||
decimal_places=2,
|
||||
default=0,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-100), MaxValueValidator(999)],
|
||||
validators=[MinValueValidator(Decimal(-100)), MaxValueValidator(Decimal(999))],
|
||||
blank=True,
|
||||
)
|
||||
# explicit
|
||||
@ -135,8 +137,8 @@ class Order(models.Model):
|
||||
default=settings.DEFAULT_BOND_SIZE,
|
||||
null=False,
|
||||
validators=[
|
||||
MinValueValidator(settings.MIN_BOND_SIZE), # 2 %
|
||||
MaxValueValidator(settings.MAX_BOND_SIZE), # 15 %
|
||||
MinValueValidator(Decimal(settings.MIN_BOND_SIZE)), # 2 %
|
||||
MaxValueValidator(Decimal(settings.MAX_BOND_SIZE)), # 15 %
|
||||
],
|
||||
blank=False,
|
||||
)
|
||||
@ -147,8 +149,8 @@ class Order(models.Model):
|
||||
decimal_places=6,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(-90),
|
||||
MaxValueValidator(90),
|
||||
MinValueValidator(Decimal(-90)),
|
||||
MaxValueValidator(Decimal(90)),
|
||||
],
|
||||
blank=True,
|
||||
)
|
||||
@ -157,8 +159,8 @@ class Order(models.Model):
|
||||
decimal_places=6,
|
||||
null=True,
|
||||
validators=[
|
||||
MinValueValidator(-180),
|
||||
MaxValueValidator(180),
|
||||
MinValueValidator(Decimal(-180)),
|
||||
MaxValueValidator(Decimal(180)),
|
||||
],
|
||||
blank=True,
|
||||
)
|
||||
@ -348,6 +350,8 @@ class Order(models.Model):
|
||||
self.log(
|
||||
f"Order state went from {old_status}: <i>{Order.Status(old_status).label}</i> to {new_status}: <i>{Order.Status(new_status).label}</i>"
|
||||
)
|
||||
if new_status == Order.Status.FAI:
|
||||
send_notification.delay(order_id=self.id, message="lightning_failed")
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=Order)
|
||||
|
@ -1,12 +1,8 @@
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.validators import validate_comma_separated_integer_list
|
||||
from django.db import models
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.db.models.signals import post_save
|
||||
from django.dispatch import receiver
|
||||
from django.utils.html import mark_safe
|
||||
|
||||
|
||||
class Robot(models.Model):
|
||||
@ -88,25 +84,5 @@ class Robot(models.Model):
|
||||
def save_user_robot(sender, instance, **kwargs):
|
||||
instance.robot.save()
|
||||
|
||||
@receiver(pre_delete, sender=User)
|
||||
def del_avatar_from_disk(sender, instance, **kwargs):
|
||||
try:
|
||||
avatar_file = Path(
|
||||
settings.AVATAR_ROOT + instance.robot.avatar.url.split("/")[-1]
|
||||
)
|
||||
avatar_file.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def __str__(self):
|
||||
return self.user.username
|
||||
|
||||
# to display avatars in admin panel
|
||||
def get_avatar(self):
|
||||
if not self.avatar:
|
||||
return settings.STATIC_ROOT + "unknown_avatar.png"
|
||||
return self.avatar.url
|
||||
|
||||
# method to create a fake table field in read only mode
|
||||
def avatar_tag(self):
|
||||
return mark_safe('<img src="%s" width="50" height="50" />' % self.get_avatar())
|
||||
|
@ -1,7 +1,10 @@
|
||||
import hashlib
|
||||
import time
|
||||
|
||||
from .utils import human_format
|
||||
# UNUSED
|
||||
# import time
|
||||
|
||||
# UNUSED
|
||||
# from .utils import human_format
|
||||
|
||||
|
||||
class NickGenerator:
|
||||
@ -94,7 +97,7 @@ class NickGenerator:
|
||||
# if self.verbose:
|
||||
# print(f"Adverb: {adv}, id {adv_id}.")
|
||||
else:
|
||||
adv_id, adv, remainder = 0, "", nick_id
|
||||
adv, remainder = "", nick_id
|
||||
|
||||
# Compute adjective id
|
||||
if self.use_adj:
|
||||
|
114
api/nostr.py
Normal file
114
api/nostr.py
Normal file
@ -0,0 +1,114 @@
|
||||
import pygeohash
|
||||
import hashlib
|
||||
import uuid
|
||||
|
||||
from asgiref.sync import sync_to_async
|
||||
from nostr_sdk import Keys, Client, EventBuilder, NostrSigner, Kind, Tag
|
||||
from api.models import Order
|
||||
from decouple import config
|
||||
|
||||
|
||||
class Nostr:
|
||||
"""Simple nostr events manager to be used as a cache system for clients"""
|
||||
|
||||
async def send_order_event(self, order):
|
||||
"""Creates the event and sends it to the coordinator relay"""
|
||||
|
||||
if config("NOSTR_NSEC", cast=str, default="") == "":
|
||||
return
|
||||
|
||||
print("Sending nostr event")
|
||||
|
||||
# Initialize with coordinator Keys
|
||||
keys = Keys.parse(config("NOSTR_NSEC", cast=str))
|
||||
signer = NostrSigner.keys(keys)
|
||||
client = Client(signer)
|
||||
|
||||
# Add relays and connect
|
||||
await client.add_relays(["ws://localhost:7777"])
|
||||
await client.connect()
|
||||
|
||||
robot_name = await self.get_robot_name(order)
|
||||
robot_hash_id = await self.get_robot_hash_id(order)
|
||||
currency = await self.get_robot_currency(order)
|
||||
|
||||
event = EventBuilder(
|
||||
Kind(38383),
|
||||
"",
|
||||
self.generate_tags(order, robot_name, robot_hash_id, currency),
|
||||
).to_event(keys)
|
||||
await client.send_event(event)
|
||||
print(f"Nostr event sent: {event.as_json()}")
|
||||
|
||||
@sync_to_async
|
||||
def get_robot_name(self, order):
|
||||
return order.maker.username
|
||||
|
||||
@sync_to_async
|
||||
def get_robot_hash_id(self, order):
|
||||
return order.maker.robot.hash_id
|
||||
|
||||
@sync_to_async
|
||||
def get_robot_currency(self, order):
|
||||
return str(order.currency)
|
||||
|
||||
def generate_tags(self, order, robot_name, robot_hash_id, currency):
|
||||
hashed_id = hashlib.md5(
f"{config('COORDINATOR_ALIAS', cast=str)}{order.id}".encode("utf-8")
).hexdigest()
|
||||
|
||||
tags = [
|
||||
Tag.parse(["d", str(uuid.UUID(hashed_id))]),
|
||||
Tag.parse(["name", robot_name, robot_hash_id]),
|
||||
Tag.parse(["k", "sell" if order.type == Order.Types.SELL else "buy"]),
|
||||
Tag.parse(["f", currency]),
|
||||
Tag.parse(["s", self.get_status_tag(order)]),
|
||||
Tag.parse(["amt", "0"]),
|
||||
Tag.parse(
|
||||
["fa"]
|
||||
+ (
|
||||
[str(order.amount)]
|
||||
if not order.has_range
|
||||
else [str(order.min_amount), str(order.max_amount)]
|
||||
)
|
||||
),
|
||||
Tag.parse(["pm"] + order.payment_method.split(" ")),
|
||||
Tag.parse(["premium", str(order.premium)]),
|
||||
Tag.parse(
|
||||
[
|
||||
"source",
|
||||
f"http://{config("HOST_NAME")}/order/{config("COORDINATOR_ALIAS", cast=str).lower()}/{order.id}",
|
||||
]
|
||||
),
|
||||
Tag.parse(
|
||||
[
|
||||
"expiration",
|
||||
str(int(order.expires_at.timestamp())),
|
||||
str(order.escrow_duration),
|
||||
]
|
||||
),
|
||||
Tag.parse(["y", "robosats", config("COORDINATOR_ALIAS", cast=str).lower()]),
|
||||
Tag.parse(["network", str(config("NETWORK"))]),
|
||||
Tag.parse(["layer"] + self.get_layer_tag(order)),
|
||||
Tag.parse(["bond", str(order.bond_size)]),
|
||||
Tag.parse(["z", "order"]),
|
||||
]
|
||||
|
||||
if order.latitude and order.longitude:
|
||||
tags.extend(
|
||||
[Tag.parse(["g", pygeohash.encode(order.latitude, order.longitude)])]
|
||||
)
|
||||
|
||||
return tags
|
||||
|
||||
def get_status_tag(self, order):
|
||||
if order.status == Order.Status.PUB:
|
||||
return "pending"
|
||||
else:
|
||||
return "success"
|
||||
|
||||
def get_layer_tag(self, order):
|
||||
if order.type == Order.Types.SELL:
|
||||
return ["onchain", "lightning"]
|
||||
else:
|
||||
return ["lightning"]
|
@ -1,12 +1,14 @@
|
||||
from secrets import token_urlsafe
|
||||
|
||||
from decouple import config
|
||||
|
||||
from api.models import Order
|
||||
from api.models import (
|
||||
Order,
|
||||
Notification,
|
||||
)
|
||||
from api.utils import get_session
|
||||
|
||||
|
||||
class Telegram:
|
||||
class Notifications:
|
||||
"""Simple telegram messages using TG's API"""
|
||||
|
||||
session = get_session()
|
||||
@ -29,13 +31,24 @@ class Telegram:
|
||||
|
||||
return context
|
||||
|
||||
def send_message(self, chat_id, text):
|
||||
def send_message(self, order, robot, title, description=""):
|
||||
"""Save a message for a user and sends it to Telegram"""
|
||||
self.save_message(order, robot, title, description)
|
||||
if robot.telegram_enabled:
|
||||
self.send_telegram_message(robot.telegram_chat_id, title, description)
|
||||
|
||||
def save_message(self, order, robot, title, description=""):
|
||||
"""Save a message for a user"""
|
||||
Notification.objects.create(
|
||||
title=title, description=description, robot=robot, order=order
|
||||
)
|
||||
|
||||
def send_telegram_message(self, chat_id, title, description=""):
|
||||
"""sends a message to a user with telegram notifications enabled"""
|
||||
|
||||
bot_token = config("TELEGRAM_TOKEN")
|
||||
|
||||
text = f"{title} {description}"
|
||||
message_url = f"https://api.telegram.org/bot{bot_token}/sendMessage?chat_id={chat_id}&text={text}"
|
||||
|
||||
# if it fails, it should keep trying
|
||||
while True:
|
||||
try:
|
||||
@ -49,119 +62,127 @@ class Telegram:
|
||||
lang = user.robot.telegram_lang_code
|
||||
|
||||
if lang == "es":
|
||||
text = f"🔔 Hola {user.username}, te enviaré notificaciones sobre tus órdenes en RoboSats."
|
||||
title = f"🔔 Hola {user.username}, te enviaré notificaciones sobre tus órdenes en RoboSats."
|
||||
else:
|
||||
text = f"🔔 Hey {user.username}, I will send you notifications about your RoboSats orders."
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
title = f"🔔 Hey {user.username}, I will send you notifications about your RoboSats orders."
|
||||
self.send_telegram_message(user.robot.telegram_chat_id, title)
|
||||
user.robot.telegram_welcomed = True
|
||||
user.robot.save(update_fields=["telegram_welcomed"])
|
||||
return
|
||||
|
||||
def order_taken_confirmed(self, order):
|
||||
if order.maker.robot.telegram_enabled:
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"✅ Hey {order.maker.username} ¡Tu orden con ID {order.id} ha sido tomada por {order.taker.username}!🥳 Visita http://{self.site}/order/{order.id} para continuar."
|
||||
else:
|
||||
text = f"✅ Hey {order.maker.username}, your order was taken by {order.taker.username}!🥳 Visit http://{self.site}/order/{order.id} to proceed with the trade."
|
||||
self.send_message(order.maker.robot.telegram_chat_id, text)
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"✅ Hey {order.maker.username} ¡Tu orden con ID {order.id} ha sido tomada por {order.taker.username}!🥳"
|
||||
description = f"Visita http://{self.site}/order/{order.id} para continuar."
|
||||
else:
|
||||
title = f"✅ Hey {order.maker.username}, your order was taken by {order.taker.username}!🥳"
|
||||
description = (
|
||||
f"Visit http://{self.site}/order/{order.id} to proceed with the trade."
|
||||
)
|
||||
self.send_message(order, order.maker.robot, title, description)
|
||||
|
||||
if order.taker.robot.telegram_enabled:
|
||||
lang = order.taker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"✅ Hey {order.taker.username}, acabas de tomar la orden con ID {order.id}."
|
||||
else:
|
||||
text = f"✅ Hey {order.taker.username}, you just took the order with ID {order.id}."
|
||||
self.send_message(order.taker.robot.telegram_chat_id, text)
|
||||
lang = order.taker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"✅ Hey {order.taker.username}, acabas de tomar la orden con ID {order.id}."
|
||||
else:
|
||||
title = f"✅ Hey {order.taker.username}, you just took the order with ID {order.id}."
|
||||
self.send_message(order, order.taker.robot, title)
|
||||
|
||||
return
|
||||
|
||||
def fiat_exchange_starts(self, order):
|
||||
for user in [order.maker, order.taker]:
|
||||
if user.robot.telegram_enabled:
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"✅ Hey {user.username}, el depósito de garantía y el recibo del comprador han sido recibidos. Es hora de enviar el dinero fiat. Visita http://{self.site}/order/{order.id} para hablar con tu contraparte."
|
||||
else:
|
||||
text = f"✅ Hey {user.username}, the escrow and invoice have been submitted. The fiat exchange starts now via the platform chat. Visit http://{self.site}/order/{order.id} to talk with your counterpart."
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"✅ Hey {user.username}, el depósito de garantía y el recibo del comprador han sido recibidos. Es hora de enviar el dinero fiat."
|
||||
description = f"Visita http://{self.site}/order/{order.id} para hablar con tu contraparte."
|
||||
else:
|
||||
title = f"✅ Hey {user.username}, the escrow and invoice have been submitted. The fiat exchange starts now via the platform chat."
|
||||
description = f"Visit http://{self.site}/order/{order.id} to talk with your counterpart."
|
||||
self.send_message(order, user.robot, title, description)
|
||||
return
|
||||
|
||||
def order_expired_untaken(self, order):
|
||||
if order.maker.robot.telegram_enabled:
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"😪 Hey {order.maker.username}, tu orden con ID {order.id} ha expirado sin ser tomada por ningún robot. Visita http://{self.site}/order/{order.id} para renovarla."
|
||||
else:
|
||||
text = f"😪 Hey {order.maker.username}, your order with ID {order.id} has expired without a taker. Visit http://{self.site}/order/{order.id} to renew it."
|
||||
self.send_message(order.maker.robot.telegram_chat_id, text)
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"😪 Hey {order.maker.username}, tu orden con ID {order.id} ha expirado sin ser tomada por ningún robot."
|
||||
description = f"Visita http://{self.site}/order/{order.id} para renovarla."
|
||||
else:
|
||||
title = f"😪 Hey {order.maker.username}, your order with ID {order.id} has expired without a taker."
|
||||
description = f"Visit http://{self.site}/order/{order.id} to renew it."
|
||||
self.send_message(order, order.maker.robot, title, description)
|
||||
return
|
||||
|
||||
def trade_successful(self, order):
|
||||
for user in [order.maker, order.taker]:
|
||||
if user.robot.telegram_enabled:
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"🥳 ¡Tu orden con ID {order.id} ha finalizado exitosamente!⚡ Únete a nosotros en @robosats_es y ayúdanos a mejorar."
|
||||
else:
|
||||
text = f"🥳 Your order with ID {order.id} has finished successfully!⚡ Join us @robosats and help us improve."
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"🥳 ¡Tu orden con ID {order.id} ha finalizado exitosamente!"
|
||||
description = (
|
||||
"⚡ Únete a nosotros en @robosats_es y ayúdanos a mejorar."
|
||||
)
|
||||
else:
|
||||
title = f"🥳 Your order with ID {order.id} has finished successfully!"
|
||||
description = "⚡ Join us @robosats and help us improve."
|
||||
self.send_message(order, user.robot, title, description)
|
||||
return
|
||||
|
||||
def public_order_cancelled(self, order):
|
||||
if order.maker.robot.telegram_enabled:
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"❌ Hey {order.maker.username}, has cancelado tu orden pública con ID {order.id}."
|
||||
else:
|
||||
text = f"❌ Hey {order.maker.username}, you have cancelled your public order with ID {order.id}."
|
||||
self.send_message(order.maker.robot.telegram_chat_id, text)
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"❌ Hey {order.maker.username}, has cancelado tu orden pública con ID {order.id}."
|
||||
else:
|
||||
title = f"❌ Hey {order.maker.username}, you have cancelled your public order with ID {order.id}."
|
||||
self.send_message(order, order.maker.robot, title)
|
||||
return
|
||||
|
||||
def collaborative_cancelled(self, order):
|
||||
for user in [order.maker, order.taker]:
|
||||
if user.robot.telegram_enabled:
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"❌ Hey {user.username}, tu orden con ID {str(order.id)} fue cancelada colaborativamente."
|
||||
else:
|
||||
text = f"❌ Hey {user.username}, your order with ID {str(order.id)} has been collaboratively cancelled."
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"❌ Hey {user.username}, tu orden con ID {str(order.id)} fue cancelada colaborativamente."
|
||||
else:
|
||||
title = f"❌ Hey {user.username}, your order with ID {str(order.id)} has been collaboratively cancelled."
|
||||
self.send_message(order, user.robot, title)
|
||||
return
|
||||
|
||||
def dispute_opened(self, order):
|
||||
for user in [order.maker, order.taker]:
|
||||
if user.robot.telegram_enabled:
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
text = f"⚖️ Hey {user.username}, la orden con ID {str(order.id)} ha entrado en disputa."
|
||||
else:
|
||||
text = f"⚖️ Hey {user.username}, a dispute has been opened on your order with ID {str(order.id)}."
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
lang = user.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"⚖️ Hey {user.username}, la orden con ID {str(order.id)} ha entrado en disputa."
|
||||
else:
|
||||
title = f"⚖️ Hey {user.username}, a dispute has been opened on your order with ID {str(order.id)}."
|
||||
self.send_message(order, user.robot, title)
|
||||
|
||||
admin_chat_id = config("TELEGRAM_ADMIN_CHAT_ID")
|
||||
admin_chat_id = config("TELEGRAM_COORDINATOR_CHAT_ID")
|
||||
|
||||
if len(admin_chat_id) == 0:
|
||||
return
|
||||
|
||||
coordinator_text = f"There is a new dispute opened for the order with ID {str(order.id)}. Visit http://{self.site}/coordinator/api/order/{str(order.id)}/change to proceed."
|
||||
self.send_message(admin_chat_id, coordinator_text)
|
||||
coordinator_text = (
|
||||
f"There is a new dispute opened for the order with ID {str(order.id)}."
|
||||
)
|
||||
coordinator_description = f"Visit http://{self.site}/coordinator/api/order/{str(order.id)}/change to proceed."
|
||||
self.send_telegram_message(
|
||||
admin_chat_id, coordinator_text, coordinator_description
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
def order_published(self, order):
|
||||
if order.maker.robot.telegram_enabled:
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
# In weird cases the order cannot be found (e.g. it is cancelled)
|
||||
queryset = Order.objects.filter(maker=order.maker)
|
||||
if len(queryset) == 0:
|
||||
return
|
||||
order = queryset.last()
|
||||
if lang == "es":
|
||||
text = f"✅ Hey {order.maker.username}, tu orden con ID {str(order.id)} es pública en el libro de ordenes."
|
||||
else:
|
||||
text = f"✅ Hey {order.maker.username}, your order with ID {str(order.id)} is public in the order book."
|
||||
self.send_message(order.maker.robot.telegram_chat_id, text)
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
# In weird cases the order cannot be found (e.g. it is cancelled)
|
||||
queryset = Order.objects.filter(maker=order.maker)
|
||||
if len(queryset) == 0:
|
||||
return
|
||||
order = queryset.last()
|
||||
if lang == "es":
|
||||
title = f"✅ Hey {order.maker.username}, tu orden con ID {str(order.id)} es pública en el libro de ordenes."
|
||||
else:
|
||||
title = f"✅ Hey {order.maker.username}, your order with ID {str(order.id)} is public in the order book."
|
||||
self.send_message(order, order.maker.robot, title)
|
||||
return
|
||||
|
||||
def new_chat_message(self, order, chat_message):
|
||||
@ -189,14 +210,56 @@ class Telegram:
|
||||
notification_reason = f"(You receive this notification because this was the first in-chat message. You will only be notified again if there is a gap bigger than {TIMEGAP} minutes between messages)"
|
||||
|
||||
user = chat_message.receiver
|
||||
if user.robot.telegram_enabled:
|
||||
text = f"💬 Hey {user.username}, a new chat message in-app was sent to you by {chat_message.sender.username} for order ID {str(order.id)}. {notification_reason}"
|
||||
self.send_message(user.robot.telegram_chat_id, text)
|
||||
title = f"💬 Hey {user.username}, a new chat message in-app was sent to you by {chat_message.sender.username} for order ID {str(order.id)}."
|
||||
self.send_message(order, user.robot, title, notification_reason)
|
||||
|
||||
return
|
||||
|
||||
def coordinator_cancelled(self, order):
|
||||
if order.maker.robot.telegram_enabled:
|
||||
text = f"🛠️ Your order with ID {order.id} has been cancelled by the coordinator {config('COORDINATOR_ALIAS', cast=str, default='NoAlias')} for the upcoming maintenance stop."
|
||||
self.send_message(order.maker.robot.telegram_chat_id, text)
|
||||
title = f"🛠️ Your order with ID {order.id} has been cancelled by the coordinator {config('COORDINATOR_ALIAS', cast=str, default='NoAlias')} for the upcoming maintenance stop."
|
||||
self.send_message(order, order.maker.robot, title)
|
||||
return
|
||||
|
||||
def dispute_closed(self, order):
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if order.status == Order.Status.MLD:
|
||||
# Maker lost dispute
|
||||
looser = order.maker
|
||||
winner = order.taker
|
||||
elif order.status == Order.Status.TLD:
|
||||
# Taker lost dispute
|
||||
looser = order.taker
|
||||
winner = order.maker
|
||||
|
||||
lang = looser.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"⚖️ Hey {looser.username}, has perdido la disputa en la orden con ID {str(order.id)}."
|
||||
else:
|
||||
title = f"⚖️ Hey {looser.username}, you lost the dispute on your order with ID {str(order.id)}."
|
||||
self.send_message(order, looser.robot, title)
|
||||
|
||||
lang = winner.robot.telegram_lang_code
|
||||
if lang == "es":
|
||||
title = f"⚖️ Hey {winner.username}, has ganado la disputa en la orden con ID {str(order.id)}."
|
||||
else:
|
||||
title = f"⚖️ Hey {winner.username}, you won the dispute on your order with ID {str(order.id)}."
|
||||
self.send_message(order, winner.robot, title)
|
||||
|
||||
return
|
||||
|
||||
def lightning_failed(self, order):
|
||||
lang = order.maker.robot.telegram_lang_code
|
||||
if order.type == Order.Types.BUY:
|
||||
buyer = order.maker
|
||||
else:
|
||||
buyer = order.taker
|
||||
|
||||
if lang == "es":
|
||||
title = f"⚡❌ Hey {buyer.username}, el pago lightning en la order con ID {str(order.id)} ha fallado."
|
||||
description = "Intentalo de nuevo con una nueva factura o con otra wallet."
|
||||
else:
|
||||
title = f"⚡❌ Hey {buyer.username}, the lightning payment on your order with ID {str(order.id)} failed."
|
||||
description = "Try again with a new invoice or from another wallet."
|
||||
|
||||
self.send_message(order, buyer.robot, title, description)
|
||||
return
|
||||
|
@ -112,7 +112,7 @@ class OrderViewSchema:
|
||||
- `11` "In dispute"
|
||||
- `12` "Collaboratively cancelled"
|
||||
- `13` "Sending satoshis to buyer"
|
||||
- `14` "Sucessful trade"
|
||||
- `14` "Successful trade"
|
||||
- `15` "Failed lightning network routing"
|
||||
- `16` "Wait for dispute resolution"
|
||||
- `17` "Maker lost dispute"
|
||||
@ -219,14 +219,17 @@ class OrderViewSchema:
|
||||
- `update_invoice`
- This action is only valid if you are the buyer. The `invoice`
field needs to be present in the body and the value must be a
valid LN invoice as cleartext PGP message signed with the robot key. Make sure to perform this action only when
valid LN invoice as cleartext PGP message signed (SHA512) with the robot key.
The amount of the invoice should be `invoice_amount` minus the routing
budget whose parts per million should be specified by `routing_budget_ppm` (a worked example follows below).
Make sure to perform this action only when
both the bonds are locked. i.e. the status of your order is
at least `6` (Waiting for trade collateral and buyer invoice)
- `update_address`
- This action is only valid if you are the buyer. This action is
used to set an on-chain payout address if you wish to have your
payout be received on-chain. Only valid if there is an address in the body as
cleartext PGP message signed with the robot key. This enables on-chain swap for the
cleartext PGP message signed (SHA512) with the robot key. This enables on-chain swap for the
order, so even if you earlier had submitted a LN invoice, it
will be ignored. You get to choose the `mining_fee_rate` as
well. Mining fee rate is specified in sats/vbyte.
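For illustration, the client-side arithmetic implied here is just a subtraction of the routing budget from `invoice_amount`; the figures below are made up and this helper snippet is not part of the API:

```python
# Sketch only: sizing the invoice submitted with `update_invoice`.
# invoice_amount and routing_budget_ppm are example values, not real API output.
invoice_amount = 95_000        # sats the coordinator will pay out
routing_budget_ppm = 1_000     # client-chosen routing budget (0.1%)

routing_budget = invoice_amount * routing_budget_ppm // 1_000_000   # 95 sats
amount_to_invoice = invoice_amount - routing_budget                 # 94_905 sats
```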
|
||||
@ -246,9 +249,7 @@ class OrderViewSchema:
|
||||
mid-trade so use this action carefully:
|
||||
|
||||
- As a maker if you cancel an order after you have locked your
|
||||
maker bond, you are returned your bond. This may change in
|
||||
the future to prevent DDoSing the LN node and you won't be
|
||||
returned the maker bond.
|
||||
maker bond, you are returned your bond.
|
||||
- As a taker there is a time penalty involved if you `take` an
|
||||
order and cancel it without locking the taker bond.
|
||||
- For both taker or maker, if you cancel the order when both
|
||||
@ -377,6 +378,21 @@ class BookViewSchema:
|
||||
}
|
||||
|
||||
|
||||
class NotificationSchema:
|
||||
get = {
|
||||
"summary": "Get robot notifications",
|
||||
"description": "Get a list of notifications sent to the robot.",
|
||||
"parameters": [
|
||||
OpenApiParameter(
|
||||
name="created_at",
|
||||
location=OpenApiParameter.QUERY,
|
||||
description=("Shows notifications created AFTER this date."),
|
||||
type=str,
|
||||
),
|
||||
],
|
||||
}
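As a rough illustration, a client could poll this endpoint as sketched below; the onion base URL and the exact Authorization header format are assumptions for the example, not values defined in this schema:

```python
import requests  # assumed to be available in the client environment

BASE_URL = "http://<coordinator-onion-address>/api"  # hypothetical coordinator address
HEADERS = {"Authorization": "Token <base91-encoded-sha256-of-robot-token>"}  # assumed format

# Only fetch notifications created after a given datetime
params = {"created_at": "2024-08-22T08:30:00Z"}
response = requests.get(f"{BASE_URL}/notifications/", headers=HEADERS, params=params)
for notification in response.json():
    print(notification["created_at"], notification["title"])
```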
|
||||
|
||||
|
||||
class RobotViewSchema:
|
||||
get = {
|
||||
"summary": "Get robot info",
|
||||
@ -387,12 +403,13 @@ class RobotViewSchema:
|
||||
An authenticated request (has the token's sha256 hash encoded as base 91 in the Authorization header) will be
|
||||
returned the information about the state of a robot.
|
||||
|
||||
Make sure you generate your token using cryptographically secure methods. [Here's]() the function the Javascript
client uses to generate the tokens. Since the server only receives the hash of the
Make sure you generate your token using cryptographically secure methods.
Since the server only receives the hash of the
token, it is the responsibility of the client to create a strong token. Check
[here](https://github.com/RoboSats/robosats/blob/main/frontend/src/utils/token.js)
[here](https://github.com/RoboSats/robosats/blob/main/frontend/src/utils/token.ts)
to see how the Javascript client creates a random strong token and how it validates entropy is optimal for tokens
created by the user at will.
The PGP key should be an EdDSA ed25519/cert,sign+cv25519/encr key.
|
||||
|
||||
`public_key` - PGP key associated with the user (Armored ASCII format)
|
||||
`encrypted_private_key` - Private PGP key. This is only stored on the backend for later fetching by
|
||||
@ -403,7 +420,7 @@ class RobotViewSchema:
|
||||
A gpg key can be created by:
|
||||
|
||||
```shell
|
||||
gpg --full-gen-key
|
||||
gpg --default-new-key-algo "ed25519/cert,sign+cv25519/encr" --full-gen-key
|
||||
```
|
||||
|
||||
its public key can be exported in ASCII-armored format with:
|
||||
@ -531,7 +548,7 @@ class InfoViewSchema:
|
||||
class RewardViewSchema:
|
||||
post = {
|
||||
"summary": "Withdraw reward",
|
||||
"description": "Withdraw user reward by submitting an invoice. The invoice must be send as cleartext PGP message signed with the robot key",
|
||||
"description": "Withdraw user reward by submitting an invoice. The invoice must be send as cleartext PGP message signed (SHA512) with the robot key",
|
||||
"responses": {
|
||||
200: {
|
||||
"type": "object",
|
||||
|
@ -1,7 +1,8 @@
|
||||
from decouple import config
|
||||
from decimal import Decimal
|
||||
from rest_framework import serializers
|
||||
|
||||
from .models import MarketTick, Order
|
||||
from .models import MarketTick, Order, Notification
|
||||
|
||||
RETRY_TIME = int(config("RETRY_TIME"))
|
||||
|
||||
@ -489,11 +490,25 @@ class OrderDetailSerializer(serializers.ModelSerializer):
|
||||
)
|
||||
|
||||
|
||||
class ListNotificationSerializer(serializers.ModelSerializer):
|
||||
status = serializers.SerializerMethodField(
|
||||
help_text="The `status` of the order when the notification was triggered",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Notification
|
||||
fields = ("title", "description", "order_id", "status", "created_at")
|
||||
|
||||
def get_status(self, notification) -> int:
|
||||
return notification.order.status
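For reference, one serialized notification from this serializer would look roughly like the sketch below (all values are invented):

```python
# Illustrative response item only; field values are made up.
{
    "title": "✅ Hey HodlRobot, your order was taken by SatsRobot!🥳",
    "description": "Visit http://<coordinator>/order/123 to proceed with the trade.",
    "order_id": 123,
    "status": 6,  # status of the related order, via get_status()
    "created_at": "2024-08-22T08:30:00Z",
}
```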
|
||||
|
||||
|
||||
class OrderPublicSerializer(serializers.ModelSerializer):
|
||||
maker_nick = serializers.CharField(required=False)
|
||||
maker_hash_id = serializers.CharField(required=False)
|
||||
maker_status = serializers.CharField(
|
||||
help_text='Status of the nick - "Active" or "Inactive"', required=False
|
||||
help_text='Status of the nick - "Active", "Seen Recently" or "Inactive"',
|
||||
required=False,
|
||||
)
|
||||
price = serializers.FloatField(
|
||||
help_text="Price in order's fiat currency", required=False
|
||||
@ -583,7 +598,7 @@ class UpdateOrderSerializer(serializers.Serializer):
|
||||
)
|
||||
routing_budget_ppm = serializers.IntegerField(
|
||||
default=0,
|
||||
min_value=0,
|
||||
min_value=Decimal(0),
|
||||
max_value=100_001,
|
||||
allow_null=True,
|
||||
required=False,
|
||||
|
53
api/tasks.py
53
api/tasks.py
@ -1,3 +1,4 @@
|
||||
from asgiref.sync import async_to_sync
|
||||
from celery import shared_task
|
||||
from celery.exceptions import SoftTimeLimitExceeded
|
||||
|
||||
@ -251,6 +252,20 @@ def cache_market():
|
||||
return
|
||||
|
||||
|
||||
@shared_task(name="", ignore_result=True, time_limit=120)
|
||||
def nostr_send_order_event(order_id=None):
|
||||
if order_id:
|
||||
from api.models import Order
|
||||
from api.nostr import Nostr
|
||||
|
||||
order = Order.objects.get(id=order_id)
|
||||
|
||||
nostr = Nostr()
|
||||
async_to_sync(nostr.send_order_event)(order)
|
||||
|
||||
return
|
||||
|
||||
|
||||
@shared_task(name="send_notification", ignore_result=True, time_limit=120)
|
||||
def send_notification(order_id=None, chat_message_id=None, message=None):
|
||||
if order_id:
|
||||
@ -263,48 +278,50 @@ def send_notification(order_id=None, chat_message_id=None, message=None):
|
||||
chat_message = Message.objects.get(id=chat_message_id)
|
||||
order = chat_message.order
|
||||
|
||||
taker_enabled = False if order.taker is None else order.taker.robot.telegram_enabled
|
||||
if not (order.maker.robot.telegram_enabled or taker_enabled):
|
||||
return
|
||||
from api.notifications import Notifications
|
||||
|
||||
from api.notifications import Telegram
|
||||
|
||||
telegram = Telegram()
|
||||
notifications = Notifications()
|
||||
|
||||
if message == "welcome":
|
||||
telegram.welcome(order)
|
||||
notifications.welcome(order)
|
||||
|
||||
elif message == "order_expired_untaken":
|
||||
telegram.order_expired_untaken(order)
|
||||
notifications.order_expired_untaken(order)
|
||||
|
||||
elif message == "trade_successful":
|
||||
telegram.trade_successful(order)
|
||||
notifications.trade_successful(order)
|
||||
|
||||
elif message == "public_order_cancelled":
|
||||
telegram.public_order_cancelled(order)
|
||||
notifications.public_order_cancelled(order)
|
||||
|
||||
elif message == "taker_expired_b4bond":
|
||||
telegram.taker_expired_b4bond(order)
|
||||
notifications.taker_expired_b4bond(order)
|
||||
|
||||
elif message == "order_published":
|
||||
telegram.order_published(order)
|
||||
notifications.order_published(order)
|
||||
|
||||
elif message == "order_taken_confirmed":
|
||||
telegram.order_taken_confirmed(order)
|
||||
notifications.order_taken_confirmed(order)
|
||||
|
||||
elif message == "fiat_exchange_starts":
|
||||
telegram.fiat_exchange_starts(order)
|
||||
notifications.fiat_exchange_starts(order)
|
||||
|
||||
elif message == "dispute_opened":
|
||||
telegram.dispute_opened(order)
|
||||
notifications.dispute_opened(order)
|
||||
|
||||
elif message == "collaborative_cancelled":
|
||||
telegram.collaborative_cancelled(order)
|
||||
notifications.collaborative_cancelled(order)
|
||||
|
||||
elif message == "new_chat_message":
|
||||
telegram.new_chat_message(order, chat_message)
|
||||
notifications.new_chat_message(order, chat_message)
|
||||
|
||||
elif message == "coordinator_cancelled":
|
||||
telegram.coordinator_cancelled(order)
|
||||
notifications.coordinator_cancelled(order)
|
||||
|
||||
elif message == "dispute_closed":
|
||||
notifications.dispute_closed(order)
|
||||
|
||||
elif message == "lightning_failed":
|
||||
notifications.lightning_failed(order)
|
||||
|
||||
return
|
||||
|
@ -15,6 +15,7 @@ from .views import (
|
||||
RobotView,
|
||||
StealthView,
|
||||
TickView,
|
||||
NotificationsView,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
@ -36,4 +37,5 @@ urlpatterns = [
|
||||
path("ticks/", TickView.as_view(), name="ticks"),
|
||||
path("stealth/", StealthView.as_view(), name="stealth"),
|
||||
path("chat/", ChatView.as_view({"get": "get", "post": "post"}), name="chat"),
|
||||
path("notifications/", NotificationsView.as_view(), name="notifications"),
|
||||
]
|
||||
|
32
api/utils.py
32
api/utils.py
@ -141,7 +141,7 @@ def get_devfund_pubkey(network: str) -> str:
|
||||
"""
|
||||
|
||||
session = get_session()
|
||||
url = "https://raw.githubusercontent.com/RoboSats/robosats/main/devfund_pubey.json"
|
||||
url = "https://raw.githubusercontent.com/RoboSats/robosats/main/devfund_pubkey.json"
|
||||
|
||||
try:
|
||||
response = session.get(url)
|
||||
@ -188,8 +188,7 @@ def get_exchange_rates(currencies):
|
||||
blockchain_rates.append(
|
||||
float(blockchain_prices[currency]["last"])
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
except Exception:
|
||||
blockchain_rates.append(np.nan)
|
||||
api_rates.append(blockchain_rates)
|
||||
|
||||
@ -479,6 +478,33 @@ def is_valid_token(token: str) -> bool:
|
||||
return all(c in charset for c in token)
|
||||
|
||||
|
||||
def location_country(lon: float, lat: float) -> str:
|
||||
"""
|
||||
Returns the country code of a lon/lat location
|
||||
"""
|
||||
|
||||
from shapely.geometry import shape, Point
|
||||
from shapely.prepared import prep
|
||||
|
||||
# Load the GeoJSON data from a local file
|
||||
with open("frontend/static/assets/geo/countries-coastline-10km.geo.json") as f:
|
||||
countries_geojeson = json.load(f)
|
||||
|
||||
# Prepare the countries for reverse geocoding
|
||||
countries = {}
|
||||
for feature in countries_geojeson["features"]:
|
||||
geom = feature["geometry"]
|
||||
country_code = feature["properties"]["A3"]
|
||||
countries[country_code] = prep(shape(geom))
|
||||
|
||||
point = Point(lon, lat)
|
||||
for country_code, geom in countries.items():
|
||||
if geom.contains(point):
|
||||
return country_code
|
||||
|
||||
return "unknown"
|
||||
|
||||
|
||||
def objects_to_hyperlinks(logs: str) -> str:
|
||||
"""
|
||||
Parses strings that have Object(ID,NAME) that match API models.
|
||||
|
51
api/views.py
51
api/views.py
@ -1,11 +1,12 @@
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
from decouple import config
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.db.models import Q, Sum
|
||||
from django.utils import timezone
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.http import HttpResponseBadRequest
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import status, viewsets
|
||||
from rest_framework.authentication import TokenAuthentication
|
||||
@ -15,8 +16,15 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from api.logics import Logics
|
||||
from api.models import Currency, LNPayment, MarketTick, OnchainPayment, Order
|
||||
from api.notifications import Telegram
|
||||
from api.models import (
|
||||
Currency,
|
||||
LNPayment,
|
||||
MarketTick,
|
||||
OnchainPayment,
|
||||
Order,
|
||||
Notification,
|
||||
)
|
||||
from api.notifications import Notifications
|
||||
from api.oas_schemas import (
|
||||
BookViewSchema,
|
||||
HistoricalViewSchema,
|
||||
@ -29,6 +37,7 @@ from api.oas_schemas import (
|
||||
RobotViewSchema,
|
||||
StealthViewSchema,
|
||||
TickViewSchema,
|
||||
NotificationSchema,
|
||||
)
|
||||
from api.serializers import (
|
||||
ClaimRewardSerializer,
|
||||
@ -40,6 +49,7 @@ from api.serializers import (
|
||||
StealthSerializer,
|
||||
TickSerializer,
|
||||
UpdateOrderSerializer,
|
||||
ListNotificationSerializer,
|
||||
)
|
||||
from api.utils import (
|
||||
compute_avg_premium,
|
||||
@ -55,9 +65,6 @@ from control.models import AccountingDay, BalanceLog
|
||||
EXP_MAKER_BOND_INVOICE = int(config("EXP_MAKER_BOND_INVOICE"))
|
||||
RETRY_TIME = int(config("RETRY_TIME"))
|
||||
|
||||
avatar_path = Path(settings.AVATAR_ROOT)
|
||||
avatar_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
class MakerView(CreateAPIView):
|
||||
serializer_class = MakeOrderSerializer
|
||||
@ -166,6 +173,10 @@ class MakerView(CreateAPIView):
|
||||
if not valid:
|
||||
return Response(context, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
valid, context = Logics.validate_location(order)
|
||||
if not valid:
|
||||
return Response(context, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
order.save()
|
||||
order.log(
|
||||
f"Order({order.id},{order}) created by Robot({request.user.robot.id},{request.user})"
|
||||
@ -659,7 +670,7 @@ class RobotView(APIView):
|
||||
context["last_login"] = user.last_login
|
||||
|
||||
# Adds/generate telegram token and whether it is enabled
|
||||
context = {**context, **Telegram.get_context(user)}
|
||||
context = {**context, **Notifications.get_context(user)}
|
||||
|
||||
# return active order or last made order if any
|
||||
has_no_active_order, _, order = Logics.validate_already_maker_or_taker(
|
||||
@ -730,6 +741,32 @@ class BookView(ListAPIView):
|
||||
return Response(book_data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class NotificationsView(ListAPIView):
|
||||
authentication_classes = [TokenAuthentication]
|
||||
permission_classes = [IsAuthenticated]
|
||||
serializer_class = ListNotificationSerializer
|
||||
|
||||
@extend_schema(**NotificationSchema.get)
|
||||
def get(self, request, format=None):
|
||||
robot = request.user.robot
|
||||
queryset = Notification.objects.filter(robot=robot).order_by("-created_at")
|
||||
created_at = request.GET.get("created_at")
|
||||
|
||||
if created_at:
|
||||
created_at = parse_datetime(created_at)
|
||||
if not created_at:
|
||||
return HttpResponseBadRequest("Invalid date format")
|
||||
queryset = queryset.filter(created_at__gte=created_at)
|
||||
|
||||
notification_data = []
|
||||
for notification in queryset:
|
||||
data = self.serializer_class(notification).data
|
||||
data["order_id"] = notification.order.id
|
||||
notification_data.append(data)
|
||||
|
||||
return Response(notification_data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class InfoView(viewsets.ViewSet):
|
||||
serializer_class = InfoSerializer
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
from rest_framework import serializers
|
||||
|
||||
from decimal import Decimal
|
||||
from chat.models import Message
|
||||
|
||||
|
||||
@ -36,7 +36,7 @@ class ChatSerializer(serializers.ModelSerializer):
|
||||
allow_null=True,
|
||||
default=None,
|
||||
required=False,
|
||||
min_value=0,
|
||||
min_value=Decimal(0),
|
||||
help_text="Offset for message index to get as response",
|
||||
)
|
||||
|
||||
@ -66,7 +66,7 @@ class PostMessageSerializer(serializers.ModelSerializer):
|
||||
|
||||
order_id = serializers.IntegerField(
|
||||
required=True,
|
||||
min_value=0,
|
||||
min_value=Decimal(0),
|
||||
help_text="Your peer's public key",
|
||||
)
|
||||
|
||||
@ -74,7 +74,7 @@ class PostMessageSerializer(serializers.ModelSerializer):
|
||||
allow_null=True,
|
||||
default=None,
|
||||
required=False,
|
||||
min_value=0,
|
||||
min_value=Decimal(0),
|
||||
help_text="Offset for message index to get as response",
|
||||
)
|
||||
|
||||
|
41
desktopApp/Readme.md
Normal file
41
desktopApp/Readme.md
Normal file
@ -0,0 +1,41 @@
|
||||
# RoboSats Desktop App

The RoboSats desktop app serves the RoboSats frontend app directly and redirects all API requests to the RoboSats P2P market coordinator through your Tor proxy.

## How to Use

### Step 1: Clone the Repository

First, clone the repository to your local machine:

```bash
git clone https://github.com/RoboSats/robosats.git
cd robosats
```

### Step 2: Install Dependencies

```bash
cd desktopApp
npm install
```

### Step 3: Run the App Locally

```bash
npm run start
```

### Step 4: Package the App

To package the app for different platforms (Linux, Windows, macOS), use the corresponding npm commands:

```bash
npm run package-linux
npm run package-win
npm run package-mac
```

### Additional Information

This desktop app ensures all API requests are redirected through a Tor proxy to maintain privacy and anonymity while accessing the RoboSats P2P market coordinator.
|
||||
|
BIN
desktopApp/assets/icon/Robosats.icns
Normal file
BIN
desktopApp/assets/icon/Robosats.icns
Normal file
Binary file not shown.
BIN
desktopApp/assets/icon/Robosats.ico
Normal file
BIN
desktopApp/assets/icon/Robosats.ico
Normal file
Binary file not shown.
133
desktopApp/assets/icon/Robosats.svg
Normal file
133
desktopApp/assets/icon/Robosats.svg
Normal file
@ -0,0 +1,133 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 16.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
width="256px" height="256px" viewBox="0 0 256 256" enable-background="new 0 0 256 256" xml:space="preserve">
|
||||
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="128" y1="256" x2="128" y2="0">
|
||||
<stop offset="0.1269" style="stop-color:#CCCCCC"/>
|
||||
<stop offset="0.2947" style="stop-color:#E1E1E1"/>
|
||||
<stop offset="0.4889" style="stop-color:#FFFFFF"/>
|
||||
</linearGradient>
|
||||
<rect fill="url(#SVGID_1_)" width="256" height="256"/>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
|
||||
<linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="-2.2432" y1="60.3643" x2="167.041" y2="264.634" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_2_)" d="M65.814,152.139c0.91,0.784,1.91,1.396,2.96,1.85c-1.711-1.462-3.403-2.891-5.073-4.277
|
||||
C64.274,150.59,64.974,151.414,65.814,152.139z"/>
|
||||
|
||||
<linearGradient id="SVGID_3_" gradientUnits="userSpaceOnUse" x1="-2.2031" y1="60.4131" x2="167.0789" y2="264.6803" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_3_)" d="M65.814,152.139c0.91,0.784,1.91,1.396,2.96,1.85c-1.711-1.462-3.403-2.891-5.073-4.277
|
||||
C64.274,150.59,64.974,151.414,65.814,152.139z"/>
|
||||
|
||||
<linearGradient id="SVGID_4_" gradientUnits="userSpaceOnUse" x1="-1.457" y1="44.2812" x2="188.127" y2="273.0462" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_4_)" d="M82.001,151.025c4.159-4.727,3.658-11.9-1.122-16.016c-4.78-4.117-12.026-3.62-16.188,1.109
|
||||
c-3.428,3.899-3.682,9.45-0.99,13.592c1.67,1.387,3.362,2.815,5.073,4.277C73.242,155.909,78.634,154.855,82.001,151.025z"/>
|
||||
|
||||
<linearGradient id="SVGID_5_" gradientUnits="userSpaceOnUse" x1="6.127" y1="53.4326" x2="175.4021" y2="257.6916" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_5_)" d="M82.001,151.025c4.159-4.727,3.658-11.9-1.122-16.016c-4.78-4.117-12.026-3.62-16.188,1.109
|
||||
c-3.428,3.899-3.682,9.45-0.99,13.592c1.67,1.387,3.362,2.815,5.073,4.277C73.242,155.909,78.634,154.855,82.001,151.025z"/>
|
||||
|
||||
<linearGradient id="SVGID_6_" gradientUnits="userSpaceOnUse" x1="6.166" y1="53.4805" x2="175.4397" y2="257.7377" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_6_)" d="M82.001,151.025c4.159-4.727,3.658-11.9-1.122-16.016c-4.78-4.117-12.026-3.62-16.188,1.109
|
||||
c-3.428,3.899-3.682,9.45-0.99,13.592c1.67,1.387,3.362,2.815,5.073,4.277C73.242,155.909,78.634,154.855,82.001,151.025z"/>
|
||||
|
||||
<linearGradient id="SVGID_7_" gradientUnits="userSpaceOnUse" x1="6.5864" y1="37.6182" x2="196.1729" y2="266.3862" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_7_)" d="M131.52,178.628c-4.778-4.118-12.026-3.625-16.188,1.107
|
||||
c-4.159,4.729-3.654,11.902,1.124,16.021c4.779,4.114,12.028,3.62,16.185-1.113
|
||||
C136.801,189.911,136.299,182.742,131.52,178.628z"/>
|
||||
|
||||
<linearGradient id="SVGID_8_" gradientUnits="userSpaceOnUse" x1="14.1919" y1="46.7949" x2="183.4888" y2="251.0801" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_8_)" d="M131.52,178.628c-4.778-4.118-12.026-3.625-16.188,1.107
|
||||
c-4.159,4.729-3.654,11.902,1.124,16.021c4.779,4.114,12.028,3.62,16.185-1.113
|
||||
C136.801,189.911,136.299,182.742,131.52,178.628z"/>
|
||||
|
||||
<linearGradient id="SVGID_9_" gradientUnits="userSpaceOnUse" x1="14.2378" y1="46.8506" x2="183.5231" y2="251.1217" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_9_)" d="M131.52,178.628c-4.778-4.118-12.026-3.625-16.188,1.107
|
||||
c-4.159,4.729-3.654,11.902,1.124,16.021c4.779,4.114,12.028,3.62,16.185-1.113
|
||||
C136.801,189.911,136.299,182.742,131.52,178.628z"/>
|
||||
</g>
|
||||
|
||||
<linearGradient id="SVGID_10_" gradientUnits="userSpaceOnUse" x1="-56.7104" y1="46.0752" x2="181.1495" y2="333.0932" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_10_)" d="M47.215,171.653c-0.005,7.697-0.009,15.186-0.014,22.144c2.244-2.067,4.739-4.303,7.153-6.622
|
||||
c0.744-0.719,1.28-0.813,2.038-0.109c1.297,1.197,2.635,2.35,4.001,3.56c1.243-1.401,2.409-2.72,3.618-4.084
|
||||
C58.394,181.564,52.874,176.668,47.215,171.653z"/>
|
||||
|
||||
<linearGradient id="SVGID_11_" gradientUnits="userSpaceOnUse" x1="-71.7656" y1="17.4062" x2="178.9234" y2="319.9048" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_11_)" d="M96.606,215.496c-5.554-4.934-10.928-9.709-16.356-14.536c-1.225,1.39-2.362,2.679-3.551,4.024
|
||||
c1.533,1.378,2.997,2.693,4.539,4.078c-2.06,2.116-4.067,4.186-6.239,6.42C82.293,215.486,89.327,215.491,96.606,215.496z"/>
|
||||
|
||||
<linearGradient id="SVGID_12_" gradientUnits="userSpaceOnUse" x1="48.644" y1="2.749" x2="238.2449" y2="231.5344" gradientTransform="matrix(0.9999 -0.0101 0.0101 0.9999 1.9865 13.6532)">
|
||||
<stop offset="0.3295" style="stop-color:#1976D2"/>
|
||||
<stop offset="0.4214" style="stop-color:#2E69CC"/>
|
||||
<stop offset="0.6106" style="stop-color:#6548BE"/>
|
||||
<stop offset="0.7834" style="stop-color:#9C27B0"/>
|
||||
</linearGradient>
|
||||
<path fill="url(#SVGID_12_)" d="M151.123,140.642c4.551-2.31,8.836-5.034,12.748-8.313c7.891-6.625,13.992-14.499,16.895-24.56
|
||||
c2.469-8.565,3.027-17.312,2.158-26.142c-0.596-6.125-2.252-11.961-5.594-17.219c-7.176-11.29-17.574-18.036-30.375-21.218
|
||||
c-8.9-2.214-18.036-2.651-27.165-2.687c-23.748-0.088-47.498-0.055-71.249-0.067c-0.423-0.001-0.847,0.036-1.287,0.056
|
||||
c-0.015,24.535-0.031,48.95-0.046,73.32c15.731-11.838,31.863-14.195,48.42-2.57c2.176-2.02,4.324-4.011,6.562-6.088
|
||||
c-2.269-1.653-4.427-3.226-6.688-4.872c5.694-4.126,11.212-8.121,16.712-12.105c-1.47-3.392-0.892-6.063,1.599-7.667
|
||||
c2.145-1.383,5.17-0.997,6.868,0.874c1.745,1.923,1.889,4.86,0.337,6.912c-1.768,2.34-4.548,2.716-7.774,0.995
|
||||
c-2.781,3.42-5.572,6.854-8.424,10.36c2.357,1.672,4.611,3.269,6.938,4.919c-4.579,3.08-9.056,6.089-13.548,9.107
|
||||
c0.167,0.201,0.234,0.306,0.324,0.386c16.396,14.547,32.791,29.093,49.197,43.631c3.506,3.105,7.074,6.147,9.555,10.212
|
||||
c6.645,10.863,7.08,22.205,2.514,33.884c-2.002,5.131-5.035,9.634-9.098,13.737c19.465,0.012,38.66,0.024,58.096,0.036
|
||||
c-19.633-24.874-39.131-49.577-58.684-74.348C150.508,140.993,150.805,140.802,151.123,140.642z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 9.1 KiB |
94
desktopApp/index.js
Normal file
94
desktopApp/index.js
Normal file
@ -0,0 +1,94 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// Modules to control application life and create native browser window
|
||||
var electron_1 = require("electron");
|
||||
var child_process_1 = require("child_process");
|
||||
var path = require("path");
|
||||
var os = require("os");
|
||||
var tor = null;
|
||||
// Function to determine the current OS and find the appropriate Tor binary
|
||||
function checkPlatformAndRunTor() {
|
||||
var platform = os.platform();
|
||||
switch (platform) {
|
||||
case 'win32':
|
||||
tor = (0, child_process_1.spawn)(path.join(__dirname, '/tor/tor-win/tor/tor.exe'));
|
||||
break;
|
||||
case 'darwin':
|
||||
tor = (0, child_process_1.spawn)(path.join(__dirname, '/tor/tor-mac/tor/tor'));
|
||||
break;
|
||||
case 'linux':
|
||||
tor = (0, child_process_1.spawn)(path.join(__dirname, '/tor/tor-linux/tor/tor'));
|
||||
break;
|
||||
default:
|
||||
throw new Error("Unsupported platform: ".concat(platform));
|
||||
}
|
||||
}
|
||||
// Function to start Tor process
|
||||
checkPlatformAndRunTor();
|
||||
// Listen for Tor process stdout data
|
||||
tor.stdout.on("data", function (data) {
|
||||
var message = data.toString();
|
||||
console.log("Data received: ".concat(message));
|
||||
});
|
||||
// Listen for Tor process stderr data
|
||||
tor.stderr.on("data", function (data) {
|
||||
console.error("Error received: ".concat(data.toString()));
|
||||
electron_1.app.exit(1); // Exit the app if there's an error in the Tor process
|
||||
});
|
||||
// Function to create the main application window
|
||||
function createWindow() {
|
||||
// Create the browser window with specific dimensions
|
||||
var mainWindow = new electron_1.BrowserWindow({
|
||||
width: 1200,
|
||||
height: 800,
|
||||
icon: path.join(__dirname, '/static/assets/images/favicon-32x32.png'),
|
||||
webPreferences: {
|
||||
nodeIntegration: false, // Disable Node.js integration in the renderer
|
||||
contextIsolation: true, // Enable context isolation for security
|
||||
},
|
||||
});
|
||||
// Load the index.html file from the app directory
|
||||
mainWindow.loadURL("file://".concat(path.resolve(__dirname, 'index.html#/garage')), {
|
||||
extraHeaders: "pragma: no-cache\n" // Prevent caching of the loaded file
|
||||
});
|
||||
// Handle failed load attempts by reloading the file
|
||||
mainWindow.webContents.on("did-fail-load", function () {
|
||||
console.log("Failed to load the page, retrying...");
|
||||
mainWindow.loadURL("file://".concat(__dirname, "/index.html#/garage"));
|
||||
});
|
||||
// Uncomment the following line to open the DevTools
|
||||
// mainWindow.webContents.openDevTools();
|
||||
}
|
||||
// This method is called when Electron has finished initialization
|
||||
electron_1.app.whenReady().then(function () {
|
||||
// Create the window after the app is ready
|
||||
createWindow();
|
||||
// Re-create a window if the app is activated and there are no other windows open (MacOS specific behavior)
|
||||
electron_1.app.on("activate", function () {
|
||||
if (electron_1.BrowserWindow.getAllWindows().length === 0)
|
||||
createWindow();
|
||||
});
|
||||
});
|
||||
// Setup the app session when Electron is ready
|
||||
electron_1.app.on("ready", function () {
|
||||
// Redirect requests to static files
|
||||
electron_1.session.defaultSession.webRequest.onBeforeRequest({ urls: ['file:///static/*'] }, function (details, callback) {
|
||||
var url = details.url;
|
||||
var modifiedUrl = url.slice(7);
|
||||
var staticFilePath = path.join(__dirname, modifiedUrl);
|
||||
callback({ redirectURL: "file://".concat(staticFilePath) });
|
||||
});
|
||||
// Set the proxy for the session to route through Tor
|
||||
electron_1.session.defaultSession.setProxy({
|
||||
proxyRules: "socks://localhost:9050",
|
||||
proxyBypassRules: "<local>",
|
||||
});
|
||||
});
|
||||
// Handle all windows closed event except on macOS
|
||||
electron_1.app.on("window-all-closed", function () {
|
||||
// Terminate the Tor process if it exists
|
||||
tor === null || tor === void 0 ? void 0 : tor.kill();
|
||||
if (process.platform !== "darwin")
|
||||
electron_1.app.quit();
|
||||
});
|
||||
//# sourceMappingURL=index.js.map
|
1
desktopApp/index.js.map
Normal file
1
desktopApp/index.js.map
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";;AAAA,uEAAuE;AACvE,qCAAsE;AACtE,+CAAsE;AACtE,2BAA6B;AAC7B,uBAAyB;AAEzB,IAAI,GAAG,GAA0C,IAAI,CAAC;AAEtD,2EAA2E;AAE3E,SAAS,sBAAsB;IAC7B,IAAM,QAAQ,GAAG,EAAE,CAAC,QAAQ,EAAE,CAAC;IAE/B,QAAQ,QAAQ,EAAE,CAAC;QACjB,KAAK,OAAO;YACV,GAAG,GAAG,IAAA,qBAAK,EAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,0BAA0B,CAAC,CAAC,CAAC;YAC9D,MAAM;QACR,KAAK,QAAQ;YACX,GAAG,GAAG,IAAA,qBAAK,EAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,sBAAsB,CAAC,CAAC,CAAC;YAC1D,MAAM;QACR,KAAK,OAAO;YACV,GAAG,GAAG,IAAA,qBAAK,EAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,wBAAwB,CAAC,CAAC,CAAC;YAC5D,MAAM;QACR;YACE,MAAM,IAAI,KAAK,CAAC,gCAAyB,QAAQ,CAAE,CAAC,CAAC;IACzD,CAAC;AACH,CAAC;AAED,gCAAgC;AAChC,sBAAsB,EAAE,CAAA;AAGxB,qCAAqC;AACrC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,UAAC,IAAY;IACjC,IAAM,OAAO,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;IAChC,OAAO,CAAC,GAAG,CAAC,yBAAkB,OAAO,CAAE,CAAC,CAAC;AAC3C,CAAC,CAAC,CAAC;AAEH,qCAAqC;AACrC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,UAAC,IAAY;IACjC,OAAO,CAAC,KAAK,CAAC,0BAAmB,IAAI,CAAC,QAAQ,EAAE,CAAE,CAAC,CAAC;IACpD,cAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,sDAAsD;AACrE,CAAC,CAAC,CAAC;AAEH,iDAAiD;AACjD,SAAS,YAAY;IACnB,qDAAqD;IACrD,IAAM,UAAU,GAAG,IAAI,wBAAa,CAAC;QACnC,KAAK,EAAE,IAAI;QACX,MAAM,EAAE,GAAG;QACX,IAAI,EAAC,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,yCAAyC,CAAC;QACpE,cAAc,EAAE;YACd,eAAe,EAAE,KAAK,EAAG,8CAA8C;YACvE,gBAAgB,EAAE,IAAI,EAAG,wCAAwC;SAClE;KACF,CAAC,CAAC;IAEH,kDAAkD;IAClD,UAAU,CAAC,OAAO,CAAC,iBAAU,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,mBAAmB,CAAC,CAAE,EAAE;QAC3E,YAAY,EAAE,oBAAoB,CAAE,qCAAqC;KAC1E,CAAC,CAAC;IAEH,oDAAoD;IACpD,UAAU,CAAC,WAAW,CAAC,EAAE,CAAC,eAAe,EAAE;QACzC,OAAO,CAAC,GAAG,CAAC,sCAAsC,CAAC,CAAC;QACpD,UAAU,CAAC,OAAO,CAAC,iBAAU,SAAS,uBAAoB,CAAC,CAAC;IAC9D,CAAC,CAAC,CAAC;IAEH,oDAAoD;IACpD,yCAAyC;AAC3C,CAAC;AAED,kEAAkE;AAClE,cAAG,CAAC,SAAS,EAAE,CAAC,IAAI,CAAC;IACnB,2CAA2C;IAC3C,YAAY,EAAE,CAAC;IAEf,2GAA2G;IAC3G,cAAG,CAAC,EAAE,CAAC,UAAU,EAAE;QACjB,IAAI,wBAAa,CAAC,aAAa,EAAE,CAAC,MAAM,KAAK,CAAC;YAAE,YAAY,EAAE,CAAC;IACjE,CAAC,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,+CAA+C;AAC/C,cAAG,CAAC,EAAE,CAAC,OAAO,EAAE;IACd,oCAAoC;IACpC,kBAAO,CAAC,cAAc,CAAC,UAAU,CAAC,eAAe,CAAC,EAAE,IAAI,EAAE,CAAC,kBAAkB,CAAC,EAAE,EAAE,UAAC,OAAO,EAAE,QAAQ;QAClG,IAAM,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;QACxB,IAAM,WAAW,GAAG,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QACjC,IAAM,cAAc,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,WAAW,CAAC,CAAC;QACzD,QAAQ,CAAC,EAAE,WAAW,EAAE,iBAAU,cAAc,CAAE,EAAE,CAAC,CAAC;IACxD,CAAC,CAAC,CAAC;IAEH,qDAAqD;IACrD,kBAAO,CAAC,cAAc,CAAC,QAAQ,CAAC;QAC9B,UAAU,EAAE,wBAAwB;QACpC,gBAAgB,EAAE,SAAS;KAC5B,CAAC,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,kDAAkD;AAClD,cAAG,CAAC,EAAE,CAAC,mBAAmB,EAAE;IAC1B,yCAAyC;IACzC,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,EAAE,CAAC;IACZ,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ;QAAE,cAAG,CAAC,IAAI,EAAE,CAAC;AAChD,CAAC,CAAC,CAAC"}
|
106
desktopApp/index.ts
Normal file
106
desktopApp/index.ts
Normal file
@ -0,0 +1,106 @@
|
||||
// Modules to control application life and create native browser window
|
||||
import { app, BrowserWindow, session, protocol, net } from 'electron';
|
||||
import { spawn, ChildProcessWithoutNullStreams } from 'child_process';
|
||||
import * as path from 'path';
|
||||
import * as os from "os";
|
||||
|
||||
let tor: ChildProcessWithoutNullStreams | null = null;
|
||||
|
||||
// Function to determine the current OS and find the appropriate Tor binary
|
||||
|
||||
function checkPlatformAndRunTor(): void {
|
||||
const platform = os.platform();
|
||||
|
||||
switch (platform) {
|
||||
case 'win32':
|
||||
tor = spawn(path.join(__dirname, '/tor/tor-win/tor/tor.exe'));
|
||||
break;
|
||||
case 'darwin':
|
||||
tor = spawn(path.join(__dirname, '/tor/tor-mac/tor/tor'));
|
||||
break;
|
||||
case 'linux':
|
||||
tor = spawn(path.join(__dirname, '/tor/tor-linux/tor/tor'));
|
||||
break;
|
||||
default:
|
||||
throw new Error(`Unsupported platform: ${platform}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Function to start Tor process
|
||||
checkPlatformAndRunTor()
|
||||
|
||||
|
||||
// Listen for Tor process stdout data
|
||||
tor.stdout.on("data", (data: Buffer) => {
|
||||
const message = data.toString();
|
||||
console.log(`Data received: ${message}`);
|
||||
});
|
||||
|
||||
// Listen for Tor process stderr data
|
||||
tor.stderr.on("data", (data: Buffer) => {
|
||||
console.error(`Error received: ${data.toString()}`);
|
||||
app.exit(1); // Exit the app if there's an error in the Tor process
|
||||
});
|
||||
|
||||
// Function to create the main application window
|
||||
function createWindow(): void {
|
||||
// Create the browser window with specific dimensions
|
||||
const mainWindow = new BrowserWindow({
|
||||
width: 1200,
|
||||
height: 800,
|
||||
icon:path.join(__dirname, '/static/assets/images/favicon-32x32.png'),
|
||||
webPreferences: {
|
||||
nodeIntegration: false, // Disable Node.js integration in the renderer
|
||||
contextIsolation: true, // Enable context isolation for security
|
||||
},
|
||||
});
|
||||
|
||||
// Load the index.html file from the app directory
|
||||
mainWindow.loadURL(`file://${path.resolve(__dirname, 'index.html#/garage')}`, {
|
||||
extraHeaders: "pragma: no-cache\n" // Prevent caching of the loaded file
|
||||
});
|
||||
|
||||
// Handle failed load attempts by reloading the file
|
||||
mainWindow.webContents.on("did-fail-load", () => {
|
||||
console.log("Failed to load the page, retrying...");
|
||||
mainWindow.loadURL(`file://${__dirname}/index.html#/garage`);
|
||||
});
|
||||
|
||||
// Uncomment the following line to open the DevTools
|
||||
// mainWindow.webContents.openDevTools();
|
||||
}
|
||||
|
||||
// This method is called when Electron has finished initialization
|
||||
app.whenReady().then(() => {
|
||||
// Create the window after the app is ready
|
||||
createWindow();
|
||||
|
||||
// Re-create a window if the app is activated and there are no other windows open (MacOS specific behavior)
|
||||
app.on("activate", () => {
|
||||
if (BrowserWindow.getAllWindows().length === 0) createWindow();
|
||||
});
|
||||
});
|
||||
|
||||
// Setup the app session when Electron is ready
|
||||
app.on("ready", () => {
|
||||
// Redirect requests to static files
|
||||
session.defaultSession.webRequest.onBeforeRequest({ urls: ['file:///static/*'] }, (details, callback) => {
|
||||
const url = details.url;
|
||||
const modifiedUrl = url.slice(7);
|
||||
const staticFilePath = path.join(__dirname, modifiedUrl);
|
||||
callback({ redirectURL: `file://${staticFilePath}` });
|
||||
});
|
||||
|
||||
// Set the proxy for the session to route through Tor
|
||||
session.defaultSession.setProxy({
|
||||
proxyRules: "socks://localhost:9050",
|
||||
proxyBypassRules: "<local>",
|
||||
});
|
||||
});
|
||||
|
||||
// Handle all windows closed event except on macOS
|
||||
app.on("window-all-closed", () => {
|
||||
// Terminate the Tor process if it exists
|
||||
tor?.kill();
|
||||
if (process.platform !== "darwin") app.quit();
|
||||
});
|
2834
desktopApp/package-lock.json
generated
Normal file
2834
desktopApp/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
48
desktopApp/package.json
Normal file
48
desktopApp/package.json
Normal file
@ -0,0 +1,48 @@
{
  "name": "desktop-app",
  "version": "0.7.3",
  "description": "",
  "main": "index.js",
  "scripts": {
    "start": "electron .",
    "compile": "./node_modules/.bin/tsc",
    "test": "echo \"Error: no test specified\" && exit 1",
    "package-linux": "npx @electron/packager . Robosats --platform=linux --arch=x64 --icon=./assets/icon/Robosats.svg --overwrite --out=release-builds",
    "package-win": "npx @electron/packager . Robosats --platform=win32 --arch=ia32 --icon=./assets/icon/Robosats.ico --overwrite --out=release-builds",
    "package-mac": "npx @electron/packager . Robosats --platform=darwin --arch=x64 --icon=./assets/icon/Robosats.icns --overwrite --out=release-builds"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@electron/packager": "^18.3.2",
    "electron": "^30.0.3",
    "typescript": "^5.4.5"
  },
  "dependencies": {
    "cors": "^2.8.5",
    "express": "^4.21.1"
  },
  "build": {
    "appId": "com.electron.robosats",
    "productName": "RobosatsApp",
    "directories": {
      "output": "dist"
    },
    "win": {
      "target": [
        "NSIS"
      ]
    },
    "mac": {
      "target": [
        "dmg"
      ]
    },
    "linux": {
      "target": [
        "AppImage",
        "deb"
      ]
    }
  }
}
28
desktopApp/tor/README.CONJURE.md
Normal file
28
desktopApp/tor/README.CONJURE.md
Normal file
@ -0,0 +1,28 @@
# Conjure

[Conjure](https://jhalderm.com/pub/papers/conjure-ccs19.pdf) is an anti-censorship tool in the refraction networking (a.k.a. decoy routing) lineage of circumvention systems. The key innovation of Conjure is to turn the unused IP address space of deploying ISPs into a large pool of **phantom** proxies that users can connect to. Due to the size of unused IPv6 address space and the potential for collateral damage against real websites hosted by the deploying ISPs, Conjure provides an effective solution to the problem of censors enumerating deployed bridges or proxies.

Conjure is currently deployed on the University of Colorado network and a small-to-mid-size ISP in Michigan.

# Conjure Pluggable Transport for Tor

This repository is an implementation of both the client and bridge side of a Tor pluggable transport that uses the deployed Conjure network to allow users to connect to the Tor network. The client side calls the [`gotapdance` library](https://github.com/refraction-networking/gotapdance) to communicate with deployed Conjure stations and route client traffic through the phantom proxies assigned by the station. The bridge side receives [haproxy](https://www.haproxy.org/download/1.8/doc/proxy-protocol.txt) connections from the Conjure station that wrap the proxied client traffic.

# Deployment details

We currently have deployed a low capacity Conjure bridge named [Haunt](https://metrics.torproject.org/rs.html#details/A84C946BF4E14E63A3C92E140532A4594F2C24CD). To connect through this bridge, use the `torrc` file in the `client/` directory as follows:

```
cd client/
tor -f torrc
```

# Warnings

This tool and the deployment are still under active development. We are still working on securing the connection between the deployed Conjure stations and the Conjure bridge. We are also working on improving the censorship resistance of the registration connection between the client and the station. Do not expect this to work out of the box in all areas.

The Conjure station sometimes suffers from a heavy load of users. When this happens, connections will fail. If you are testing this out, try waiting a while and trying again later.

# Conjure development

Due to the complex nature of the Conjure deployment, it can be difficult to set up a local development environment. Check out [phantombox](https://gitlab.torproject.org/cohosh/phantombox) for an automated libvirt-based setup that works on Linux.
109
desktopApp/tor/README.SNOWFLAKE.md
Normal file
109
desktopApp/tor/README.SNOWFLAKE.md
Normal file
@ -0,0 +1,109 @@
|
||||
# Snowflake
|
||||
|
||||
[](https://travis-ci.org/keroserene/snowflake)
|
||||
|
||||
Pluggable Transport using WebRTC, inspired by Flashproxy.
|
||||
|
||||
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
|
||||
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
|
||||
**Table of Contents**
|
||||
|
||||
- [Structure of this Repository](#structure-of-this-repository)
|
||||
- [Usage](#usage)
|
||||
- [Using Snowflake with Tor](#using-snowflake-with-tor)
|
||||
- [Running a Snowflake Proxy](#running-a-snowflake-proxy)
|
||||
- [Using the Snowflake Library with Other Applications](#using-the-snowflake-library-with-other-applications)
|
||||
- [Test Environment](#test-environment)
|
||||
- [FAQ](#faq)
|
||||
- [More info and links](#more-info-and-links)
|
||||
|
||||
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
|
||||
|
||||
### Structure of this Repository
|
||||
|
||||
- `broker/` contains code for the Snowflake broker
|
||||
- `doc/` contains Snowflake documentation and manpages
|
||||
- `client/` contains the Tor pluggable transport client and client library code
|
||||
- `common/` contains generic libraries used by multiple pieces of Snowflake
|
||||
- `proxy/` contains code for the Go standalone Snowflake proxy
|
||||
- `probetest/` contains code for a NAT probetesting service
|
||||
- `server/` contains the Tor pluggable transport server and server library code
|
||||
|
||||
### Usage
|
||||
|
||||
Snowflake is currently deployed as a pluggable transport for Tor.
|
||||
|
||||
#### Using Snowflake with Tor
|
||||
|
||||
To use the Snowflake client with Tor, you will need to add the appropriate `Bridge` and `ClientTransportPlugin` lines to your [torrc](https://2019.www.torproject.org/docs/tor-manual.html.en) file. See the [client README](client) for more information on building and running the Snowflake client.
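A minimal `torrc` sketch is shown below; the `ClientTransportPlugin` and `Bridge` lines mirror entries shipped in the desktop app's `pt_config.json` (the ICE server list is shortened here), while the path to the compiled client binary is an assumption that depends on your build:

```
UseBridges 1
ClientTransportPlugin snowflake exec ./client
Bridge snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302 utls-imitate=hellorandomizedalpn
SocksPort auto
```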
|
||||
|
||||
#### Running a Snowflake Proxy
|
||||
|
||||
You can contribute to Snowflake by running a Snowflake proxy. We have the option to run a proxy in your browser or as a standalone Go program. See our [community documentation](https://community.torproject.org/relay/setup/snowflake/) for more details.
|
||||
|
||||
#### Using the Snowflake Library with Other Applications
|
||||
|
||||
Snowflake can be used as a Go API, and adheres to the [v2.1 pluggable transports specification](). For more information on using the Snowflake Go library, see the [Snowflake library documentation](doc/using-the-snowflake-library.md).
|
||||
|
||||
### Test Environment
|
||||
|
||||
There is a Docker-based test environment at https://github.com/cohosh/snowbox.
|
||||
|
||||
### FAQ
|
||||
|
||||
**Q: How does it work?**
|
||||
|
||||
In the Tor use-case:
|
||||
|
||||
1. Volunteers visit websites which host the "snowflake" proxy. (just
|
||||
like flashproxy)
|
||||
2. Tor clients automatically find available browser proxies via the Broker
|
||||
(the domain fronted signaling channel).
|
||||
3. Tor client and browser proxy establish a WebRTC peer connection.
|
||||
4. Proxy connects to some relay.
|
||||
5. Tor occurs.
|
||||
|
||||
More detailed information about how clients, snowflake proxies, and the Broker
|
||||
fit together on the way...
|
||||
|
||||
**Q: What are the benefits of this PT compared with other PTs?**
|
||||
|
||||
Snowflake combines the advantages of flashproxy and meek. Primarily:
|
||||
|
||||
- It has the convenience of Meek, but can support orders of magnitude more users with negligible CDN costs. (Domain fronting is only used for brief signalling / NAT-piercing to set up the P2P WebRTC DataChannels which handle the actual traffic.)

- Arbitrarily high numbers of volunteer proxies are possible like in flashproxy, but NATs are no longer a usability barrier - no need for manual port forwarding!
|
||||
|
||||
**Q: Why is this called Snowflake?**
|
||||
|
||||
It utilizes the "ICE" negotiation via WebRTC, and also involves a great
|
||||
abundance of ephemeral and short-lived (and special!) volunteer proxies...
|
||||
|
||||
### More info and links
|
||||
|
||||
We have more documentation in the [Snowflake wiki](https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/snowflake/-/wikis/home) and at https://snowflake.torproject.org/.
|
||||
|
||||
|
||||
##### -- Android AAR Reproducible Build Setup --
|
||||
|
||||
Using `gomobile` it is possible to build snowflake as shared libraries for all
|
||||
the architectures supported by Android. This is in the _.gitlab-ci.yml_, which
|
||||
runs in GitLab CI. It is also possible to run this setup in a Virtual Machine
|
||||
using [vagrant](https://www.vagrantup.com/). Just run `vagrant up` and it will
|
||||
create and provision the VM. `vagrant ssh` to get into the VM to use it as a
|
||||
development environment.
|
||||
|
||||
##### uTLS Settings
|
||||
|
||||
Snowflake communicates with a broker that serves as its signaling server over a TLS-based domain-fronted connection, which may be identified by its use of the Go TLS stack.
|
||||
|
||||
uTLS is a software library designed to imitate the TLS Client Hello fingerprint of browsers or other popular software's TLS stacks, in order to evade censorship based on TLS Client Hello fingerprinting; enable it with `-utls-imitate`. You can use `-version` to see a list of supported values.

Depending on client and server configuration, it may not always work as expected, since not all extensions are correctly implemented.

You can also remove the SNI (Server Name Indication) from the Client Hello to evade censorship with `-utls-nosni`, though not all servers support this.
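As a hedged illustration of how these flags might be passed to the client (the binary path is an assumption, and `hellorandomizedalpn` is the value used by the bridge lines shipped in `pt_config.json`):

```
# imitate a randomized, browser-like Client Hello
ClientTransportPlugin snowflake exec ./client -utls-imitate hellorandomizedalpn

# or additionally strip the SNI from the Client Hello (not all servers support this)
ClientTransportPlugin snowflake exec ./client -utls-imitate hellorandomizedalpn -utls-nosni
```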
|
263
desktopApp/tor/README.WEBTUNNEL.md
Normal file
263
desktopApp/tor/README.WEBTUNNEL.md
Normal file
@ -0,0 +1,263 @@
|
||||
# WebTunnel
|
||||
|
||||
Pluggable Transport based on HTTP Upgrade (HTTPT)
|
||||
|
||||
WebTunnel is a pluggable transport that attempts to imitate web browsing activity, based on [HTTPT](https://censorbib.nymity.ch/#Frolov2020b).
|
||||
|
||||
## Client Usage
|
||||
Connect to a WebTunnel server with a Tor configuration file like:
|
||||
```
|
||||
UseBridges 1
|
||||
DataDirectory datadir
|
||||
|
||||
ClientTransportPlugin webtunnel exec ./client
|
||||
|
||||
Bridge webtunnel 192.0.2.3:1 url=https://akbwadp9lc5fyyz0cj4d76z643pxgbfh6oyc-167-71-71-157.sslip.io/5m9yq0j4ghkz0fz7qmuw58cvbjon0ebnrsp0
|
||||
|
||||
SocksPort auto
|
||||
|
||||
Log info
|
||||
```
|
||||
## Server Setup
|
||||
|
||||
#### Install Tor
|
||||
On a Debian system, first install tor normally with
|
||||
```
|
||||
apt install apt-transport-https
|
||||
lsb_release -c
|
||||
nano /etc/apt/sources.list.d/tor.list
|
||||
wget -qO- https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor | tee /usr/share/keyrings/tor-archive-keyring.gpg >/dev/null
|
||||
apt update
|
||||
apt install tor deb.torproject.org-keyring
|
||||
```
|
||||
|
||||
### Disable default instance
|
||||
The default Tor instance is not useful for this setup, so the next step is to disable it.
|
||||
```
|
||||
systemctl stop tor@default.service
|
||||
systemctl mask tor@default.service
|
||||
```
|
||||
|
||||
### Get Environment Ready
|
||||
```
|
||||
#copy server file to server
|
||||
scp server root@$SERVER_ADDRESS:/var/lib/torwebtunnel/webtunnel
|
||||
```
|
||||
|
||||
then create server torrc at `/var/lib/torwebtunnel/torrc`
|
||||
```
|
||||
BridgeRelay 1
|
||||
|
||||
ORPort 10000
|
||||
|
||||
ServerTransportPlugin webtunnel exec /var/lib/torwebtunnel/webtunnel
|
||||
|
||||
ServerTransportListenAddr webtunnel 127.0.0.1:11000
|
||||
|
||||
ExtORPort auto
|
||||
|
||||
ContactInfo WebTunnel email: tor.relay.email@torproject.net ciissversion:2
|
||||
|
||||
Nickname WebTunnelTest
|
||||
|
||||
PublishServerDescriptor 1
|
||||
BridgeDistribution none
|
||||
|
||||
DataDirectory /var/lib/torwebtunnel/tor-data
|
||||
CacheDirectory /tmp/tor-tmp-torwebtunnel
|
||||
|
||||
SocksPort 0
|
||||
```
|
||||
|
||||
#### Configure service unit file
|
||||
Create a service unit file as follow
|
||||
```
|
||||
[Unit]
|
||||
Description=Tor Web Tunnel
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
DynamicUser=yes
|
||||
PrivateUsers=true
|
||||
PrivateMounts=true
|
||||
ProtectSystem=strict
|
||||
PrivateTmp=true
|
||||
PrivateDevices=true
|
||||
ProtectClock=true
|
||||
NoNewPrivileges=true
|
||||
ProtectHome=tmpfs
|
||||
ProtectKernelModules=true
|
||||
ProtectKernelLogs=true
|
||||
|
||||
StateDirectory=torwebtunnel
|
||||
|
||||
ExecStart=/usr/bin/tor -f /var/lib/torwebtunnel/torrc --RunAsDaemon 0
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
```
|
||||
|
||||
#### Obtain Certificate
|
||||
WebTunnel requires a valid TLS certificate. To obtain one:
|
||||
```
|
||||
curl https://get.acme.sh | sh -s email=my@example.com
|
||||
~/.acme.sh/acme.sh --issue --standalone --domain $SERVER_ADDRESS
|
||||
```
|
||||
|
||||
#### Install & Configure Nginx
|
||||
To coexist with other content at a single port, it is necessary to install a reverse proxy like nginx:
|
||||
```
|
||||
apt install nginx
|
||||
```
|
||||
|
||||
And then configure HTTP Upgrade forwarding at /etc/nginx/nginx.conf.
|
||||
```
|
||||
--- a/before.conf
|
||||
+++ b/after.conf
|
||||
@@ -60,6 +60,13 @@ http {
|
||||
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
include /etc/nginx/sites-enabled/*;
|
||||
+
|
||||
+ #WebSocket Support
|
||||
+ map $http_upgrade $connection_upgrade {
|
||||
+ default upgrade;
|
||||
+ '' close;
|
||||
+ }
|
||||
+
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Finally, add the HTTP forwarding settings in a new file under /etc/nginx/sites-enabled.
|
||||
```
|
||||
server {
|
||||
listen [::]:443 ssl http2;
|
||||
listen 443 ssl http2;
|
||||
server_name $SERVER_ADDRESS;
|
||||
#ssl on;
|
||||
|
||||
# certs sent to the client in SERVER HELLO are concatenated in ssl_certificate
|
||||
ssl_certificate /etc/nginx/ssl/fullchain.cer;
|
||||
ssl_certificate_key /etc/nginx/ssl/key.key;
|
||||
|
||||
|
||||
ssl_session_timeout 15m;
|
||||
|
||||
ssl_protocols TLSv1.2 TLSv1.3;
|
||||
|
||||
ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384;
|
||||
|
||||
ssl_prefer_server_ciphers off;
|
||||
|
||||
ssl_session_cache shared:MozSSL:50m;
|
||||
#ssl_ecdh_curve secp521r1,prime256v1,secp384r1;
|
||||
ssl_session_tickets off;
|
||||
|
||||
add_header Strict-Transport-Security "max-age=63072000" always;
|
||||
|
||||
location /$PATH {
|
||||
proxy_pass http://127.0.0.1:11000;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
###Set WebSocket headers ####
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
|
||||
### Set Proxy headers ####
|
||||
proxy_set_header Accept-Encoding "";
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
add_header Front-End-Https on;
|
||||
|
||||
proxy_redirect off;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## Docker Setup
|
||||
|
||||
Webtunnel is a new pluggable transport available for bridge operators.
|
||||
|
||||
### Prerequisites
|
||||
An existing website using an nginx load balancer to handle traffic (other load balancers are currently untested).

The site handles traffic directly, without a CDN (CDN passthrough is currently untested).

A container runtime like Docker.
|
||||
|
||||
### Configure nginx Forwarding
|
||||
If you haven't already, configure WebSocket forwarding support in nginx by configuring HTTP Upgrade forwarding in /etc/nginx/nginx.conf:
|
||||
```
|
||||
--- a/before.conf
|
||||
+++ b/after.conf
|
||||
@@ -60,6 +60,13 @@ http {
|
||||
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
include /etc/nginx/sites-enabled/*;
|
||||
+
|
||||
+ #WebSocket Support
|
||||
+ map $http_upgrade $connection_upgrade {
|
||||
+ default upgrade;
|
||||
+ '' close;
|
||||
+ }
|
||||
+
|
||||
}
|
||||
```
|
||||
Then add a forwarded path under one of the served domains, typically defined in files within `/etc/nginx/sites-enabled/`; replace $PATH with a random string (which you can generate with `echo $(cat /dev/urandom | tr -cd "qwertyuiopasdfghjklzxcvbnmMNBVCXZLKJHGFDSAQWERTUIOP0987654321"|head -c 24)`):
|
||||
```
|
||||
location /$PATH {
|
||||
proxy_pass http://127.0.0.1:11000;
|
||||
proxy_http_version 1.1;
|
||||
|
||||
###Set WebSocket headers ####
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
|
||||
### Set Proxy headers ####
|
||||
proxy_set_header Accept-Encoding "";
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
add_header Front-End-Https on;
|
||||
|
||||
proxy_redirect off;
|
||||
}
|
||||
```
|
||||
|
||||
### Install Docker Runtime (if necessary)
|
||||
```
|
||||
apt install curl sudo
|
||||
curl -fsSL https://get.docker.com -o get-docker.sh
|
||||
sudo sh ./get-docker.sh
|
||||
```
|
||||
|
||||
### Run Dockerized WebTunnel Server
|
||||
Replace `URL` with your domain and path, and `OPERATOR_EMAIL` with your email address, then run:
|
||||
```
|
||||
truncate --size 0 .env
|
||||
echo "URL=https://yourdomain/and/path" >> .env
|
||||
echo "OPERATOR_EMAIL=your@email.org" >> .env
|
||||
echo "BRIDGE_NICKNAME=WTBr$(cat /dev/urandom | tr -cd 'qwertyuiopasdfghjklzxcvbnmMNBVCXZLKJHGFDSAQWERTUIOP0987654321'|head -c 10)" >> .env
|
||||
echo "GENEDORPORT=4$(cat /dev/urandom | tr -cd '0987654321'|head -c 4)" >> .env
|
||||
```
|
||||
This will create an environment file with the configuration of the WebTunnel bridge.
|
||||
|
||||
After creating the configuration file, download the WebTunnel docker compose file and bring it up:
|
||||
````shell
|
||||
curl https://gitlab.torproject.org/tpo/anti-censorship/pluggable-transports/webtunnel/-/raw/main/release/container/docker-compose.yml?inline=false > docker-compose.yml
|
||||
docker compose up -d
|
||||
````
|
||||
It includes auto-update by default and will update the WebTunnel bridge server without any further action. Remove `watchtower` to disable this behavior.
|
||||
|
||||
### Get the Bridge Line and Check That It Is Running
|
||||
You can obtain the bridge line and verify that it is working by running:
|
||||
```shell
|
||||
docker compose exec webtunnel-bridge get-bridge-line.sh
|
||||
```
|
359517
desktopApp/tor/geoip
Normal file
359517
desktopApp/tor/geoip
Normal file
File diff suppressed because it is too large
Load Diff
155241
desktopApp/tor/geoip6
Normal file
155241
desktopApp/tor/geoip6
Normal file
File diff suppressed because it is too large
Load Diff
1
desktopApp/tor/tor-linux/data/geoip
Symbolic link
1
desktopApp/tor/tor-linux/data/geoip
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip
|
1
desktopApp/tor/tor-linux/data/geoip6
Symbolic link
1
desktopApp/tor/tor-linux/data/geoip6
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip6
|
BIN
desktopApp/tor/tor-linux/tor/libcrypto.so.3
Executable file
BIN
desktopApp/tor/tor-linux/tor/libcrypto.so.3
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/libevent-2.1.so.7
Executable file
BIN
desktopApp/tor/tor-linux/tor/libevent-2.1.so.7
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/libssl.so.3
Executable file
BIN
desktopApp/tor/tor-linux/tor/libssl.so.3
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/libstdc++.so.6
Executable file
BIN
desktopApp/tor/tor-linux/tor/libstdc++.so.6
Executable file
Binary file not shown.
@ -0,0 +1 @@
|
||||
../../../README.CONJURE.md
|
@ -0,0 +1 @@
|
||||
../../../README.SNOWFLAKE.md
|
@ -0,0 +1 @@
|
||||
../../../README.WEBTUNNEL.md
|
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/conjure-client
Executable file
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/conjure-client
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/lyrebird
Executable file
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/lyrebird
Executable file
Binary file not shown.
@ -0,0 +1,32 @@
|
||||
{
|
||||
"recommendedDefault" : "obfs4",
|
||||
"pluggableTransports" : {
|
||||
"lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird",
|
||||
"snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client",
|
||||
"webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client",
|
||||
"conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client -registerURL https://registration.refraction.network/api"
|
||||
},
|
||||
"bridges" : {
|
||||
"meek-azure" : [
|
||||
"meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
|
||||
],
|
||||
"obfs4" : [
|
||||
"obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
|
||||
"obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
|
||||
"obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
|
||||
"obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
|
||||
"obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
|
||||
"obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
|
||||
"obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
|
||||
"obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
|
||||
"obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
|
||||
],
|
||||
"snowflake" : [
|
||||
"snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
|
||||
"snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/webtunnel-client
Executable file
BIN
desktopApp/tor/tor-linux/tor/pluggable_transports/webtunnel-client
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-linux/tor/tor
Executable file
BIN
desktopApp/tor/tor-linux/tor/tor
Executable file
Binary file not shown.
1
desktopApp/tor/tor-mac/data/geoip
Symbolic link
1
desktopApp/tor/tor-mac/data/geoip
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip
|
1
desktopApp/tor/tor-mac/data/geoip6
Symbolic link
1
desktopApp/tor/tor-mac/data/geoip6
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip6
|
BIN
desktopApp/tor/tor-mac/tor/libevent-2.1.7.dylib
Executable file
BIN
desktopApp/tor/tor-mac/tor/libevent-2.1.7.dylib
Executable file
Binary file not shown.
@ -0,0 +1 @@
|
||||
../../../README.CONJURE.md
|
@ -0,0 +1 @@
|
||||
../../../README.SNOWFLAKE.md
|
@ -0,0 +1 @@
|
||||
../../../README.WEBTUNNEL.md
|
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/conjure-client
Executable file
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/conjure-client
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/lyrebird
Executable file
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/lyrebird
Executable file
Binary file not shown.
@ -0,0 +1,32 @@
|
||||
{
|
||||
"recommendedDefault" : "obfs4",
|
||||
"pluggableTransports" : {
|
||||
"lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird",
|
||||
"snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client",
|
||||
"webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client",
|
||||
"conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client -registerURL https://registration.refraction.network/api"
|
||||
},
|
||||
"bridges" : {
|
||||
"meek-azure" : [
|
||||
"meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
|
||||
],
|
||||
"obfs4" : [
|
||||
"obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
|
||||
"obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
|
||||
"obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
|
||||
"obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
|
||||
"obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
|
||||
"obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
|
||||
"obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
|
||||
"obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
|
||||
"obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
|
||||
],
|
||||
"snowflake" : [
|
||||
"snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
|
||||
"snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/snowflake-client
Executable file
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/snowflake-client
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/webtunnel-client
Executable file
BIN
desktopApp/tor/tor-mac/tor/pluggable_transports/webtunnel-client
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-mac/tor/tor
Executable file
BIN
desktopApp/tor/tor-mac/tor/tor
Executable file
Binary file not shown.
1
desktopApp/tor/tor-win/data/geoip
Symbolic link
1
desktopApp/tor/tor-win/data/geoip
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip
|
1
desktopApp/tor/tor-win/data/geoip6
Symbolic link
1
desktopApp/tor/tor-win/data/geoip6
Symbolic link
@ -0,0 +1 @@
|
||||
../../geoip6
|
@ -0,0 +1 @@
|
||||
../../../README.CONJURE.md
|
@ -0,0 +1 @@
|
||||
../../../README.SNOWFLAKE.md
|
@ -0,0 +1 @@
|
||||
../../../README.WEBTUNNEL.md
|
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/conjure-client.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/conjure-client.exe
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/lyrebird.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/lyrebird.exe
Executable file
Binary file not shown.
@ -0,0 +1,32 @@
|
||||
{
|
||||
"recommendedDefault" : "obfs4",
|
||||
"pluggableTransports" : {
|
||||
"lyrebird" : "ClientTransportPlugin meek_lite,obfs2,obfs3,obfs4,scramblesuit exec ${pt_path}lyrebird.exe",
|
||||
"snowflake" : "ClientTransportPlugin snowflake exec ${pt_path}snowflake-client.exe",
|
||||
"webtunnel" : "ClientTransportPlugin webtunnel exec ${pt_path}webtunnel-client.exe",
|
||||
"conjure" : "ClientTransportPlugin conjure exec ${pt_path}conjure-client.exe -registerURL https://registration.refraction.network/api"
|
||||
},
|
||||
"bridges" : {
|
||||
"meek-azure" : [
|
||||
"meek_lite 192.0.2.18:80 BE776A53492E1E044A26F17306E1BC46A55A1625 url=https://meek.azureedge.net/ front=ajax.aspnetcdn.com"
|
||||
],
|
||||
"obfs4" : [
|
||||
"obfs4 192.95.36.142:443 CDF2E852BF539B82BD10E27E9115A31734E378C2 cert=qUVQ0srL1JI/vO6V6m/24anYXiJD3QP2HgzUKQtQ7GRqqUvs7P+tG43RtAqdhLOALP7DJQ iat-mode=1",
|
||||
"obfs4 37.218.245.14:38224 D9A82D2F9C2F65A18407B1D2B764F130847F8B5D cert=bjRaMrr1BRiAW8IE9U5z27fQaYgOhX1UCmOpg2pFpoMvo6ZgQMzLsaTzzQNTlm7hNcb+Sg iat-mode=0",
|
||||
"obfs4 85.31.186.98:443 011F2599C0E9B27EE74B353155E244813763C3E5 cert=ayq0XzCwhpdysn5o0EyDUbmSOx3X/oTEbzDMvczHOdBJKlvIdHHLJGkZARtT4dcBFArPPg iat-mode=0",
|
||||
"obfs4 85.31.186.26:443 91A6354697E6B02A386312F68D82CF86824D3606 cert=PBwr+S8JTVZo6MPdHnkTwXJPILWADLqfMGoVvhZClMq/Urndyd42BwX9YFJHZnBB3H0XCw iat-mode=0",
|
||||
"obfs4 193.11.166.194:27015 2D82C2E354D531A68469ADF7F878FA6060C6BACA cert=4TLQPJrTSaDffMK7Nbao6LC7G9OW/NHkUwIdjLSS3KYf0Nv4/nQiiI8dY2TcsQx01NniOg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27020 86AC7B8D430DAC4117E9F42C9EAED18133863AAF cert=0LDeJH4JzMDtkJJrFphJCiPqKx7loozKN7VNfuukMGfHO0Z8OGdzHVkhVAOfo1mUdv9cMg iat-mode=0",
|
||||
"obfs4 193.11.166.194:27025 1AE2C08904527FEA90C4C4F8C1083EA59FBC6FAF cert=ItvYZzW5tn6v3G4UnQa6Qz04Npro6e81AP70YujmK/KXwDFPTs3aHXcHp4n8Vt6w/bv8cA iat-mode=0",
|
||||
"obfs4 209.148.46.65:443 74FAD13168806246602538555B5521A0383A1875 cert=ssH+9rP8dG2NLDN2XuFw63hIO/9MNNinLmxQDpVa+7kTOa9/m+tGWT1SmSYpQ9uTBGa6Hw iat-mode=0",
|
||||
"obfs4 146.57.248.225:22 10A6CD36A537FCE513A322361547444B393989F0 cert=K1gDtDAIcUfeLqbstggjIw2rtgIKqdIhUlHp82XRqNSq/mtAjp1BIC9vHKJ2FAEpGssTPw iat-mode=0",
|
||||
"obfs4 45.145.95.6:27015 C5B7CD6946FF10C5B3E89691A7D3F2C122D2117C cert=TD7PbUO0/0k6xYHMPW3vJxICfkMZNdkRrb63Zhl5j9dW3iRGiCx0A7mPhe5T2EDzQ35+Zw iat-mode=0",
|
||||
"obfs4 51.222.13.177:80 5EDAC3B810E12B01F6FD8050D2FD3E277B289A08 cert=2uplIpLQ0q9+0qMFrK5pkaYRDOe460LL9WHBvatgkuRr/SL31wBOEupaMMJ6koRE6Ld0ew iat-mode=0"
|
||||
],
|
||||
"snowflake" : [
|
||||
"snowflake 192.0.2.3:80 2B280B23E1107BB62ABFC40DDCC8824814F80A72 fingerprint=2B280B23E1107BB62ABFC40DDCC8824814F80A72 url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.com:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn",
|
||||
"snowflake 192.0.2.4:80 8838024498816A039FCBBAB14E6F40A0843051FA fingerprint=8838024498816A039FCBBAB14E6F40A0843051FA url=https://1098762253.rsc.cdn77.org/ fronts=www.cdn77.com,www.phpmyadmin.net ice=stun:stun.l.google.com:19302,stun:stun.antisip.com:3478,stun:stun.bluesip.net:3478,stun:stun.dus.net:3478,stun:stun.epygi.com:3478,stun:stun.sonetel.net:3478,stun:stun.uls.co.za:3478,stun:stun.voipgate.com:3478,stun:stun.voys.nl:3478 utls-imitate=hellorandomizedalpn"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/snowflake-client.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/snowflake-client.exe
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/webtunnel-client.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/pluggable_transports/webtunnel-client.exe
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-win/tor/tor-gencert.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/tor-gencert.exe
Executable file
Binary file not shown.
BIN
desktopApp/tor/tor-win/tor/tor.exe
Executable file
BIN
desktopApp/tor/tor-win/tor/tor.exe
Executable file
Binary file not shown.
14
desktopApp/tsconfig.json
Normal file
14
desktopApp/tsconfig.json
Normal file
@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "target": "es5",
    "module": "commonjs",
    "moduleResolution": "node",
    "sourceMap": true,
    "emitDecoratorMetadata": true,
    "experimentalDecorators": true,
    "removeComments": false,
    "noImplicitAny": false,
    "outDir": "."
  },
  "exclude": [ "node_modules" ]
}
@ -1,14 +1,9 @@
|
||||
version: '3.9'
|
||||
services:
|
||||
jekyll:
|
||||
image: jekyll/jekyll:4.0
|
||||
container_name: pages
|
||||
build: ./docs
|
||||
restart: always
|
||||
volumes:
|
||||
- .:/usr/src/robosats
|
||||
command: bash -c "cd /usr/src/robosats/docs/
|
||||
&& bundle install
|
||||
&& bundle exec jekyll serve --force_polling -H 0.0.0.0 -P 4000"
|
||||
ports:
|
||||
- 4000:4000
|
||||
|
||||
@ -226,6 +221,16 @@ services:
    volumes:
      - ./node/db:/var/lib/postgresql/data

  strfry:
    build: ./docker/strfry
    container_name: strfry-dev
    restart: unless-stopped
    volumes:
      - ./docker/strfry/strfry.conf:/etc/strfry.conf:ro
      - ./docker/strfry/onion_urls.txt:/app/onion_urls.txt:ro
      - ./node/strfry/db:/app/strfry-db:rw
    network_mode: service:tor

  # # Postgresql for CLN
  # postgres-cln:
  #   image: postgres:14.2-alpine
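The strfry relay is the notable addition here: it shares the tor container's network namespace (network_mode: service:tor), so it exposes no ports of its own and reads its config and onion URL list straight from the repo. A quick way to confirm it starts cleanly, as a sketch outside the diff (service name as defined above):

# bring up the new Nostr relay in the development stack and follow its logs
docker-compose up -d strfry
docker-compose logs -f strfry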
@ -3,6 +3,8 @@

# Some useful handy commands that hopefully are never needed

# docker-compose -f docker-tests.yml --env-file tests/compose.env down --volumes

# docker exec -it btc bitcoin-cli -chain=regtest -rpcpassword=test -rpcuser=test createwallet default
# docker exec -it btc bitcoin-cli -chain=regtest -rpcpassword=test -rpcuser=test -generate 101
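The teardown command above has an obvious counterpart for bringing the regtest test stack up; a minimal sketch using the same compose and env files referenced in the comment:

# build and start the regtest test stack described in docker-tests.yml
docker-compose -f docker-tests.yml --env-file tests/compose.env up -d --build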
@ -13,7 +15,7 @@ version: '3.9'
services:
  bitcoind:
    image: ruimarinho/bitcoin-core:${BITCOIND_VERSION:-24.0.1}-alpine
    container_name: btc
    container_name: test-btc
    restart: always
    ports:
      - "8000:8000"
@ -25,8 +27,11 @@ services:
      - "9998:9998"
      - "5432:5432"
      - "6379:6379"
      - "7777:7777"
    volumes:
      - bitcoin:/bitcoin/.bitcoin/
      - ./tests/bitcoind/entrypoint.sh:/entrypoint.sh
    entrypoint: ["/entrypoint.sh"]
    command:
      --txindex=1
      --printtoconsole
@ -46,7 +51,7 @@ services:

  coordinator-LND:
    image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
    container_name: coordinator-LND
    container_name: test-coordinator-LND
    restart: always
    volumes:
      - bitcoin:/root/.bitcoin/
@ -77,23 +82,23 @@ services:
    network_mode: service:bitcoind

  coordinator-CLN:
    image: elementsproject/lightningd:${CLN_VERSION:-v23.08.1}
    image: elementsproject/lightningd:${CLN_VERSION:-v24.05}
    restart: always
    container_name: coordinator-CLN
    container_name: test-coordinator-CLN
    environment:
      LIGHTNINGD_NETWORK: 'regtest'
    volumes:
      - cln:/root/.lightning
      - ./docker/cln/plugins/cln-grpc-hold:/root/.lightning/plugins/cln-grpc-hold
      - ./docker/cln/plugins/holdinvoice:/root/.lightning/plugins/holdinvoice
      - bitcoin:/root/.bitcoin
    command: --regtest --wumbo --bitcoin-rpcuser=test --bitcoin-rpcpassword=test --log-level=debug --rest-host=0.0.0.0 --rest-port=3010 --bind-addr=127.0.0.1:9737 --max-concurrent-htlcs=483 --grpc-port=9999 --grpc-hold-port=9998 --important-plugin=/root/.lightning/plugins/cln-grpc-hold --database-upgrade=true
    command: --regtest --bitcoin-rpcuser=test --bitcoin-rpcpassword=test --developer --dev-bitcoind-poll=1 --dev-fast-gossip --log-level=debug --bind-addr=127.0.0.1:9737 --max-concurrent-htlcs=483 --grpc-port=9999 --grpc-hold-port=9998 --important-plugin=/root/.lightning/plugins/holdinvoice --database-upgrade=true
    depends_on:
      - bitcoind
    network_mode: service:bitcoind

  robot-LND:
    image: lightninglabs/lnd:${LND_VERSION:-v0.17.0-beta}
    container_name: robot-LND
    container_name: test-robot-LND
    restart: always
    volumes:
      - bitcoin:/root/.bitcoin/
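The coordinator-CLN changes swap the cln-grpc-hold fork for the standalone holdinvoice plugin and bump CLN to v24.05. Once the test stack is up, one way to confirm the plugin actually loaded is to ask CLN for its plugin list; a sketch outside the diff, using the container name defined above:

# list active plugins inside the test coordinator and look for holdinvoice
docker exec -it test-coordinator-CLN lightning-cli --network=regtest plugin list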
@ -125,7 +130,7 @@ services:

  redis:
    image: redis:${REDIS_VERSION:-7.2.1}-alpine
    container_name: redis
    container_name: test-redis
    restart: always
    volumes:
      - redisdata:/data
@ -137,7 +142,7 @@ services:
      args:
        DEVELOPMENT: True
    image: backend-image
    container_name: coordinator
    container_name: test-coordinator
    restart: always
    environment:
      DEVELOPMENT: True
@ -167,7 +172,7 @@ services:

  postgres:
    image: postgres:${POSTGRES_VERSION:-14.2}-alpine
    container_name: sql
    container_name: test-sql
    restart: always
    environment:
      POSTGRES_PASSWORD: 'example'
@ -178,7 +183,7 @@ services:
  # celery-worker:
  #   image: backend-image
  #   pull_policy: never
  #   container_name: celery-worker
  #   container_name: test-celery-worker
  #   restart: always
  #   environment:
  #     DEVELOPMENT: True
@ -1,7 +1,7 @@
FROM debian:bullseye-slim as builder
ARG DEBIAN_FRONTEND=noninteractive

ARG LIGHTNINGD_VERSION=v23.08
ARG LIGHTNINGD_VERSION=v24.05
RUN apt-get update -qq && \
    apt-get install -qq -y --no-install-recommends \
    autoconf \
@ -18,13 +18,13 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
RUN rustup toolchain install stable --component rustfmt --allow-downgrade

WORKDIR /opt/lightningd
RUN git clone --recursive --branch cln-grpc-hold https://github.com/daywalker90/lightning.git /tmp/cln-grpc-hold
RUN cd /tmp/cln-grpc-hold \
RUN git clone https://github.com/daywalker90/holdinvoice.git /tmp/holdinvoice
RUN cd /tmp/holdinvoice \
    && cargo build --release

FROM elementsproject/lightningd:v23.08 as final
FROM elementsproject/lightningd:v24.05 as final

COPY --from=builder /tmp/cln-grpc-hold/target/release/cln-grpc-hold /tmp/cln-grpc-hold
COPY --from=builder /tmp/holdinvoice/target/release/holdinvoice /tmp/holdinvoice
COPY config /tmp/config
COPY entrypoint.sh entrypoint.sh
RUN chmod +x entrypoint.sh
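Since the plugin source now comes from the holdinvoice repository, the CLN image has to be rebuilt before either compose stack picks up the change. A minimal sketch, assuming this Dockerfile lives under ./docker/cln as the volume paths in the compose files suggest (the image tag is illustrative):

# rebuild the CLN image so holdinvoice is compiled from the new repo
docker build -t robosats-cln-dev ./docker/cln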
@ -5,6 +5,6 @@ addr=statictor:127.0.0.1:9051
grpc-port=9999
grpc-hold-port=9998
always-use-proxy=true
important-plugin=/root/.lightning/plugins/cln-grpc-hold
important-plugin=/root/.lightning/plugins/holdinvoice
# wallet=postgres://user:pass@localhost:5433/cln
# bookkeeper-db=postgres://user:pass@localhost:5433/cln
@ -17,9 +17,9 @@ if [ "$EXPOSE_TCP" == "true" ]; then
    socat "TCP4-listen:$LIGHTNINGD_RPC_PORT,fork,reuseaddr" "UNIX-CONNECT:${networkdatadir}/lightning-rpc" &
    fg %-
else
    # Always copy the cln-grpc-hodl plugin into the plugins directory on start up
    # Always copy the holdinvoice plugin into the plugins directory on start up
    mkdir -p /root/.lightning/plugins
    cp /tmp/cln-grpc-hold /root/.lightning/plugins/cln-grpc-hold
    cp /tmp/holdinvoice /root/.lightning/plugins/holdinvoice
    if [ ! -f /root/.lightning/config ]; then
        cp /tmp/config /root/.lightning/config
    fi
Some files were not shown because too many files have changed in this diff.